2017-12-14 20:12:14 +02:00
const BaseModel = require ( 'lib/BaseModel.js' ) ;
2017-11-03 02:09:34 +02:00
const { Database } = require ( 'lib/database.js' ) ;
2017-12-14 20:12:14 +02:00
const Setting = require ( 'lib/models/Setting.js' ) ;
2019-05-28 19:10:21 +02:00
const ItemChange = require ( 'lib/models/ItemChange.js' ) ;
2018-01-02 21:17:14 +02:00
const JoplinError = require ( 'lib/JoplinError.js' ) ;
2017-11-03 02:09:34 +02:00
const { time } = require ( 'lib/time-utils.js' ) ;
const { sprintf } = require ( 'sprintf-js' ) ;
2017-12-14 22:21:36 +02:00
const { _ } = require ( 'lib/locale.js' ) ;
2017-11-03 02:09:34 +02:00
const moment = require ( 'moment' ) ;
2018-05-23 13:14:38 +02:00
const markdownUtils = require ( 'lib/markdownUtils' ) ;
2017-06-15 20:18:48 +02:00
// Base class for all syncable item models (notes, folders, resources, tags,
// master keys, revisions — see syncItemDefinitions_ at the bottom of the file).
class BaseItem extends BaseModel {
	// Sync items are identified by UUIDs rather than auto-increment ids.
	static useUuid() {
		return true;
	}

	// Whether items of this type can be end-to-end encrypted. Subclasses may
	// override this (master keys are excluded via encryptableItemClassNames()).
	static encryptionSupported() {
		return true;
	}
2017-07-06 21:48:17 +02:00
static loadClass ( className , classRef ) {
for ( let i = 0 ; i < BaseItem . syncItemDefinitions _ . length ; i ++ ) {
if ( BaseItem . syncItemDefinitions _ [ i ] . className == className ) {
BaseItem . syncItemDefinitions _ [ i ] . classRef = classRef ;
return ;
}
}
2019-09-19 23:51:18 +02:00
throw new Error ( ` Invalid class name: ${ className } ` ) ;
2017-07-06 21:48:17 +02:00
}
2018-06-27 22:45:31 +02:00
static async findUniqueItemTitle ( title ) {
let counter = 1 ;
let titleToTry = title ;
while ( true ) {
const item = await this . loadByField ( 'title' , titleToTry ) ;
if ( ! item ) return titleToTry ;
2019-09-19 23:51:18 +02:00
titleToTry = ` ${ title } ( ${ counter } ) ` ;
2018-06-27 22:45:31 +02:00
counter ++ ;
2019-09-19 23:51:18 +02:00
if ( counter >= 100 ) titleToTry = ` ${ title } ( ${ new Date ( ) . getTime ( ) } ) ` ;
2018-06-27 22:45:31 +02:00
if ( counter >= 1000 ) throw new Error ( 'Cannot find unique title' ) ;
}
}
2017-07-02 14:02:07 +02:00
// Need to dynamically load the classes like this to avoid circular dependencies
static getClass ( name ) {
2017-07-06 21:48:17 +02:00
for ( let i = 0 ; i < BaseItem . syncItemDefinitions _ . length ; i ++ ) {
if ( BaseItem . syncItemDefinitions _ [ i ] . className == name ) {
2017-12-28 21:57:21 +02:00
const classRef = BaseItem . syncItemDefinitions _ [ i ] . classRef ;
2019-09-19 23:51:18 +02:00
if ( ! classRef ) throw new Error ( ` Class has not been loaded: ${ name } ` ) ;
2017-07-06 21:48:17 +02:00
return BaseItem . syncItemDefinitions _ [ i ] . classRef ;
}
}
2019-09-19 23:51:18 +02:00
throw new Error ( ` Invalid class name: ${ name } ` ) ;
2017-07-10 20:09:58 +02:00
}
2017-08-20 16:29:18 +02:00
static getClassByItemType ( itemType ) {
for ( let i = 0 ; i < BaseItem . syncItemDefinitions _ . length ; i ++ ) {
if ( BaseItem . syncItemDefinitions _ [ i ] . type == itemType ) {
return BaseItem . syncItemDefinitions _ [ i ] . classRef ;
}
}
2019-09-19 23:51:18 +02:00
throw new Error ( ` Invalid item type: ${ itemType } ` ) ;
2017-08-20 16:29:18 +02:00
}
2017-07-16 18:06:05 +02:00
static async syncedCount ( syncTarget ) {
const ItemClass = this . itemClass ( this . modelType ( ) ) ;
const itemType = ItemClass . modelType ( ) ;
// The fact that we don't check if the item_id still exist in the corresponding item table, means
// that the returned number might be innaccurate (for example if a sync operation was cancelled)
const sql = 'SELECT count(*) as total FROM sync_items WHERE sync_target = ? AND item_type = ?' ;
2019-07-29 15:43:53 +02:00
const r = await this . db ( ) . selectOne ( sql , [ syncTarget , itemType ] ) ;
2017-07-16 18:06:05 +02:00
return r . total ;
2017-07-02 14:02:07 +02:00
}
2018-11-11 22:17:43 +02:00
static systemPath ( itemOrId , extension = null ) {
2019-07-29 15:43:53 +02:00
if ( extension === null ) extension = 'md' ;
2018-11-11 22:17:43 +02:00
2019-09-19 23:51:18 +02:00
if ( typeof itemOrId === 'string' ) return ` ${ itemOrId } . ${ extension } ` ;
else return ` ${ itemOrId . id } . ${ extension } ` ;
2017-06-15 20:18:48 +02:00
}
2017-07-18 22:03:07 +02:00
static isSystemPath ( path ) {
// 1b175bb38bba47baac22b0b47f778113.md
if ( ! path || ! path . length ) return false ;
let p = path . split ( '/' ) ;
p = p [ p . length - 1 ] ;
p = p . split ( '.' ) ;
if ( p . length != 2 ) return false ;
return p [ 0 ] . length == 32 && p [ 1 ] == 'md' ;
}
2017-06-17 20:40:08 +02:00
static itemClass ( item ) {
if ( ! item ) throw new Error ( 'Item cannot be null' ) ;
2017-06-19 00:06:10 +02:00
if ( typeof item === 'object' ) {
if ( ! ( 'type_' in item ) ) throw new Error ( 'Item does not have a type_ property' ) ;
2017-07-02 14:02:07 +02:00
return this . itemClass ( item . type _ ) ;
2017-06-19 00:06:10 +02:00
} else {
2017-07-04 00:08:14 +02:00
for ( let i = 0 ; i < BaseItem . syncItemDefinitions _ . length ; i ++ ) {
2020-03-14 01:46:14 +02:00
const d = BaseItem . syncItemDefinitions _ [ i ] ;
2017-07-04 00:08:14 +02:00
if ( Number ( item ) == d . type ) return this . getClass ( d . className ) ;
}
2019-09-19 23:51:18 +02:00
throw new JoplinError ( ` Unknown type: ${ item } ` , 'unknownItemType' ) ;
2017-06-19 00:06:10 +02:00
}
2017-06-17 20:40:08 +02:00
}
2017-07-01 12:30:50 +02:00
// Returns the IDs of the items that have been synced at least once
2017-07-23 16:11:44 +02:00
static async syncedItemIds ( syncTarget ) {
2017-07-16 14:53:59 +02:00
if ( ! syncTarget ) throw new Error ( 'No syncTarget specified' ) ;
2020-03-14 01:46:14 +02:00
const temp = await this . db ( ) . selectAll ( 'SELECT item_id FROM sync_items WHERE sync_time > 0 AND sync_target = ?' , [ syncTarget ] ) ;
const output = [ ] ;
2017-07-23 16:11:44 +02:00
for ( let i = 0 ; i < temp . length ; i ++ ) {
output . push ( temp [ i ] . item _id ) ;
}
return output ;
2017-07-01 12:30:50 +02:00
}
2019-05-12 02:15:52 +02:00
static async allSyncItems ( syncTarget ) {
const output = await this . db ( ) . selectAll ( 'SELECT * FROM sync_items WHERE sync_target = ?' , [ syncTarget ] ) ;
return output ;
}
2017-06-15 20:18:48 +02:00
static pathToId ( path ) {
2020-03-14 01:46:14 +02:00
const p = path . split ( '/' ) ;
const s = p [ p . length - 1 ] . split ( '.' ) ;
2019-05-11 13:08:28 +02:00
let name = s [ 0 ] ;
if ( ! name ) return name ;
2019-05-11 18:35:39 +02:00
name = name . split ( '-' ) ;
2019-05-11 13:08:28 +02:00
return name [ name . length - 1 ] ;
2017-06-15 20:18:48 +02:00
}
static loadItemByPath ( path ) {
2017-07-02 12:34:07 +02:00
return this . loadItemById ( this . pathToId ( path ) ) ;
}
2017-07-02 20:38:34 +02:00
static async loadItemById ( id ) {
2020-03-14 01:46:14 +02:00
const classes = this . syncItemClassNames ( ) ;
2017-07-02 20:38:34 +02:00
for ( let i = 0 ; i < classes . length ; i ++ ) {
2020-03-14 01:46:14 +02:00
const item = await this . getClass ( classes [ i ] ) . load ( id ) ;
2017-06-15 20:18:48 +02:00
if ( item ) return item ;
2017-07-02 20:38:34 +02:00
}
return null ;
2017-06-15 20:18:48 +02:00
}
2019-07-17 23:50:12 +02:00
static async loadItemsByIds ( ids ) {
const classes = this . syncItemClassNames ( ) ;
let output = [ ] ;
for ( let i = 0 ; i < classes . length ; i ++ ) {
const ItemClass = this . getClass ( classes [ i ] ) ;
2019-09-19 23:51:18 +02:00
const sql = ` SELECT * FROM ${ ItemClass . tableName ( ) } WHERE id IN (" ${ ids . join ( '","' ) } ") ` ;
2019-07-17 23:50:12 +02:00
const models = await ItemClass . modelSelectAll ( sql ) ;
output = output . concat ( models ) ;
}
return output ;
}
2017-06-25 09:52:25 +02:00
	// Loads a single item of the given type by an arbitrary field value.
	static loadItemByField(itemType, field, value) {
		const ItemClass = this.itemClass(itemType);
		return ItemClass.loadByField(field, value);
	}

	// Loads a single item of the given type by ID.
	static loadItem(itemType, id) {
		const ItemClass = this.itemClass(itemType);
		return ItemClass.load(id);
	}

	// Deletes a single item of the given type by ID.
	static deleteItem(itemType, id) {
		const ItemClass = this.itemClass(itemType);
		return ItemClass.delete(id);
	}
2017-07-03 21:50:45 +02:00
static async delete ( id , options = null ) {
2017-07-11 20:17:23 +02:00
return this . batchDelete ( [ id ] , options ) ;
}
static async batchDelete ( ids , options = null ) {
2019-05-06 22:35:29 +02:00
if ( ! options ) options = { } ;
2017-07-04 00:08:14 +02:00
let trackDeleted = true ;
2017-07-03 21:50:45 +02:00
if ( options && options . trackDeleted !== null && options . trackDeleted !== undefined ) trackDeleted = options . trackDeleted ;
2017-07-18 21:27:10 +02:00
// Don't create a deleted_items entry when conflicted notes are deleted
// since no other client have (or should have) them.
let conflictNoteIds = [ ] ;
if ( this . modelType ( ) == BaseModel . TYPE _NOTE ) {
2019-09-19 23:51:18 +02:00
const conflictNotes = await this . db ( ) . selectAll ( ` SELECT id FROM notes WHERE id IN (" ${ ids . join ( '","' ) } ") AND is_conflict = 1 ` ) ;
2020-05-21 10:14:33 +02:00
conflictNoteIds = conflictNotes . map ( n => {
2019-07-29 15:43:53 +02:00
return n . id ;
} ) ;
2017-07-18 21:27:10 +02:00
}
2017-07-11 20:17:23 +02:00
await super . batchDelete ( ids , options ) ;
2017-07-03 21:50:45 +02:00
if ( trackDeleted ) {
2017-07-24 20:58:11 +02:00
const syncTargetIds = Setting . enumOptionValues ( 'sync.target' ) ;
2020-03-14 01:46:14 +02:00
const queries = [ ] ;
const now = time . unixMs ( ) ;
2017-07-11 20:17:23 +02:00
for ( let i = 0 ; i < ids . length ; i ++ ) {
2017-07-18 21:27:10 +02:00
if ( conflictNoteIds . indexOf ( ids [ i ] ) >= 0 ) continue ;
2017-07-19 21:15:55 +02:00
// For each deleted item, for each sync target, we need to add an entry in deleted_items.
// That way, each target can later delete the remote item.
for ( let j = 0 ; j < syncTargetIds . length ; j ++ ) {
queries . push ( {
sql : 'INSERT INTO deleted_items (item_type, item_id, deleted_time, sync_target) VALUES (?, ?, ?, ?)' ,
params : [ this . modelType ( ) , ids [ i ] , now , syncTargetIds [ j ] ] ,
} ) ;
}
2017-07-11 20:17:23 +02:00
}
await this . db ( ) . transactionExecBatch ( queries ) ;
2017-07-03 21:50:45 +02:00
}
}
2018-01-15 20:35:39 +02:00
	// Note: Currently, once a deleted_items entry has been processed, it is removed from the database. In practice it means that
	// the following case will not work as expected:
	// - Client 1 creates a note and sync with target 1 and 2
	// - Client 2 sync with target 1
	// - Client 2 deletes note and sync with target 1
	// - Client 1 syncs with target 1 only (note is deleted from local machine, as expected)
	// - Client 1 syncs with target 2 only => the note is *not* deleted from target 2 because no information
	//   that it was previously deleted exist (deleted_items entry has been deleted).
	// The solution would be to permanently store the list of deleted items on each client.
	static deletedItems(syncTarget) {
		return this.db().selectAll('SELECT * FROM deleted_items WHERE sync_target = ?', [syncTarget]);
	}

	// Number of pending remote deletions for the given sync target.
	static async deletedItemCount(syncTarget) {
		const r = await this.db().selectOne('SELECT count(*) as total FROM deleted_items WHERE sync_target = ?', [syncTarget]);
		return r['total'];
	}

	// Clears the deleted_items entry once the item has been deleted from the
	// given sync target.
	static remoteDeletedItem(syncTarget, itemId) {
		return this.db().exec('DELETE FROM deleted_items WHERE item_id = ? AND sync_target = ?', [itemId, syncTarget]);
	}
2017-06-19 00:06:10 +02:00
	// Converts a model property to its serialized (plain text) representation.
	static serialize_format(propName, propValue) {
		if (['created_time', 'updated_time', 'sync_time', 'user_updated_time', 'user_created_time'].indexOf(propName) >= 0) {
			if (!propValue) return '';
			// Timestamps are stored as Unix ms and serialized as ISO 8601 UTC.
			propValue = `${moment.unix(propValue / 1000).utc().format('YYYY-MM-DDTHH:mm:ss.SSS')}Z`;
		} else if (['title_diff', 'body_diff'].indexOf(propName) >= 0) {
			if (!propValue) return '';
			propValue = JSON.stringify(propValue);
		} else if (propValue === null || propValue === undefined) {
			propValue = '';
		}
		return propValue;
	}

	// Converts a serialized property value back to its model representation —
	// the inverse of serialize_format().
	static unserialize_format(type, propName, propValue) {
		if (propName[propName.length - 1] == '_') return propValue; // Private property

		const ItemClass = this.itemClass(type);

		if (['title_diff', 'body_diff'].indexOf(propName) >= 0) {
			if (!propValue) return '';
			propValue = JSON.parse(propValue);
		} else if (['longitude', 'latitude', 'altitude'].indexOf(propName) >= 0) {
			// Coordinates are normalized to a fixed number of decimal places.
			const places = (propName === 'altitude') ? 4 : 8;
			propValue = Number(propValue).toFixed(places);
		} else {
			if (['created_time', 'updated_time', 'user_created_time', 'user_updated_time'].indexOf(propName) >= 0) {
				// Convert the ISO 8601 string back to Unix ms ("x" format).
				propValue = (!propValue) ? '0' : moment(propValue, 'YYYY-MM-DDTHH:mm:ss.SSSZ').format('x');
			}
			propValue = Database.formatValue(ItemClass.fieldType(propName), propValue);
		}
		return propValue;
	}
2018-10-07 21:11:33 +02:00
	// Serializes an item to the plain-text format used on sync targets:
	// title, blank line, body, blank line, then "key: value" property lines.
	// `shownKeys` may contain field names or functions returning {key, value}.
	static async serialize(item, shownKeys = null) {
		if (shownKeys === null) {
			shownKeys = this.itemClass(item).fieldNames();
			shownKeys.push('type_');
		}

		item = this.filter(item);

		const output = {};

		if ('title' in item && shownKeys.indexOf('title') >= 0) {
			output.title = item.title;
		}

		if ('body' in item && shownKeys.indexOf('body') >= 0) {
			output.body = item.body;
		}

		output.props = [];

		for (let i = 0; i < shownKeys.length; i++) {
			let key = shownKeys[i];
			// Title and body are emitted as content above, not as properties.
			if (key == 'title' || key == 'body') continue;

			let value = null;
			if (typeof key === 'function') {
				const r = await key();
				key = r.key;
				value = r.value;
			} else {
				value = this.serialize_format(key, item[key]);
			}

			output.props.push(`${key}: ${value}`);
		}

		const temp = [];

		if (typeof output.title === 'string') temp.push(output.title);
		if (output.body) temp.push(output.body);
		if (output.props.length) temp.push(output.props.join('\n'));

		return temp.join('\n\n');
	}

	// Accessor for the injected encryption service (set externally on
	// BaseItem.encryptionService_); throws if not set.
	static encryptionService() {
		if (!this.encryptionService_) throw new Error('BaseItem.encryptionService_ is not set!!');
		return this.encryptionService_;
	}

	// Accessor for the injected revision service; throws if not set.
	static revisionService() {
		if (!this.revisionService_) throw new Error('BaseItem.revisionService_ is not set!!');
		return this.revisionService_;
	}
2017-12-13 20:57:40 +02:00
	// Serializes an item for upload to the sync target, encrypting the content
	// when encryption is enabled and supported by the item type. Shared items
	// are never encrypted.
	static async serializeForSync(item) {
		const ItemClass = this.itemClass(item);
		const shownKeys = ItemClass.fieldNames();
		shownKeys.push('type_');
		const serialized = await ItemClass.serialize(item, shownKeys);

		if (!Setting.value('encryption.enabled') || !ItemClass.encryptionSupported() || item.is_shared) {
			// Normally not possible since itemsThatNeedSync should only return decrypted items
			if (item.encryption_applied) throw new JoplinError('Item is encrypted but encryption is currently disabled', 'cannotSyncEncrypted');
			return serialized;
		}

		if (item.encryption_applied) {
			const e = new Error('Trying to encrypt item that is already encrypted');
			e.code = 'cannotEncryptEncrypted';
			throw e;
		}

		let cipherText = null;
		try {
			cipherText = await this.encryptionService().encryptString(serialized);
		} catch (error) {
			// Wrap the error so the item ID is included, but keep the original
			// stack trace for debugging.
			const msg = [`Could not encrypt item ${item.id}`];
			if (error && error.message) msg.push(error.message);
			const newError = new Error(msg.join(': '));
			newError.stack = error.stack;
			throw newError;
		}

		// List of keys that won't be encrypted - mostly foreign keys required to link items
		// with each others and timestamp required for synchronisation.
		const keepKeys = ['id', 'note_id', 'tag_id', 'parent_id', 'updated_time', 'type_'];
		const reducedItem = {};

		for (let i = 0; i < keepKeys.length; i++) {
			const n = keepKeys[i];
			if (!item.hasOwnProperty(n)) continue;
			reducedItem[n] = item[n];
		}

		reducedItem.encryption_applied = 1;
		reducedItem.encryption_cipher_text = cipherText;
		return ItemClass.serialize(reducedItem);
	}

	// Decrypts an encrypted item and saves the plain-text version back to the
	// database.
	static async decrypt(item) {
		if (!item.encryption_cipher_text) throw new Error(`Item is not encrypted: ${item.id}`);

		const ItemClass = this.itemClass(item);
		const plainText = await this.encryptionService().decryptString(item.encryption_cipher_text);

		// Note: decryption does not count has a change, so don't update any timestamp
		const plainItem = await ItemClass.unserialize(plainText);
		plainItem.updated_time = item.updated_time;
		plainItem.encryption_cipher_text = '';
		plainItem.encryption_applied = 0;
		return ItemClass.save(plainItem, { autoTimestamp: false, changeSource: ItemChange.SOURCE_DECRYPTION });
	}
2017-07-02 17:46:03 +02:00
	// Parses the plain-text serialized format produced by serialize() back into
	// a model object. The content is read bottom-up: property lines first, then
	// a blank separator line, then the body and title.
	static async unserialize(content) {
		const lines = content.split('\n');
		let output = {};
		let state = 'readingProps';
		const body = [];

		for (let i = lines.length - 1; i >= 0; i--) {
			let line = lines[i];
			if (state == 'readingProps') {
				line = line.trim();
				if (line == '') {
					// Blank separator reached — everything above it is body/title.
					state = 'readingBody';
					continue;
				}

				const p = line.indexOf(':');
				if (p < 0) throw new Error(`Invalid property format: ${line}: ${content}`);
				const key = line.substr(0, p).trim();
				const value = line.substr(p + 1).trim();
				output[key] = value;
			} else if (state == 'readingBody') {
				// Prepend so that `body` ends up in original (top-down) order.
				body.splice(0, 0, line);
			}
		}

		if (!output.type_) throw new Error(`Missing required property: type_: ${content}`);
		output.type_ = Number(output.type_);

		if (body.length) {
			// The first line is the title, the second the blank separator.
			const title = body.splice(0, 2);
			output.title = title[0];
		}

		if (output.type_ === BaseModel.TYPE_NOTE) output.body = body.join('\n');

		const ItemClass = this.itemClass(output.type_);
		output = ItemClass.removeUnknownFields(output);

		for (const n in output) {
			if (!output.hasOwnProperty(n)) continue;
			output[n] = await this.unserialize_format(output.type_, n, output[n]);
		}

		return output;
	}
2017-12-24 10:36:31 +02:00
static async encryptedItemsStats ( ) {
const classNames = this . encryptableItemClassNames ( ) ;
let encryptedCount = 0 ;
let totalCount = 0 ;
for ( let i = 0 ; i < classNames . length ; i ++ ) {
const ItemClass = this . getClass ( classNames [ i ] ) ;
encryptedCount += await ItemClass . count ( { where : 'encryption_applied = 1' } ) ;
totalCount += await ItemClass . count ( ) ;
}
return {
encrypted : encryptedCount ,
total : totalCount ,
} ;
}
static async encryptedItemsCount ( ) {
const classNames = this . encryptableItemClassNames ( ) ;
let output = 0 ;
for ( let i = 0 ; i < classNames . length ; i ++ ) {
const className = classNames [ i ] ;
const ItemClass = this . getClass ( className ) ;
const count = await ItemClass . count ( { where : 'encryption_applied = 1' } ) ;
output += count ;
}
return output ;
}
2017-12-20 21:45:25 +02:00
static async hasEncryptedItems ( ) {
const classNames = this . encryptableItemClassNames ( ) ;
for ( let i = 0 ; i < classNames . length ; i ++ ) {
const className = classNames [ i ] ;
const ItemClass = this . getClass ( className ) ;
const count = await ItemClass . count ( { where : 'encryption_applied = 1' } ) ;
if ( count ) return true ;
}
return false ;
}
2017-12-14 20:53:08 +02:00
	// Returns a page of locally encrypted items that need to be decrypted.
	// `exclusions` is a list of item IDs to skip. The returned `hasMore` flag
	// tells the caller whether to request another page.
	static async itemsThatNeedDecryption(exclusions = [], limit = 100) {
		const classNames = this.encryptableItemClassNames();
		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);

			let whereSql = ['encryption_applied = 1'];

			if (className === 'Resource') {
				// For resources, also include those whose metadata is already
				// decrypted but whose downloaded blob (fetch_status = 2) is
				// still encrypted. Note: the parentheses are deliberately
				// balanced across the two strings below.
				const blobDownloadedButEncryptedSql = 'encryption_blob_encrypted = 1 AND id IN (SELECT resource_id FROM resource_local_states WHERE fetch_status = 2))';
				whereSql = [`(encryption_applied = 1 OR (${blobDownloadedButEncryptedSql})`];
			}

			if (exclusions.length) whereSql.push(`id NOT IN ("${exclusions.join('","')}")`);

			const sql = sprintf(
				`
				SELECT *
				FROM %s
				WHERE %s
				LIMIT %d
				`,
				this.db().escapeField(ItemClass.tableName()),
				whereSql.join(' AND '),
				limit
			);

			const items = await ItemClass.modelSelectAll(sql);

			// Only report hasMore=false once the last class has been checked.
			if (i >= classNames.length - 1) {
				return { hasMore: items.length >= limit, items: items };
			} else {
				if (items.length) return { hasMore: true, items: items };
			}
		}
		throw new Error('Unreachable');
	}
2017-07-16 14:53:59 +02:00
	// Returns a page of items that need to be uploaded to the given sync
	// target: first items that have never been synced with that target, then
	// items modified since their last sync. `hasMore` tells the caller whether
	// to request another page.
	static async itemsThatNeedSync(syncTarget, limit = 100) {
		const classNames = this.syncItemClassNames();
		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);
			const fieldNames = ItemClass.fieldNames('items');

			// // NEVER SYNCED:
			// 'SELECT * FROM [ITEMS] WHERE id NOT INT (SELECT item_id FROM sync_items WHERE sync_target = ?)'
			// // CHANGED:
			// 'SELECT * FROM [ITEMS] items JOIN sync_items s ON s.item_id = items.id WHERE sync_target = ? AND'

			// Conflicted notes, still-encrypted items and resources with an
			// encrypted blob must not be uploaded.
			let extraWhere = [];
			if (className == 'Note') extraWhere.push('is_conflict = 0');
			if (className == 'Resource') extraWhere.push('encryption_blob_encrypted = 0');
			if (ItemClass.encryptionSupported()) extraWhere.push('encryption_applied = 0');
			extraWhere = extraWhere.length ? `AND ${extraWhere.join(' AND ')}` : '';

			// First get all the items that have never been synced under this sync target
			//
			// We order them by date descending so that latest modified notes go first.
			// In most case it doesn't make a big difference, but when re-syncing the whole
			// data set it does. In that case it means the recent notes, those that are likely
			// to be modified again, will be synced first, thus avoiding potential conflicts.
			const sql = sprintf(
				`
				SELECT %s
				FROM %s items
				WHERE id NOT IN (
					SELECT item_id FROM sync_items WHERE sync_target = %d
				)
				%s
				ORDER BY items.updated_time DESC
				LIMIT %d
				`,
				this.db().escapeFields(fieldNames),
				this.db().escapeField(ItemClass.tableName()),
				Number(syncTarget),
				extraWhere,
				limit
			);

			const neverSyncedItem = await ItemClass.modelSelectAll(sql);

			// Secondly get the items that have been synced under this sync target but that have been changed since then
			const newLimit = limit - neverSyncedItem.length;
			let changedItems = [];
			if (newLimit > 0) {
				fieldNames.push('sync_time');
				const sql = sprintf(
					`
					SELECT %s FROM %s items
					JOIN sync_items s ON s.item_id = items.id
					WHERE sync_target = %d
					AND (s.sync_time < items.updated_time OR force_sync = 1)
					AND s.sync_disabled = 0
					%s
					ORDER BY items.updated_time DESC
					LIMIT %d
					`,
					this.db().escapeFields(fieldNames),
					this.db().escapeField(ItemClass.tableName()),
					Number(syncTarget),
					extraWhere,
					newLimit
				);

				changedItems = await ItemClass.modelSelectAll(sql);
			}

			const items = neverSyncedItem.concat(changedItems);

			// Only report hasMore=false once the last class has been checked.
			if (i >= classNames.length - 1) {
				return { hasMore: items.length >= limit, items: items };
			} else {
				if (items.length) return { hasMore: true, items: items };
			}
		}
		throw new Error('Unreachable');
	}
2017-07-04 00:08:14 +02:00
static syncItemClassNames ( ) {
2020-05-21 10:14:33 +02:00
return BaseItem . syncItemDefinitions _ . map ( def => {
2017-07-04 00:08:14 +02:00
return def . className ;
} ) ;
}
2017-12-14 20:53:08 +02:00
static encryptableItemClassNames ( ) {
const temp = this . syncItemClassNames ( ) ;
2020-03-14 01:46:14 +02:00
const output = [ ] ;
2017-12-14 20:53:08 +02:00
for ( let i = 0 ; i < temp . length ; i ++ ) {
if ( temp [ i ] === 'MasterKey' ) continue ;
output . push ( temp [ i ] ) ;
}
return output ;
}
2017-08-20 16:29:18 +02:00
static syncItemTypes ( ) {
2020-05-21 10:14:33 +02:00
return BaseItem . syncItemDefinitions _ . map ( def => {
2017-08-20 16:29:18 +02:00
return def . type ;
} ) ;
}
2017-07-14 20:02:45 +02:00
static modelTypeToClassName ( type ) {
for ( let i = 0 ; i < BaseItem . syncItemDefinitions _ . length ; i ++ ) {
if ( BaseItem . syncItemDefinitions _ [ i ] . type == type ) return BaseItem . syncItemDefinitions _ [ i ] . className ;
}
2019-09-19 23:51:18 +02:00
throw new Error ( ` Invalid type: ${ type } ` ) ;
2017-07-14 20:02:45 +02:00
}
2017-12-05 21:21:01 +02:00
	// Returns, for the given sync target, all sync_items rows that have been
	// disabled, along with the associated item when it still exists locally.
	static async syncDisabledItems(syncTargetId) {
		const rows = await this.db().selectAll('SELECT * FROM sync_items WHERE sync_disabled = 1 AND sync_target = ?', [syncTargetId]);
		const output = [];
		for (let i = 0; i < rows.length; i++) {
			const row = rows[i];
			const item = await this.loadItem(row.item_type, row.item_id);
			if (row.item_location === BaseItem.SYNC_ITEM_LOCATION_LOCAL && !item) continue; // The referenced item no longer exist
			output.push({
				syncInfo: row,
				location: row.item_location,
				item: item,
			});
		}
		return output;
	}

	// Builds the queries that record the sync time (and optional disabled
	// status) of an item for a sync target. Any existing row is replaced
	// (delete then insert).
	static updateSyncTimeQueries(syncTarget, item, syncTime, syncDisabled = false, syncDisabledReason = '', itemLocation = null) {
		const itemType = item.type_;
		const itemId = item.id;
		if (!itemType || !itemId || syncTime === undefined) throw new Error(sprintf('Invalid parameters in updateSyncTimeQueries(): %d, %s, %d', syncTarget, JSON.stringify(item), syncTime));

		if (itemLocation === null) itemLocation = BaseItem.SYNC_ITEM_LOCATION_LOCAL;

		return [
			{
				sql: 'DELETE FROM sync_items WHERE sync_target = ? AND item_type = ? AND item_id = ?',
				params: [syncTarget, itemType, itemId],
			},
			{
				sql: 'INSERT INTO sync_items (sync_target, item_type, item_id, item_location, sync_time, sync_disabled, sync_disabled_reason) VALUES (?, ?, ?, ?, ?, ?, ?)',
				// The template coerces the reason to a string (it may be an
				// error object — confirm against callers).
				params: [syncTarget, itemType, itemId, itemLocation, syncTime, syncDisabled ? 1 : 0, `${syncDisabledReason}`],
			},
		];
	}

	static async saveSyncTime(syncTarget, item, syncTime) {
		const queries = this.updateSyncTimeQueries(syncTarget, item, syncTime);
		return this.db().transactionExecBatch(queries);
	}

	// Marks an item as disabled for sync, keeping its current sync_time.
	static async saveSyncDisabled(syncTargetId, item, syncDisabledReason, itemLocation = null) {
		const syncTime = 'sync_time' in item ? item.sync_time : 0;
		const queries = this.updateSyncTimeQueries(syncTargetId, item, syncTime, true, syncDisabledReason, itemLocation);
		return this.db().transactionExecBatch(queries);
	}
2017-11-21 20:48:50 +02:00
	// When an item is deleted, its associated sync_items data is not immediately deleted for
	// performance reason. So this function is used to look for these remaining sync_items and
	// delete them.
	static async deleteOrphanSyncItems() {
		const classNames = this.syncItemClassNames();
		const queries = [];
		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);

			let selectSql = `SELECT id FROM ${ItemClass.tableName()}`;
			// Conflicted notes are excluded from the "still exists" set, so any
			// sync_items rows pointing at them are treated as orphans too.
			if (ItemClass.modelType() == this.TYPE_NOTE) selectSql += ' WHERE is_conflict = 0';

			queries.push(`DELETE FROM sync_items WHERE item_location = ${BaseItem.SYNC_ITEM_LOCATION_LOCAL} AND item_type = ${ItemClass.modelType()} AND item_id NOT IN (${selectSql})`);
		}
		await this.db().transactionExecBatch(queries);
	}
2017-12-14 22:21:36 +02:00
static displayTitle ( item ) {
if ( ! item ) return '' ;
2019-09-19 23:51:18 +02:00
if ( item . encryption _applied ) return ` 🔑 ${ _ ( 'Encrypted' ) } ` ;
2019-07-29 15:43:53 +02:00
return item . title ? item . title : _ ( 'Untitled' ) ;
2017-12-14 22:21:36 +02:00
}
2017-12-14 23:12:02 +02:00
	// Flags every currently non-encrypted item for a forced sync — presumably
	// so plain-text copies on the sync targets get replaced once encryption is
	// enabled (confirm against callers).
	static async markAllNonEncryptedForSync() {
		const classNames = this.encryptableItemClassNames();
		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);

			const sql = sprintf(
				`
				SELECT id
				FROM %s
				WHERE encryption_applied = 0`,
				this.db().escapeField(ItemClass.tableName())
			);

			const items = await ItemClass.modelSelectAll(sql);
			const ids = items.map(item => {
				return item.id;
			});
			if (!ids.length) continue;

			await this.db().exec(`UPDATE sync_items SET force_sync = 1 WHERE item_id IN ("${ids.join('","')}")`);
		}
	}
2019-12-17 14:45:57 +02:00
	// Updates the is_shared flag of an item. Returns false when the flag
	// already has the requested value, true when it was changed.
	static async updateShareStatus(item, isShared) {
		if (!item.id || !item.type_) throw new Error('Item must have an ID and a type');
		if (!!item.is_shared === !!isShared) return false;
		const ItemClass = this.getClassByItemType(item.type_);

		// No auto-timestamp because sharing a note is not seen as an update
		await ItemClass.save({
			id: item.id,
			is_shared: isShared ? 1 : 0,
			updated_time: Date.now(),
		}, { autoTimestamp: false });

		// The timestamps have not been changed but still need the note to be synced
		// so we force-sync it.
		// await this.forceSync(item.id);

		return true;
	}

	// Marks a single item for re-upload on next sync, even if its timestamps
	// have not changed.
	static async forceSync(itemId) {
		await this.db().exec('UPDATE sync_items SET force_sync = 1 WHERE item_id = ?', [itemId]);
	}

	// Marks every synced item for re-upload on next sync.
	static async forceSyncAll() {
		await this.db().exec('UPDATE sync_items SET force_sync = 1');
	}

	// Saves the item, rejecting user-initiated modifications of encrypted items.
	static async save(o, options = null) {
		if (!options) options = {};
		if (options.userSideValidation === true) {
			if (o.encryption_applied) throw new Error(_('Encrypted items cannot be modified'));
		}
		return super.save(o, options);
	}
2018-09-30 21:15:30 +02:00
static markdownTag ( itemOrId ) {
2019-07-30 09:35:42 +02:00
const item = typeof itemOrId === 'object' ? itemOrId : {
id : itemOrId ,
title : '' ,
} ;
2018-09-30 21:15:30 +02:00
2018-05-02 16:13:20 +02:00
const output = [ ] ;
output . push ( '[' ) ;
2020-02-08 00:15:41 +02:00
output . push ( markdownUtils . escapeTitleText ( item . title ) ) ;
2018-05-02 16:13:20 +02:00
output . push ( ']' ) ;
2019-09-19 23:51:18 +02:00
output . push ( ` (:/ ${ item . id } ) ` ) ;
2018-05-02 16:13:20 +02:00
return output . join ( '' ) ;
}
2020-04-10 19:59:51 +02:00
static isMarkdownTag ( md ) {
if ( ! md ) return false ;
return ! ! md . match ( /^\[.*?\]\(:\/[0-9a-zA-Z]{32}\)$/ ) ;
}
2017-06-15 20:18:48 +02:00
}
2017-12-13 20:57:40 +02:00
// Set externally — see the encryptionService()/revisionService() accessors,
// which throw when these are still null.
BaseItem.encryptionService_ = null;
BaseItem.revisionService_ = null;

// Also update:
// - itemsThatNeedSync()
// - syncedItems()
BaseItem.syncItemDefinitions_ = [
	{ type: BaseModel.TYPE_NOTE, className: 'Note' },
	{ type: BaseModel.TYPE_FOLDER, className: 'Folder' },
	{ type: BaseModel.TYPE_RESOURCE, className: 'Resource' },
	{ type: BaseModel.TYPE_TAG, className: 'Tag' },
	{ type: BaseModel.TYPE_NOTE_TAG, className: 'NoteTag' },
	{ type: BaseModel.TYPE_MASTER_KEY, className: 'MasterKey' },
	{ type: BaseModel.TYPE_REVISION, className: 'Revision' },
];

// Values for sync_items.item_location: whether the row refers to an item that
// exists locally or only on the remote.
BaseItem.SYNC_ITEM_LOCATION_LOCAL = 1;
BaseItem.SYNC_ITEM_LOCATION_REMOTE = 2;

module.exports = BaseItem;