import { ModelType, DeleteOptions } from '../BaseModel';
import { BaseItemEntity, NoteEntity } from '../services/database/types';
import Setting from './Setting';
import BaseModel from '../BaseModel';
import time from '../time';
import markdownUtils from '../markdownUtils';
import { _ } from '../locale';
import Database from '../database';
import ItemChange from './ItemChange';
import ShareService from '../services/share/ShareService';
import itemCanBeEncrypted from './utils/itemCanBeEncrypted';
import { getEncryptionEnabled } from '../services/synchronizer/syncInfoUtils';
import JoplinError from '../JoplinError';

const { sprintf } = require('sprintf-js');
const moment = require('moment');

export interface ItemsThatNeedDecryptionResult {
	hasMore: boolean;
	items: any[];
}

export interface ItemThatNeedSync {
	id: string;
	sync_time: number;
	type_: ModelType;
	updated_time: number;
	encryption_applied: number;
	share_id: string;
}

export interface ItemsThatNeedSyncResult {
	hasMore: boolean;
	items: ItemThatNeedSync[];
	neverSyncedItemIds: string[];
}

export interface EncryptedItemsStats {
	encrypted: number;
	total: number;
}

export default class BaseItem extends BaseModel {

	public static encryptionService_: any = null;
	public static revisionService_: any = null;
	public static shareService_: ShareService = null;

	// Also update:
	// - itemsThatNeedSync()
	// - syncedItems()
	public static syncItemDefinitions_: any[] = [
		{ type: BaseModel.TYPE_NOTE, className: 'Note' },
		{ type: BaseModel.TYPE_FOLDER, className: 'Folder' },
		{ type: BaseModel.TYPE_RESOURCE, className: 'Resource' },
		{ type: BaseModel.TYPE_TAG, className: 'Tag' },
		{ type: BaseModel.TYPE_NOTE_TAG, className: 'NoteTag' },
		{ type: BaseModel.TYPE_MASTER_KEY, className: 'MasterKey' },
		{ type: BaseModel.TYPE_REVISION, className: 'Revision' },
	];

	public static SYNC_ITEM_LOCATION_LOCAL = 1;
	public static SYNC_ITEM_LOCATION_REMOTE = 2;

	static useUuid() {
		return true;
	}

	static encryptionSupported() {
		return true;
	}
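
	// Registers the concrete class for one of the syncable item types. Model classes are
	// injected at startup rather than imported here to avoid circular dependencies; each
	// class must be registered before getClass()/itemClass() can resolve it. A minimal
	// sketch of the expected registration call (call sites are an assumption):
	//
	//     BaseItem.loadClass('Note', Note);
	//     BaseItem.loadClass('Folder', Folder);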
	static loadClass(className: string, classRef: any) {
		for (let i = 0; i < BaseItem.syncItemDefinitions_.length; i++) {
			if (BaseItem.syncItemDefinitions_[i].className === className) {
				BaseItem.syncItemDefinitions_[i].classRef = classRef;
				return;
			}
		}

		throw new Error(`Invalid class name: ${className}`);
	}
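
	// Returns a title that doesn't clash with an existing item, appending a counter to the
	// given title until a free one is found. For example, if "My Note" is already taken,
	// the call below would return "My Note (1)" (illustrative; the model class, variable
	// and resulting suffix depend on what already exists):
	//
	//     const title = await Note.findUniqueItemTitle('My Note', parentFolderId);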
	static async findUniqueItemTitle(title: string, parentId: string = null) {
		let counter = 1;
		let titleToTry = title;
		while (true) {
			let item = null;
			if (parentId !== null) {
				item = await this.loadByFields({
					title: titleToTry,
					parent_id: parentId,
				});
			} else {
				item = await this.loadByField('title', titleToTry);
			}

			if (!item) return titleToTry;

			titleToTry = `${title} (${counter})`;
			counter++;
			if (counter >= 100) titleToTry = `${title} (${new Date().getTime()})`;
			if (counter >= 1000) throw new Error('Cannot find unique title');
		}
	}

	// Need to dynamically load the classes like this to avoid circular dependencies
	static getClass(name: string) {
		for (let i = 0; i < BaseItem.syncItemDefinitions_.length; i++) {
			if (BaseItem.syncItemDefinitions_[i].className === name) {
				const classRef = BaseItem.syncItemDefinitions_[i].classRef;
				if (!classRef) throw new Error(`Class has not been loaded: ${name}`);
				return BaseItem.syncItemDefinitions_[i].classRef;
			}
		}

		throw new Error(`Invalid class name: ${name}`);
	}

	static getClassByItemType(itemType: ModelType) {
		for (let i = 0; i < BaseItem.syncItemDefinitions_.length; i++) {
			if (BaseItem.syncItemDefinitions_[i].type === itemType) {
				return BaseItem.syncItemDefinitions_[i].classRef;
			}
		}

		throw new Error(`Invalid item type: ${itemType}`);
	}

	static async syncedCount(syncTarget: number) {
		const ItemClass = this.itemClass(this.modelType());
		const itemType = ItemClass.modelType();
		// The fact that we don't check if the item_id still exists in the corresponding item table means
		// that the returned number might be inaccurate (for example if a sync operation was cancelled)
		const sql = 'SELECT count(*) as total FROM sync_items WHERE sync_target = ? AND item_type = ?';
		const r = await this.db().selectOne(sql, [syncTarget, itemType]);
		return r.total;
	}
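
	// Returns the filename under which an item is stored on the sync target: the item ID
	// plus an extension ('md' unless one is given). For example:
	//
	//     BaseItem.systemPath('1b175bb38bba47baac22b0b47f778113'); // => '1b175bb38bba47baac22b0b47f778113.md'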
	public static systemPath(itemOrId: any, extension: string = null) {
		if (extension === null) extension = 'md';

		if (typeof itemOrId === 'string') return `${itemOrId}.${extension}`;
		else return `${itemOrId.id}.${extension}`;
	}

	static isSystemPath(path: string) {
		// 1b175bb38bba47baac22b0b47f778113.md
		if (!path || !path.length) return false;
		let p: any = path.split('/');
		p = p[p.length - 1];
		p = p.split('.');
		if (p.length !== 2) return false;
		return p[0].length === 32 && p[1] === 'md';
	}

	static itemClass(item: any): any {
		if (!item) throw new Error('Item cannot be null');

		if (typeof item === 'object') {
			if (!('type_' in item)) throw new Error('Item does not have a type_ property');
			return this.itemClass(item.type_);
		} else {
			for (let i = 0; i < BaseItem.syncItemDefinitions_.length; i++) {
				const d = BaseItem.syncItemDefinitions_[i];
				if (Number(item) === d.type) return this.getClass(d.className);
			}
			throw new JoplinError(`Unknown type: ${item}`, 'unknownItemType');
		}
	}

	// Returns the IDs of the items that have been synced at least once
	static async syncedItemIds(syncTarget: number) {
		if (!syncTarget) throw new Error('No syncTarget specified');
		const temp = await this.db().selectAll('SELECT item_id FROM sync_items WHERE sync_time > 0 AND sync_target = ?', [syncTarget]);
		const output = [];
		for (let i = 0; i < temp.length; i++) {
			output.push(temp[i].item_id);
		}
		return output;
	}

	static async allSyncItems(syncTarget: number) {
		const output = await this.db().selectAll('SELECT * FROM sync_items WHERE sync_target = ?', [syncTarget]);
		return output;
	}
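
	// Extracts the item ID from a sync target path: the basename minus its extension, and,
	// when that name contains dashes, only the segment after the last dash. For example
	// (illustrative path), 'folder/prefix-1b175bb38bba47baac22b0b47f778113.md' resolves to
	// '1b175bb38bba47baac22b0b47f778113'.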
	static pathToId(path: string) {
		const p = path.split('/');
		const s = p[p.length - 1].split('.');
		let name: any = s[0];
		if (!name) return name;
		name = name.split('-');
		return name[name.length - 1];
	}

	static loadItemByPath(path: string) {
		return this.loadItemById(this.pathToId(path));
	}

	static async loadItemById(id: string) {
		const classes = this.syncItemClassNames();
		for (let i = 0; i < classes.length; i++) {
			const item = await this.getClass(classes[i]).load(id);
			if (item) return item;
		}
		return null;
	}

	static async loadItemsByIds(ids: string[]) {
		if (!ids.length) return [];

		const classes = this.syncItemClassNames();
		let output: any[] = [];
		for (let i = 0; i < classes.length; i++) {
			const ItemClass = this.getClass(classes[i]);
			const sql = `SELECT * FROM ${ItemClass.tableName()} WHERE id IN ("${ids.join('","')}")`;
			const models = await ItemClass.modelSelectAll(sql);
			output = output.concat(models);
		}
		return output;
	}

	static loadItemByField(itemType: number, field: string, value: any) {
		const ItemClass = this.itemClass(itemType);
		return ItemClass.loadByField(field, value);
	}

	static loadItem(itemType: ModelType, id: string) {
		const ItemClass = this.itemClass(itemType);
		return ItemClass.load(id);
	}

	static deleteItem(itemType: ModelType, id: string) {
		const ItemClass = this.itemClass(itemType);
		return ItemClass.delete(id);
	}

	static async delete(id: string, options: DeleteOptions = null) {
		return this.batchDelete([id], options);
	}
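
	// Deletes the given items and, unless options.trackDeleted is false, records one
	// deleted_items row per item and per configured sync target so that each target can
	// later delete its remote copy. Conflicted notes are skipped since they are never
	// uploaded. A sketch of a typical call (the variable name is illustrative):
	//
	//     await Note.batchDelete(selectedNoteIds, { trackDeleted: true });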
	static async batchDelete(ids: string[], options: DeleteOptions = null) {
		if (!options) options = {};

		let trackDeleted = true;
		if (options && options.trackDeleted !== null && options.trackDeleted !== undefined) trackDeleted = options.trackDeleted;

		// Don't create a deleted_items entry when conflicted notes are deleted
		// since no other client has (or should have) them.
		let conflictNoteIds: string[] = [];
		if (this.modelType() === BaseModel.TYPE_NOTE) {
			const conflictNotes = await this.db().selectAll(`SELECT id FROM notes WHERE id IN ("${ids.join('","')}") AND is_conflict = 1`);
			conflictNoteIds = conflictNotes.map((n: NoteEntity) => {
				return n.id;
			});
		}

		await super.batchDelete(ids, options);

		if (trackDeleted) {
			const syncTargetIds = Setting.enumOptionValues('sync.target');
			const queries = [];
			const now = time.unixMs();
			for (let i = 0; i < ids.length; i++) {
				if (conflictNoteIds.indexOf(ids[i]) >= 0) continue;

				// For each deleted item, for each sync target, we need to add an entry in deleted_items.
				// That way, each target can later delete the remote item.
				for (let j = 0; j < syncTargetIds.length; j++) {
					queries.push({
						sql: 'INSERT INTO deleted_items (item_type, item_id, deleted_time, sync_target) VALUES (?, ?, ?, ?)',
						params: [this.modelType(), ids[i], now, syncTargetIds[j]],
					});
				}
			}

			await this.db().transactionExecBatch(queries);
		}
	}

	// Note: Currently, once a deleted_items entry has been processed, it is removed from the database. In practice it means that
	// the following case will not work as expected:
	// - Client 1 creates a note and syncs with target 1 and 2
	// - Client 2 syncs with target 1
	// - Client 2 deletes the note and syncs with target 1
	// - Client 1 syncs with target 1 only (note is deleted from local machine, as expected)
	// - Client 1 syncs with target 2 only => the note is *not* deleted from target 2 because no information
	//   that it was previously deleted exists (the deleted_items entry has been deleted).
	// The solution would be to permanently store the list of deleted items on each client.
	static deletedItems(syncTarget: number) {
		return this.db().selectAll('SELECT * FROM deleted_items WHERE sync_target = ?', [syncTarget]);
	}

	static async deletedItemCount(syncTarget: number) {
		const r = await this.db().selectOne('SELECT count(*) as total FROM deleted_items WHERE sync_target = ?', [syncTarget]);
		return r['total'];
	}

	static remoteDeletedItem(syncTarget: number, itemId: string) {
		return this.db().exec('DELETE FROM deleted_items WHERE item_id = ? AND sync_target = ?', [itemId, syncTarget]);
	}

	static serialize_format(propName: string, propValue: any) {
		if (['created_time', 'updated_time', 'sync_time', 'user_updated_time', 'user_created_time'].indexOf(propName) >= 0) {
			if (!propValue) return '';
			propValue = `${moment.unix(propValue / 1000).utc().format('YYYY-MM-DDTHH:mm:ss.SSS')}Z`;
		} else if (['title_diff', 'body_diff'].indexOf(propName) >= 0) {
			if (!propValue) return '';
			propValue = JSON.stringify(propValue);
		} else if (propValue === null || propValue === undefined) {
			propValue = '';
		} else {
			propValue = `${propValue}`;
		}

		if (propName === 'body') return propValue;
		return propValue
			.replace(/\\n/g, '\\\\n')
			.replace(/\\r/g, '\\\\r')
			.replace(/\n/g, '\\n')
			.replace(/\r/g, '\\r');
	}

	static unserialize_format(type: ModelType, propName: string, propValue: any) {
		if (propName[propName.length - 1] === '_') return propValue; // Private property

		const ItemClass = this.itemClass(type);

		if (['title_diff', 'body_diff'].indexOf(propName) >= 0) {
			if (!propValue) return '';
			propValue = JSON.parse(propValue);
		} else if (['longitude', 'latitude', 'altitude'].indexOf(propName) >= 0) {
			const places = (propName === 'altitude') ? 4 : 8;
			propValue = Number(propValue).toFixed(places);
		} else {
			if (['created_time', 'updated_time', 'user_created_time', 'user_updated_time'].indexOf(propName) >= 0) {
				propValue = (!propValue) ? '0' : moment(propValue, 'YYYY-MM-DDTHH:mm:ss.SSSZ').format('x');
			}

			propValue = Database.formatValue(ItemClass.fieldType(propName), propValue);
		}

		if (propName === 'body') return propValue;
		return typeof propValue === 'string' ? propValue
			.replace(/\\n/g, '\n')
			.replace(/\\r/g, '\r')
			.replace(/\\\n/g, '\\n')
			.replace(/\\\r/g, '\\r')
			: propValue;
	}
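
	// Serializes an item to the plain-text format stored on the sync target: the title on
	// the first line, a blank line, the body (for notes), another blank line, then one
	// "key: value" line per remaining property, with type_ last. A rough sketch of the
	// output (field values are illustrative):
	//
	//     My note title
	//
	//     My note body
	//
	//     id: 1b175bb38bba47baac22b0b47f778113
	//     updated_time: 2021-01-22T19:41:11.000Z
	//     type_: 1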
	static async serialize(item: any, shownKeys: any[] = null) {
		if (shownKeys === null) {
			shownKeys = this.itemClass(item).fieldNames();
			shownKeys.push('type_');
		}

		item = this.filter(item);

		const output: any = {};

		if ('title' in item && shownKeys.indexOf('title') >= 0) {
			output.title = item.title;
		}

		if ('body' in item && shownKeys.indexOf('body') >= 0) {
			output.body = item.body;
		}

		output.props = [];

		for (let i = 0; i < shownKeys.length; i++) {
			let key = shownKeys[i];
			if (key === 'title' || key === 'body') continue;

			let value = null;
			if (typeof key === 'function') {
				const r = await key();
				key = r.key;
				value = r.value;
			} else {
				value = this.serialize_format(key, item[key]);
			}

			output.props.push(`${key}: ${value}`);
		}

		const temp = [];

		if (typeof output.title === 'string') temp.push(output.title);
		if (output.body) temp.push(output.body);
		if (output.props.length) temp.push(output.props.join('\n'));

		return temp.join('\n\n');
	}

	static encryptionService() {
		if (!this.encryptionService_) throw new Error('BaseItem.encryptionService_ is not set!!');
		return this.encryptionService_;
	}

	static revisionService() {
		if (!this.revisionService_) throw new Error('BaseItem.revisionService_ is not set!!');
		return this.revisionService_;
	}

	protected static shareService() {
		if (!this.shareService_) throw new Error('BaseItem.shareService_ is not set!!');
		return this.shareService_;
	}
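
	// Serializes an item for upload to the sync target. If encryption is disabled, the
	// item type doesn't support it, or the item cannot be encrypted (as determined by
	// itemCanBeEncrypted()), the plain serialized form is returned. Otherwise the
	// serialized content is encrypted and only a reduced set of properties (IDs,
	// updated_time, type) is kept in clear text alongside the cipher text.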
	public static async serializeForSync(item: BaseItemEntity): Promise<string> {
		const ItemClass = this.itemClass(item);
		const shownKeys = ItemClass.fieldNames();
		shownKeys.push('type_');

		const share = item.share_id ? await this.shareService().shareById(item.share_id) : null;
		const serialized = await ItemClass.serialize(item, shownKeys);

		if (!getEncryptionEnabled() || !ItemClass.encryptionSupported() || !itemCanBeEncrypted(item, share)) {
			// Normally not possible since itemsThatNeedSync should only return decrypted items
			if (item.encryption_applied) throw new JoplinError('Item is encrypted but encryption is currently disabled', 'cannotSyncEncrypted');
			return serialized;
		}

		if (item.encryption_applied) {
			const e: any = new Error('Trying to encrypt item that is already encrypted');
			e.code = 'cannotEncryptEncrypted';
			throw e;
		}

		let cipherText = null;

		try {
			cipherText = await this.encryptionService().encryptString(serialized, {
				masterKeyId: share && share.master_key_id ? share.master_key_id : '',
			});
		} catch (error) {
			const msg = [`Could not encrypt item ${item.id}`];
			if (error && error.message) msg.push(error.message);

			const newError = new Error(msg.join(': '));
			newError.stack = error.stack;
			throw newError;
		}

		// List of keys that won't be encrypted - mostly foreign keys required to link items
		// with each other, and timestamps required for synchronisation.
		const keepKeys = ['id', 'note_id', 'tag_id', 'parent_id', 'share_id', 'updated_time', 'type_'];
		const reducedItem: any = {};

		for (let i = 0; i < keepKeys.length; i++) {
			const n = keepKeys[i];
			if (!item.hasOwnProperty(n)) continue;
			reducedItem[n] = (item as any)[n];
		}

		reducedItem.encryption_applied = 1;
		reducedItem.encryption_cipher_text = cipherText;
		return ItemClass.serialize(reducedItem);
	}
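
	// Decrypts an encrypted item and saves the plain-text version back to the database,
	// keeping the original updated_time and recording the save as a decryption-sourced
	// change rather than a user edit.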
	static async decrypt(item: any) {
		if (!item.encryption_cipher_text) throw new Error(`Item is not encrypted: ${item.id}`);

		const ItemClass = this.itemClass(item);
		const plainText = await this.encryptionService().decryptString(item.encryption_cipher_text);

		// Note: decryption does not count as a change, so don't update any timestamp
		const plainItem = await ItemClass.unserialize(plainText);
		plainItem.updated_time = item.updated_time;
		plainItem.encryption_cipher_text = '';
		plainItem.encryption_applied = 0;
		return ItemClass.save(plainItem, { autoTimestamp: false, changeSource: ItemChange.SOURCE_DECRYPTION });
	}
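
	// Parses the plain-text serialized format back into an item object. The content is
	// read from the bottom up: "key: value" lines are consumed until a blank line is
	// reached, and everything above that forms the title (first line) and, for notes,
	// the body. Values are then converted back to their database types via
	// unserialize_format().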
	static async unserialize(content: string) {
		const lines = content.split('\n');
		let output: any = {};
		let state = 'readingProps';
		const body: string[] = [];

		for (let i = lines.length - 1; i >= 0; i--) {
			let line = lines[i];

			if (state === 'readingProps') {
				line = line.trim();

				if (line === '') {
					state = 'readingBody';
					continue;
				}

				const p = line.indexOf(':');
				if (p < 0) throw new Error(`Invalid property format: ${line}: ${content}`);
				const key = line.substr(0, p).trim();
				const value = line.substr(p + 1).trim();
				output[key] = value;
			} else if (state === 'readingBody') {
				body.splice(0, 0, line);
			}
		}

		if (!output.type_) throw new Error(`Missing required property: type_: ${content}`);
		output.type_ = Number(output.type_);

		if (body.length) {
			const title = body.splice(0, 2);
			output.title = title[0];
		}

		if (output.type_ === BaseModel.TYPE_NOTE) output.body = body.join('\n');

		const ItemClass = this.itemClass(output.type_);
		output = ItemClass.removeUnknownFields(output);

		for (const n in output) {
			if (!output.hasOwnProperty(n)) continue;
			output[n] = await this.unserialize_format(output.type_, n, output[n]);
		}

		return output;
	}

	public static async encryptedItemsStats(): Promise<EncryptedItemsStats> {
		const classNames = this.encryptableItemClassNames();
		let encryptedCount = 0;
		let totalCount = 0;

		for (let i = 0; i < classNames.length; i++) {
			const ItemClass = this.getClass(classNames[i]);
			encryptedCount += await ItemClass.count({ where: 'encryption_applied = 1' });
			totalCount += await ItemClass.count();
		}

		return {
			encrypted: encryptedCount,
			total: totalCount,
		};
	}

	static async encryptedItemsCount() {
		const classNames = this.encryptableItemClassNames();
		let output = 0;

		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);
			const count = await ItemClass.count({ where: 'encryption_applied = 1' });
			output += count;
		}

		return output;
	}

	static async hasEncryptedItems() {
		const classNames = this.encryptableItemClassNames();

		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);

			const count = await ItemClass.count({ where: 'encryption_applied = 1' });
			if (count) return true;
		}

		return false;
	}

	static async itemsThatNeedDecryption(exclusions: string[] = [], limit = 100): Promise<ItemsThatNeedDecryptionResult> {
		const classNames = this.encryptableItemClassNames();

		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);

			let whereSql = ['encryption_applied = 1'];

			if (className === 'Resource') {
				const blobDownloadedButEncryptedSql = 'encryption_blob_encrypted = 1 AND id IN (SELECT resource_id FROM resource_local_states WHERE fetch_status = 2))';
				whereSql = [`(encryption_applied = 1 OR (${blobDownloadedButEncryptedSql})`];
			}

			if (exclusions.length) whereSql.push(`id NOT IN ("${exclusions.join('","')}")`);

			const sql = sprintf(
				`
				SELECT *
				FROM %s
				WHERE %s
				LIMIT %d
				`,
				this.db().escapeField(ItemClass.tableName()),
				whereSql.join(' AND '),
				limit
			);

			const items = await ItemClass.modelSelectAll(sql);

			if (i >= classNames.length - 1) {
				return { hasMore: items.length >= limit, items: items };
			} else {
				if (items.length) return { hasMore: true, items: items };
			}
		}

		throw new Error('Unreachable');
	}

	public static async itemHasBeenSynced(itemId: string): Promise<boolean> {
		const r = await this.db().selectOne('SELECT item_id FROM sync_items WHERE item_id = ?', [itemId]);
		return !!r;
	}
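
	// Returns the next batch of items to upload to the given sync target: first items
	// that have never been synced with that target, then items whose updated_time is
	// newer than their recorded sync_time (or that have force_sync set), most recently
	// modified first. Conflicted notes, items that are still encrypted locally, and
	// sync-disabled entries are filtered out.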
	public static async itemsThatNeedSync(syncTarget: number, limit = 100): Promise<ItemsThatNeedSyncResult> {
		// Although we keep the master keys in the database, we no longer sync them
		const classNames = this.syncItemClassNames().filter(n => n !== 'MasterKey');

		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);
			const fieldNames = ItemClass.fieldNames('items');

			// // NEVER SYNCED:
			// 'SELECT * FROM [ITEMS] WHERE id NOT IN (SELECT item_id FROM sync_items WHERE sync_target = ?)'
			// // CHANGED:
			// 'SELECT * FROM [ITEMS] items JOIN sync_items s ON s.item_id = items.id WHERE sync_target = ? AND'

			let extraWhere: any = [];
			if (className === 'Note') extraWhere.push('is_conflict = 0');
			if (className === 'Resource') extraWhere.push('encryption_blob_encrypted = 0');
			if (ItemClass.encryptionSupported()) extraWhere.push('encryption_applied = 0');

			extraWhere = extraWhere.length ? `AND ${extraWhere.join(' AND ')}` : '';

			// First get all the items that have never been synced under this sync target
			//
			// We order them by date descending so that the most recently modified notes go first.
			// In most cases it doesn't make a big difference, but when re-syncing the whole
			// data set it does. In that case it means the recent notes, those that are likely
			// to be modified again, will be synced first, thus avoiding potential conflicts.

			const sql = sprintf(
				`
				SELECT %s
				FROM %s items
				WHERE id NOT IN (
					SELECT item_id FROM sync_items WHERE sync_target = %d
				)
				%s
				ORDER BY items.updated_time DESC
				LIMIT %d
				`,
				this.db().escapeFields(fieldNames),
				this.db().escapeField(ItemClass.tableName()),
				Number(syncTarget),
				extraWhere,
				limit
			);

			const neverSyncedItem = await ItemClass.modelSelectAll(sql);

			// Secondly get the items that have been synced under this sync target but that have been changed since then
			const newLimit = limit - neverSyncedItem.length;
			let changedItems = [];

			if (newLimit > 0) {
				fieldNames.push('sync_time');

				const sql = sprintf(
					`
					SELECT %s FROM %s items
					JOIN sync_items s ON s.item_id = items.id
					WHERE sync_target = %d
					AND (s.sync_time < items.updated_time OR force_sync = 1)
					AND s.sync_disabled = 0
					%s
					ORDER BY items.updated_time DESC
					LIMIT %d
					`,
					this.db().escapeFields(fieldNames),
					this.db().escapeField(ItemClass.tableName()),
					Number(syncTarget),
					extraWhere,
					newLimit
				);

				changedItems = await ItemClass.modelSelectAll(sql);
			}

			const neverSyncedItemIds = neverSyncedItem.map((it: any) => it.id);
			const items = neverSyncedItem.concat(changedItems);

			if (i >= classNames.length - 1) {
				return { hasMore: items.length >= limit, items: items, neverSyncedItemIds };
			} else {
				if (items.length) return { hasMore: true, items: items, neverSyncedItemIds };
			}
		}

		throw new Error('Unreachable');
	}

	static syncItemClassNames(): string[] {
		return BaseItem.syncItemDefinitions_.map((def: any) => {
			return def.className;
		});
	}

	static encryptableItemClassNames() {
		const temp = this.syncItemClassNames();
		const output = [];
		for (let i = 0; i < temp.length; i++) {
			if (temp[i] === 'MasterKey') continue;
			output.push(temp[i]);
		}
		return output;
	}

	public static syncItemTypes(): ModelType[] {
		return BaseItem.syncItemDefinitions_.map((def: any) => {
			return def.type;
		});
	}

	static modelTypeToClassName(type: number) {
		for (let i = 0; i < BaseItem.syncItemDefinitions_.length; i++) {
			if (BaseItem.syncItemDefinitions_[i].type === type) return BaseItem.syncItemDefinitions_[i].className;
		}
		throw new Error(`Invalid type: ${type}`);
	}

	static async syncDisabledItems(syncTargetId: number) {
		const rows = await this.db().selectAll('SELECT * FROM sync_items WHERE sync_disabled = 1 AND sync_target = ?', [syncTargetId]);
		const output = [];
		for (let i = 0; i < rows.length; i++) {
			const row = rows[i];
			const item = await this.loadItem(row.item_type, row.item_id);
			if (row.item_location === BaseItem.SYNC_ITEM_LOCATION_LOCAL && !item) continue; // The referenced item no longer exists
			output.push({
				syncInfo: row,
				location: row.item_location,
				item: item,
			});
		}
		return output;
	}
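
	// Builds the queries that record the sync state of an item for a given target: the
	// existing sync_items row is deleted and a fresh one is inserted with the new
	// sync_time, location and disabled status. Callers run the returned queries in a
	// transaction, e.g. saveSyncTime() below.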
	static updateSyncTimeQueries(syncTarget: number, item: any, syncTime: number, syncDisabled = false, syncDisabledReason = '', itemLocation: number = null) {
		const itemType = item.type_;
		const itemId = item.id;
		if (!itemType || !itemId || syncTime === undefined) throw new Error(sprintf('Invalid parameters in updateSyncTimeQueries(): %d, %s, %d', syncTarget, JSON.stringify(item), syncTime));

		if (itemLocation === null) itemLocation = BaseItem.SYNC_ITEM_LOCATION_LOCAL;

		return [
			{
				sql: 'DELETE FROM sync_items WHERE sync_target = ? AND item_type = ? AND item_id = ?',
				params: [syncTarget, itemType, itemId],
			},
			{
				sql: 'INSERT INTO sync_items (sync_target, item_type, item_id, item_location, sync_time, sync_disabled, sync_disabled_reason) VALUES (?, ?, ?, ?, ?, ?, ?)',
				params: [syncTarget, itemType, itemId, itemLocation, syncTime, syncDisabled ? 1 : 0, `${syncDisabledReason}`],
			},
		];
	}

	static async saveSyncTime(syncTarget: number, item: any, syncTime: number) {
		const queries = this.updateSyncTimeQueries(syncTarget, item, syncTime);
		return this.db().transactionExecBatch(queries);
	}

	static async saveSyncDisabled(syncTargetId: number, item: any, syncDisabledReason: string, itemLocation: number = null) {
		const syncTime = 'sync_time' in item ? item.sync_time : 0;
		const queries = this.updateSyncTimeQueries(syncTargetId, item, syncTime, true, syncDisabledReason, itemLocation);
		return this.db().transactionExecBatch(queries);
	}

	public static async saveSyncEnabled(itemType: ModelType, itemId: string) {
		await this.db().exec('DELETE FROM sync_items WHERE item_type = ? AND item_id = ?', [itemType, itemId]);
	}

	// When an item is deleted, its associated sync_items data is not immediately deleted for
	// performance reasons. So this function is used to look for these remaining sync_items and
	// delete them.
	static async deleteOrphanSyncItems() {
		const classNames = this.syncItemClassNames();

		const queries = [];
		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);

			let selectSql = `SELECT id FROM ${ItemClass.tableName()}`;
			if (ItemClass.modelType() === this.TYPE_NOTE) selectSql += ' WHERE is_conflict = 0';

			queries.push(`DELETE FROM sync_items WHERE item_location = ${BaseItem.SYNC_ITEM_LOCATION_LOCAL} AND item_type = ${ItemClass.modelType()} AND item_id NOT IN (${selectSql})`);
		}

		await this.db().transactionExecBatch(queries);
	}

	static displayTitle(item: any) {
		if (!item) return '';
		if (item.encryption_applied) return `🔑 ${_('Encrypted')}`;
		return item.title ? item.title : _('Untitled');
	}

	static async markAllNonEncryptedForSync() {
		const classNames = this.encryptableItemClassNames();

		for (let i = 0; i < classNames.length; i++) {
			const className = classNames[i];
			const ItemClass = this.getClass(className);

			const sql = sprintf(
				`
				SELECT id
				FROM %s
				WHERE encryption_applied = 0`,
				this.db().escapeField(ItemClass.tableName())
			);

			const items = await ItemClass.modelSelectAll(sql);
			const ids = items.map((item: any) => {
				return item.id;
			});
			if (!ids.length) continue;

			await this.db().exec(`UPDATE sync_items SET force_sync = 1 WHERE item_id IN ("${ids.join('","')}")`);
		}
	}

	static async updateShareStatus(item: BaseItemEntity, isShared: boolean) {
		if (!item.id || !item.type_) throw new Error('Item must have an ID and a type');
		if (!!item.is_shared === !!isShared) return false;
		const ItemClass = this.getClassByItemType(item.type_);

		// No auto-timestamp because sharing a note is not seen as an update
		await ItemClass.save({
			id: item.id,
			is_shared: isShared ? 1 : 0,
			updated_time: Date.now(),
		}, { autoTimestamp: false });

		// The timestamps have not been changed but we still need the note to be synced,
		// so we force-sync it.
		// await this.forceSync(item.id);

		return true;
	}

	static async forceSync(itemId: string) {
		await this.db().exec('UPDATE sync_items SET force_sync = 1 WHERE item_id = ?', [itemId]);
	}

	static async forceSyncAll() {
		await this.db().exec('UPDATE sync_items SET force_sync = 1');
	}

	static async save(o: any, options: any = null) {
		if (!options) options = {};

		if (options.userSideValidation === true) {
			if (o.encryption_applied) throw new Error(_('Encrypted items cannot be modified'));
		}

		return super.save(o, options);
	}
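
	// Builds the internal Markdown link used to reference an item, e.g.
	// "[My note](:/1b175bb38bba47baac22b0b47f778113)" (title and ID are illustrative).
	// isMarkdownTag() below checks that a string matches this exact pattern.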
	static markdownTag(itemOrId: any) {
		const item = typeof itemOrId === 'object' ? itemOrId : {
			id: itemOrId,
			title: '',
		};

		const output = [];
		output.push('[');
		output.push(markdownUtils.escapeTitleText(item.title));
		output.push(']');
		output.push(`(:/${item.id})`);
		return output.join('');
	}

	static isMarkdownTag(md: any) {
		if (!md) return false;
		return !!md.match(/^\[.*?\]\(:\/[0-9a-zA-Z]{32}\)$/);
	}
}