const BaseItem = require('lib/models/BaseItem.js');
const Folder = require('lib/models/Folder.js');
const Note = require('lib/models/Note.js');
const Resource = require('lib/models/Resource.js');
const MasterKey = require('lib/models/MasterKey.js');
const BaseModel = require('lib/BaseModel.js');
const DecryptionWorker = require('lib/services/DecryptionWorker');
const { sprintf } = require('sprintf-js');
const { time } = require('lib/time-utils.js');
const { Logger } = require('lib/logger.js');
const { _ } = require('lib/locale.js');
const { shim } = require('lib/shim.js');
const moment = require('moment');
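
// The Synchronizer pushes local changes to the sync target, then pulls remote
// changes back and resolves conflicts along the way. A minimal usage sketch
// (illustrative only -- it assumes an already configured database, file API
// driver, logger and encryption service):
//
//     const synchronizer = new Synchronizer(db, fileApi, 'cli');
//     synchronizer.setLogger(logger);
//     synchronizer.setEncryptionService(encryptionService);
//     const context = await synchronizer.start({ context: previousContext });
//
// The returned context should be persisted and passed back on the next call so
// that delta-based sync targets can resume from where they left off.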

class Synchronizer {

	constructor(db, api, appType) {
		this.state_ = 'idle';
		this.db_ = db;
		this.api_ = api;
		this.syncDirName_ = '.sync';
		this.resourceDirName_ = '.resource';
		this.logger_ = new Logger();
		this.appType_ = appType;
		this.cancelling_ = false;
		this.autoStartDecryptionWorker_ = true;

		// Debug flags are used to test certain hard-to-test conditions
		// such as cancelling in the middle of a loop.
		this.debugFlags_ = [];

		this.onProgress_ = function(s) {};
		this.progressReport_ = {};

		this.dispatch = function(action) {};
	}

	state() {
		return this.state_;
	}

	db() {
		return this.db_;
	}

	api() {
		return this.api_;
	}

	setLogger(l) {
		this.logger_ = l;
	}

	logger() {
		return this.logger_;
	}

	setEncryptionService(v) {
		this.encryptionService_ = v;
	}

	encryptionService() {
		return this.encryptionService_;
	}

	static reportToLines(report) {
		let lines = [];
		if (report.createLocal) lines.push(_('Created local items: %d.', report.createLocal));
		if (report.updateLocal) lines.push(_('Updated local items: %d.', report.updateLocal));
		if (report.createRemote) lines.push(_('Created remote items: %d.', report.createRemote));
		if (report.updateRemote) lines.push(_('Updated remote items: %d.', report.updateRemote));
		if (report.deleteLocal) lines.push(_('Deleted local items: %d.', report.deleteLocal));
		if (report.deleteRemote) lines.push(_('Deleted remote items: %d.', report.deleteRemote));
		if (report.fetchingTotal && report.fetchingProcessed) lines.push(_('Fetched items: %d/%d.', report.fetchingProcessed, report.fetchingTotal));
		if (!report.completedTime && report.state) lines.push(_('State: "%s".', report.state));
		if (report.cancelling && !report.completedTime) lines.push(_('Cancelling...'));
		if (report.completedTime) lines.push(_('Completed: %s', time.unixMsToLocalDateTime(report.completedTime)));
		return lines;
	}
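
	// logSyncOperation() both writes a debug log line for an individual sync action
	// and increments the corresponding counter in progressReport_, which is passed
	// to the onProgress_ callback and dispatched as a SYNC_REPORT_UPDATE action.
	// A caller could render that report with reportToLines(), for example
	// (illustrative sketch, names are hypothetical):
	//
	//     synchronizer.start({
	//         onProgress: (report) => console.info(Synchronizer.reportToLines(report).join(' ')),
	//     });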
	logSyncOperation(action, local = null, remote = null, message = null, actionCount = 1) {
		let line = ['Sync'];
		line.push(action);
		if (message) line.push(message);

		let type = local && local.type_ ? local.type_ : null;
		if (!type) type = remote && remote.type_ ? remote.type_ : null;
		if (type) line.push(BaseItem.modelTypeToClassName(type));

		if (local) {
			let s = [];
			s.push(local.id);
			if ('title' in local) s.push('"' + local.title + '"');
			line.push('(Local ' + s.join(', ') + ')');
		}

		if (remote) {
			let s = [];
			s.push(remote.id ? remote.id : remote.path);
			if ('title' in remote) s.push('"' + remote.title + '"');
			line.push('(Remote ' + s.join(', ') + ')');
		}

		this.logger().debug(line.join(': '));

		if (!this.progressReport_[action]) this.progressReport_[action] = 0;
		this.progressReport_[action] += actionCount;
		this.progressReport_.state = this.state();
		this.onProgress_(this.progressReport_);

		this.dispatch({ type: 'SYNC_REPORT_UPDATE', report: Object.assign({}, this.progressReport_) });
	}

	async logSyncSummary(report) {
		this.logger().info('Operations completed: ');
		for (let n in report) {
			if (!report.hasOwnProperty(n)) continue;
			if (n == 'errors') continue;
			if (n == 'starting') continue;
			if (n == 'finished') continue;
			if (n == 'state') continue;
			if (n == 'completedTime') continue;
			this.logger().info(n + ': ' + (report[n] ? report[n] : '-'));
		}

		let folderCount = await Folder.count();
		let noteCount = await Note.count();
		let resourceCount = await Resource.count();
		this.logger().info('Total folders: ' + folderCount);
		this.logger().info('Total notes: ' + noteCount);
		this.logger().info('Total resources: ' + resourceCount);

		if (report.errors && report.errors.length) {
			this.logger().warn('There were some errors:');
			for (let i = 0; i < report.errors.length; i++) {
				let e = report.errors[i];
				this.logger().warn(e);
			}
		}
	}
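
	// cancel() requests cancellation and then resolves once the running sync loop
	// has noticed the flag and the synchronizer is back in the 'idle' state. The
	// loops below check cancelling() at each iteration, so cancellation is not
	// instantaneous: the item currently being processed is finished first.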
	async cancel() {
		if (this.cancelling_ || this.state() == 'idle') return;

		this.logSyncOperation('cancelling', null, null, '');
		this.cancelling_ = true;

		return new Promise((resolve, reject) => {
			const iid = setInterval(() => {
				if (this.state() == 'idle') {
					clearInterval(iid);
					resolve();
				}
			}, 100);
		});
	}

	cancelling() {
		return this.cancelling_;
	}
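
	// start() runs one full synchronisation pass in three stages:
	//
	//   1. Upload: push local items that have changed since the last sync to the
	//      target, resolving conflicts where the remote copy has also changed or
	//      has been deleted.
	//   2. Propagate deletions: delete on the target the items that have been
	//      deleted locally.
	//   3. Download: walk the target's delta listing and create, update or delete
	//      local items accordingly.
	//
	// It returns a context object that the caller should pass back (as
	// options.context) on the next run so that stage 3 can resume from the
	// previous delta cursor.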
	async start(options = null) {
		if (!options) options = {};

		if (this.state() != 'idle') {
			let error = new Error(_('Synchronisation is already in progress. State: %s', this.state()));
			error.code = 'alreadyStarted';
			throw error;
		}

		this.state_ = 'in_progress';

		this.onProgress_ = options.onProgress ? options.onProgress : function(o) {};
		this.progressReport_ = { errors: [] };

		const lastContext = options.context ? options.context : {};
		const syncTargetId = this.api().syncTargetId();

		this.cancelling_ = false;

		const masterKeysBefore = await MasterKey.count();
		let hasAutoEnabledEncryption = false;

		// ------------------------------------------------------------------------
		// First, find all the items that have been changed since the
		// last sync and apply the changes to remote.
		// ------------------------------------------------------------------------

		let synchronizationId = time.unixMs().toString();
		let outputContext = Object.assign({}, lastContext);

		this.dispatch({ type: 'SYNC_STARTED' });

		this.logSyncOperation('starting', null, null, 'Starting synchronisation to target ' + syncTargetId + '... [' + synchronizationId + ']');

		try {
			await this.api().mkdir(this.syncDirName_);
			await this.api().mkdir(this.resourceDirName_);

			let donePaths = [];
			while (true) {
				if (this.cancelling()) break;

				let result = await BaseItem.itemsThatNeedSync(syncTargetId);
				let locals = result.items;

				for (let i = 0; i < locals.length; i++) {
					if (this.cancelling()) break;

					let local = locals[i];
					let ItemClass = BaseItem.itemClass(local);
					let path = BaseItem.systemPath(local);

					// Safety check to avoid infinite loops.
					// In fact this error is possible if the item is marked for sync (via sync_time or force_sync) while synchronisation is in
					// progress. In that case exit anyway to be sure we aren't in a loop and the item will be re-synced next time.
					if (donePaths.indexOf(path) >= 0) throw new Error(sprintf('Processing a path that has already been done: %s. sync_time was not updated?', path));

					let remote = await this.api().stat(path);
					let action = null;
					let updateSyncTimeOnly = true;
					let reason = '';
					let remoteContent = null;

					if (!remote) {
						if (!local.sync_time) {
							action = 'createRemote';
							reason = 'remote does not exist, and local is new and has never been synced';
						} else {
							// Note or item was modified after having been deleted remotely.
							// "itemConflict" is for all the items except notes, which are dealt with in a special way.
							action = local.type_ == BaseModel.TYPE_NOTE ? 'noteConflict' : 'itemConflict';
							reason = 'remote has been deleted, but local has changes';
						}
					} else {
						// Note: in order to know the real updated_time value, we need to load the content. In theory we could
						// rely on the file timestamp (in remote.updated_time) but in practice it's not accurate enough and
						// can lead to conflicts (for example when the file timestamp is slightly ahead of its real
						// updated_time). updated_time is set and managed by clients so it's always accurate.
						// Same situation below for updateLocal.
						//
						// This is a bit inefficient because if the resulting action is "updateRemote" we don't need the whole
						// content, but for now that will do since being reliable is the priority.
						//
						// TODO: assuming a particular sync target is guaranteed to have accurate timestamps, the driver maybe
						// could expose this with an accurateTimestamps() method that returns "true". In that case, the test
						// could be done using the file timestamp and the potentially unnecessary content loading could be skipped.
						// OneDrive does not appear to have accurate timestamps as lastModifiedDateTime would occasionally be
						// a few seconds ahead of what it was set to with setTimestamp().
						remoteContent = await this.api().get(path);
						if (!remoteContent) throw new Error('Got metadata for path but could not fetch content: ' + path);
						remoteContent = await BaseItem.unserialize(remoteContent);

						if (remoteContent.updated_time > local.sync_time) {
							// Since, in this loop, we are only dealing with items that require sync, if the
							// remote has been modified after the sync time, it means both items have been
							// modified and so there's a conflict.
							action = local.type_ == BaseModel.TYPE_NOTE ? 'noteConflict' : 'itemConflict';
							reason = 'both remote and local have changes';
						} else {
							action = 'updateRemote';
							reason = 'local has changes';
						}
					}

					this.logSyncOperation(action, local, remote, reason);

					const handleCannotSyncItem = async (syncTargetId, item, cannotSyncReason) => {
						await ItemClass.saveSyncDisabled(syncTargetId, item, cannotSyncReason);
						this.dispatch({ type: 'SYNC_HAS_DISABLED_SYNC_ITEMS' });
					};
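
					// A "rejectedByTarget" error below means the sync target refused the item.
					// Rather than aborting the whole run, the item is recorded as un-syncable
					// via handleCannotSyncItem() and skipped, so the rest of the queue can
					// still be processed.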

					if (local.type_ == BaseModel.TYPE_RESOURCE && (action == 'createRemote' || action === 'updateRemote' || (action == 'itemConflict' && remote))) {
						try {
							const remoteContentPath = this.resourceDirName_ + '/' + local.id;
							const result = await Resource.fullPathForSyncUpload(local);
							local = result.resource;
							const localResourceContentPath = result.path;
							await this.api().put(remoteContentPath, null, { path: localResourceContentPath, source: 'file' });
						} catch (error) {
							if (error && error.code === 'rejectedByTarget') {
								await handleCannotSyncItem(syncTargetId, local, error.message);
								action = null;
							} else {
								throw error;
							}
						}
					}

					if (action == 'createRemote' || action == 'updateRemote') {

						// Make the operation atomic by doing the work on a copy of the file
						// and then copying it back to the original location.
						// let tempPath = this.syncDirName_ + '/' + path + '_' + time.unixMs();
						//
						// Atomic operation is disabled for now because it's not possible
						// to do an atomic move with OneDrive (see file-api-driver-onedrive.js)

						// await this.api().put(tempPath, content);
						// await this.api().setTimestamp(tempPath, local.updated_time);
						// await this.api().move(tempPath, path);

						let canSync = true;
						try {
							if (this.debugFlags_.indexOf('rejectedByTarget') >= 0) {
								const error = new Error('Testing rejectedByTarget');
								error.code = 'rejectedByTarget';
								throw error;
							}
							const content = await ItemClass.serializeForSync(local);
							await this.api().put(path, content);
						} catch (error) {
							if (error && error.code === 'rejectedByTarget') {
								await handleCannotSyncItem(syncTargetId, local, error.message);
								canSync = false;
							} else {
								throw error;
							}
						}

						// Note: Currently, we set sync_time to updated_time, which should work fine given that the resolution is the millisecond.
						// In theory though, this could happen:
						//
						// 1. t0: Editor: Note is modified
						// 2. t0: Sync: Found that note was modified so start uploading it
						// 3. t0: Editor: Note is modified again
						// 4. t1: Sync: Note has finished uploading, set sync_time to t0
						//
						// Later any attempt to sync will not detect that note was modified in (3) (within the same millisecond as it was being uploaded)
						// because sync_time will be t0 too.
						//
						// The solution would be to use something like an etag (a simple counter incremented on every change) to make sure each
						// change is uniquely identified. Leaving it like this for now.
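						//
						// A hypothetical sketch of that approach (not implemented anywhere in this
						// codebase): every local save would bump a per-item counter, e.g.
						//     item.change_counter = (item.change_counter || 0) + 1;
						// and the uploader would record the counter value it pushed instead of a
						// timestamp. An edit made while the upload is in flight would bump the
						// counter again and so would still be detected on the next pass, regardless
						// of timestamp resolution.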

						if (canSync) {
							await this.api().setTimestamp(path, local.updated_time);
							await ItemClass.saveSyncTime(syncTargetId, local, local.updated_time);
						}

					} else if (action == 'itemConflict') {

						// ------------------------------------------------------------------------------
						// For non-note conflicts, we take the remote version (i.e. the version that was
						// synced first) and overwrite the local content.
						// ------------------------------------------------------------------------------

						if (remote) {
							local = remoteContent;
							const syncTimeQueries = BaseItem.updateSyncTimeQueries(syncTargetId, local, time.unixMs());
							await ItemClass.save(local, { autoTimestamp: false, nextQueries: syncTimeQueries });
						} else {
							await ItemClass.delete(local.id);
						}

					} else if (action == 'noteConflict') {

						// ------------------------------------------------------------------------------
						// First find out if the conflict matters. For example, if the conflict is on the title or body
						// we want to preserve all the changes. If it's on todo_completed it doesn't really matter
						// so in this case we just take the remote content.
						// ------------------------------------------------------------------------------

						let mustHandleConflict = true;
						if (remoteContent) {
							mustHandleConflict = Note.mustHandleConflict(local, remoteContent);
						}

						// ------------------------------------------------------------------------------
						// Create a duplicate of the local note in the Conflicts folder
						// (to preserve the user's changes)
						// ------------------------------------------------------------------------------

						if (mustHandleConflict) {
							let conflictedNote = Object.assign({}, local);
							delete conflictedNote.id;
							conflictedNote.is_conflict = 1;
							await Note.save(conflictedNote, { autoTimestamp: false });
						}

						// ------------------------------------------------------------------------------
						// Either copy the remote content to local or, if the remote content has
						// been deleted, delete the local content.
						// ------------------------------------------------------------------------------

						if (remote) {
							local = remoteContent;
							const syncTimeQueries = BaseItem.updateSyncTimeQueries(syncTargetId, local, time.unixMs());
							await ItemClass.save(local, { autoTimestamp: false, nextQueries: syncTimeQueries });
						} else {
							// Remote no longer exists (note deleted) so delete local one too
							await ItemClass.delete(local.id);
						}
					}

					donePaths.push(path);
				}

				if (!result.hasMore) break;
			}

			// ------------------------------------------------------------------------
			// Delete the remote items that have been deleted locally.
			// ------------------------------------------------------------------------

			let deletedItems = await BaseItem.deletedItems(syncTargetId);
			for (let i = 0; i < deletedItems.length; i++) {
				if (this.cancelling()) break;

				let item = deletedItems[i];
				let path = BaseItem.systemPath(item.item_id);
				this.logSyncOperation('deleteRemote', null, { id: item.item_id }, 'local has been deleted');
				await this.api().delete(path);
				await BaseItem.remoteDeletedItem(syncTargetId, item.item_id);
			}

			// ------------------------------------------------------------------------
			// Loop through all the remote items, find those that
			// have been updated, and apply the changes to local.
			// ------------------------------------------------------------------------

			// At this point all the local items that have changed have been pushed to remote
			// or handled as conflicts, so no conflict is possible after this.

			let context = null;
			let newDeltaContext = null;
			let localFoldersToDelete = [];
			let hasCancelled = false;
			if (lastContext.delta) context = lastContext.delta;

			while (true) {
				if (this.cancelling() || hasCancelled) break;

				let listResult = await this.api().delta('', {
					context: context,

					// allItemIdsHandler() provides a way for drivers that don't have a delta API to
					// still provide delta functionality by comparing the items they have to the items
					// the client has. Very inefficient but that's the only possible workaround.
					// It's a function so that it is only called if the driver needs these IDs. For
					// drivers with a delta functionality it's a noop.
					allItemIdsHandler: async () => { return BaseItem.syncedItemIds(syncTargetId); },
				});

				let remotes = listResult.items;

				this.logSyncOperation('fetchingTotal', null, null, 'Fetching delta items from sync target', remotes.length);

				for (let i = 0; i < remotes.length; i++) {
					if (this.cancelling() || this.debugFlags_.indexOf('cancelDeltaLoop2') >= 0) {
						hasCancelled = true;
						break;
					}

					this.logSyncOperation('fetchingProcessed', null, null, 'Processing fetched item');

					let remote = remotes[i];
					if (!BaseItem.isSystemPath(remote.path)) continue; // The delta API might return things like the .sync, .resource or the root folder

					const loadContent = async () => {
						content = await this.api().get(path);
						if (!content) return null;
						return await BaseItem.unserialize(content);
					};
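
					// As in the upload loop above, the item content has to be fetched to get an
					// accurate updated_time; the timestamp coming from the file listing is not
					// reliable enough to compare against the local item.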

					let path = remote.path;
					let action = null;
					let reason = '';
					let local = await BaseItem.loadItemByPath(path);
					let ItemClass = null;
					let content = null;

					if (!local) {
						if (remote.isDeleted !== true) {
							action = 'createLocal';
							reason = 'remote exists but local does not';
							content = await loadContent();
							ItemClass = content ? BaseItem.itemClass(content) : null;
						}
					} else {
						ItemClass = BaseItem.itemClass(local);
						local = ItemClass.filter(local);
						if (remote.isDeleted) {
							action = 'deleteLocal';
							reason = 'remote has been deleted';
						} else {
							content = await loadContent();
							if (content && content.updated_time > local.updated_time) {
								action = 'updateLocal';
								reason = 'remote is more recent than local';
							}
						}
					}

					if (!action) continue;

					this.logSyncOperation(action, local, remote, reason);

					if (action == 'createLocal' || action == 'updateLocal') {

						if (content === null) {
							this.logger().warn('Remote has been deleted between now and the list() call? In that case it will be handled during the next sync: ' + path);
							continue;
						}

						content = ItemClass.filter(content);

						// 2017-12-03: This was added because the new user_updated_time and user_created_time properties were added
						// to the items. However changing the database is not enough since remote items that haven't been synced yet
						// will not have these properties and, since they are required, it would cause a problem. So this checks
						// if they are present and, if not, sets them to a reasonable default.
						// Let's leave these two lines for 6 months, by which time all the clients should have been synced.
						if (!content.user_updated_time) content.user_updated_time = content.updated_time;
						if (!content.user_created_time) content.user_created_time = content.created_time;

						let options = {
							autoTimestamp: false,
							nextQueries: BaseItem.updateSyncTimeQueries(syncTargetId, content, time.unixMs()),
						};
						if (action == 'createLocal') options.isNew = true;
						if (action == 'updateLocal') options.oldItem = local;

						if (content.type_ == BaseModel.TYPE_RESOURCE && action == 'createLocal') {
							let localResourceContentPath = Resource.fullPath(content);
							let remoteResourceContentPath = this.resourceDirName_ + '/' + content.id;
							await this.api().get(remoteResourceContentPath, { path: localResourceContentPath, target: 'file' });
						}

						await ItemClass.save(content, options);

						if (!hasAutoEnabledEncryption && content.type_ === BaseModel.TYPE_MASTER_KEY && !masterKeysBefore) {
							hasAutoEnabledEncryption = true;
							this.logger().info('One master key was downloaded and none was previously available: automatically enabling encryption');
							this.logger().info('Using master key: ', content);
							await this.encryptionService().enableEncryption(content);
							await this.encryptionService().loadMasterKeysFromSettings();
							this.logger().info('Encryption has been enabled with the downloaded master key as the active key. However, note that no password was initially supplied. It will need to be provided by the user.');
						}

						if (!!content.encryption_applied) this.dispatch({ type: 'SYNC_GOT_ENCRYPTED_ITEM' });

					} else if (action == 'deleteLocal') {

						if (local.type_ == BaseModel.TYPE_FOLDER) {
							localFoldersToDelete.push(local);
							continue;
						}

						let ItemClass = BaseItem.itemClass(local.type_);
						await ItemClass.delete(local.id, { trackDeleted: false });
					}
				}

				// If the user has cancelled, don't record the new context (2) so that synchronisation
				// can start again from the previous context (1) next time. It is ok if some items
				// have been synced between (1) and (2) because the loop above will handle the same
				// items being synced twice as an update. If the local and remote items are identical
				// the update will simply be skipped.
				if (!hasCancelled) {
					if (!listResult.hasMore) {
						newDeltaContext = listResult.context;
						break;
					}
					context = listResult.context;
				}
			}

			outputContext.delta = newDeltaContext ? newDeltaContext : lastContext.delta;

			// ------------------------------------------------------------------------
			// Delete the folders that have been collected in the loop above.
			// Folders are always deleted last, and only if they are empty.
			// If a folder is not empty it's considered a conflict, since whatever deleted
			// it should have deleted its content too. In that case, all its notes
			// are marked as "is_conflict".
			// ------------------------------------------------------------------------

			if (!this.cancelling()) {
				for (let i = 0; i < localFoldersToDelete.length; i++) {
					const item = localFoldersToDelete[i];
					const noteIds = await Folder.noteIds(item.id);
					if (noteIds.length) { // CONFLICT
						await Folder.markNotesAsConflict(item.id);
					}
					await Folder.delete(item.id, { deleteChildren: false });
				}
			}

			if (!this.cancelling()) {
				await BaseItem.deleteOrphanSyncItems();
			}

		} catch (error) {
			if (error && ['cannotEncryptEncrypted', 'noActiveMasterKey'].indexOf(error.code) >= 0) {
				// Only log an info statement for this since it is a common condition that is reported
				// in the application and needs to be resolved by the user.
				this.logger().info(error.message);
			} else {
				this.logger().error(error);
				this.progressReport_.errors.push(error);
			}
		}

		if (this.cancelling()) {
			this.logger().info('Synchronisation was cancelled.');
			this.cancelling_ = false;
		}

		this.progressReport_.completedTime = time.unixMs();

		this.logSyncOperation('finished', null, null, 'Synchronisation finished [' + synchronizationId + ']');

		await this.logSyncSummary(this.progressReport_);

		this.onProgress_ = function(s) {};
		this.progressReport_ = {};

		this.dispatch({ type: 'SYNC_COMPLETED' });

		this.state_ = 'idle';

		return outputContext;
	}
}

module.exports = { Synchronizer };