const BaseItem = require('lib/models/BaseItem.js');
const Folder = require('lib/models/Folder.js');
const Note = require('lib/models/Note.js');
const Resource = require('lib/models/Resource.js');
const ItemChange = require('lib/models/ItemChange.js');
const ResourceLocalState = require('lib/models/ResourceLocalState.js');
const MasterKey = require('lib/models/MasterKey.js');
const BaseModel = require('lib/BaseModel.js');
const DecryptionWorker = require('lib/services/DecryptionWorker');
const { sprintf } = require('sprintf-js');
const { time } = require('lib/time-utils.js');
const { Logger } = require('lib/logger.js');
const { _ } = require('lib/locale.js');
const { shim } = require('lib/shim.js');
const JoplinError = require('lib/JoplinError');
const BaseSyncTarget = require('lib/BaseSyncTarget');
const TaskQueue = require('lib/TaskQueue');
class Synchronizer {

	constructor(db, api, appType) {
		this.state_ = 'idle';
		this.db_ = db;
		this.api_ = api;
		this.syncDirName_ = '.sync';
		this.resourceDirName_ = BaseSyncTarget.resourceDirName();
		this.logger_ = new Logger();
		this.appType_ = appType;
		this.cancelling_ = false;
		this.autoStartDecryptionWorker_ = true;
		this.maxResourceSize_ = null;
		this.downloadQueue_ = null;

		// Debug flags are used to test certain hard-to-test conditions
		// such as cancelling in the middle of a loop.
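		// For example, a test could set (hypothetical usage; the hook names
		// are the ones checked further down in this file):
		//
		//     synchronizer.testingHooks_ = ['cancelDeltaLoop2'];
		//
		// which makes the delta loop below stop mid-iteration, as if cancelled.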
		this.testingHooks_ = [];

		this.onProgress_ = function(s) {};
		this.progressReport_ = {};

		this.dispatch = function(action) {};
	}
	state() {
		return this.state_;
	}

	db() {
		return this.db_;
	}

	api() {
		return this.api_;
	}

	setLogger(l) {
		this.logger_ = l;
	}

	logger() {
		return this.logger_;
	}

	maxResourceSize() {
		if (this.maxResourceSize_ !== null) return this.maxResourceSize_;
		return this.appType_ === 'mobile' ? 10 * 1000 * 1000 : Infinity;
	}
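	// For example, with the defaults above (hypothetical call):
	//
	//     synchronizer.maxResourceSize(); // 'mobile': 10000000 (10 MB); any other app type: Infinity
	//
	// The delta step below uses this cap to skip downloading oversized resources.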
	setEncryptionService(v) {
		this.encryptionService_ = v;
	}

	encryptionService() {
		return this.encryptionService_;
	}
	static reportToLines(report) {
		let lines = [];
		if (report.createLocal) lines.push(_('Created local items: %d.', report.createLocal));
		if (report.updateLocal) lines.push(_('Updated local items: %d.', report.updateLocal));
		if (report.createRemote) lines.push(_('Created remote items: %d.', report.createRemote));
		if (report.updateRemote) lines.push(_('Updated remote items: %d.', report.updateRemote));
		if (report.deleteLocal) lines.push(_('Deleted local items: %d.', report.deleteLocal));
		if (report.deleteRemote) lines.push(_('Deleted remote items: %d.', report.deleteRemote));
		if (report.fetchingTotal && report.fetchingProcessed) lines.push(_('Fetched items: %d/%d.', report.fetchingProcessed, report.fetchingTotal));
		// if (!report.completedTime && report.state) lines.push(_('State: %s.', Synchronizer.stateToLabel(report.state)));
		if (report.cancelling && !report.completedTime) lines.push(_('Cancelling...'));
		if (report.completedTime) lines.push(_('Completed: %s', time.formatMsToLocal(report.completedTime)));
		if (report.errors && report.errors.length) lines.push(_('Last error: %s', report.errors[report.errors.length - 1].toString().substr(0, 500)));

		return lines;
	}
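	// For example (hypothetical report values):
	//
	//     Synchronizer.reportToLines({ createLocal: 2, updateRemote: 1 });
	//     // => ['Created local items: 2.', 'Updated remote items: 1.']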
	logSyncOperation(action, local = null, remote = null, message = null, actionCount = 1) {
		let line = ['Sync'];
		line.push(action);
		if (message) line.push(message);

		let type = local && local.type_ ? local.type_ : null;
		if (!type) type = remote && remote.type_ ? remote.type_ : null;
		if (type) line.push(BaseItem.modelTypeToClassName(type));

		if (local) {
			let s = [];
			s.push(local.id);
			line.push('(Local ' + s.join(', ') + ')');
		}

		if (remote) {
			let s = [];
			s.push(remote.id ? remote.id : remote.path);
			line.push('(Remote ' + s.join(', ') + ')');
		}

		this.logger().debug(line.join(': '));

		if (!this.progressReport_[action]) this.progressReport_[action] = 0;
		this.progressReport_[action] += actionCount;
		this.progressReport_.state = this.state();
		this.onProgress_(this.progressReport_);

		this.dispatch({ type: 'SYNC_REPORT_UPDATE', report: Object.assign({}, this.progressReport_) });
	}
	async logSyncSummary(report) {
		this.logger().info('Operations completed: ');
		for (let n in report) {
			if (!report.hasOwnProperty(n)) continue;
			if (n == 'errors') continue;
			if (n == 'starting') continue;
			if (n == 'finished') continue;
			if (n == 'state') continue;
			if (n == 'completedTime') continue;
			this.logger().info(n + ': ' + (report[n] ? report[n] : '-'));
		}
		let folderCount = await Folder.count();
		let noteCount = await Note.count();
		let resourceCount = await Resource.count();
		this.logger().info('Total folders: ' + folderCount);
		this.logger().info('Total notes: ' + noteCount);
		this.logger().info('Total resources: ' + resourceCount);

		if (report.errors && report.errors.length) {
			this.logger().warn('There were some errors:');
			for (let i = 0; i < report.errors.length; i++) {
				let e = report.errors[i];
				this.logger().warn(e);
			}
		}
	}
	async cancel() {
		if (this.cancelling_ || this.state() == 'idle') return;

		// Stop the queue but don't set it to null as it may be used to
		// retrieve the last few downloads.
		if (this.downloadQueue_) this.downloadQueue_.stop();

		this.logSyncOperation('cancelling', null, null, '');
		this.cancelling_ = true;

		return new Promise((resolve, reject) => {
			const iid = setInterval(() => {
				if (this.state() == 'idle') {
					clearInterval(iid);
					resolve();
				}
			}, 100);
		});
	}

	cancelling() {
		return this.cancelling_;
	}

	static stateToLabel(state) {
		if (state === 'idle') return _('Idle');
		if (state === 'in_progress') return _('In progress');
		return state;
	}
	// Synchronisation is done in three major steps:
	//
	// 1. UPLOAD: Send to the sync target the items that have changed since the last sync.
	// 2. DELETE_REMOTE: Delete on the sync target the items that have been deleted locally.
	// 3. DELTA: Find on the sync target the items that have been modified or deleted, and apply the changes locally.
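	//
	// A minimal usage sketch (hypothetical caller code; only options that this
	// method actually reads are shown, and `persistSomewhere` is a placeholder):
	//
	//     const synchronizer = new Synchronizer(db, api, appType);
	//     const context = await synchronizer.start({
	//         context: previouslySavedContext, // lets the DELTA step resume where it left off
	//         onProgress: (report) => console.info(Synchronizer.reportToLines(report).join(' ')),
	//         saveContextHandler: (ctx) => persistSomewhere(ctx),
	//     });
	//     // Persist the returned `context` and pass it back on the next call.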
	async start(options = null) {
		if (!options) options = {};

		if (this.state() != 'idle') {
			let error = new Error(_('Synchronisation is already in progress. State: %s', this.state()));
			error.code = 'alreadyStarted';
			throw error;
		}

		this.state_ = 'in_progress';

		this.onProgress_ = options.onProgress ? options.onProgress : function(o) {};
		this.progressReport_ = { errors: [] };

		const lastContext = options.context ? options.context : {};

		const syncSteps = options.syncSteps ? options.syncSteps : ['update_remote', 'delete_remote', 'delta'];

		const syncTargetId = this.api().syncTargetId();

		this.cancelling_ = false;

		const masterKeysBefore = await MasterKey.count();
		let hasAutoEnabledEncryption = false;

		let synchronizationId = time.unixMs().toString();

		let outputContext = Object.assign({}, lastContext);

		this.dispatch({ type: 'SYNC_STARTED' });

		this.logSyncOperation('starting', null, null, 'Starting synchronisation to target ' + syncTargetId + '... [' + synchronizationId + ']');

		const handleCannotSyncItem = async (ItemClass, syncTargetId, item, cannotSyncReason, itemLocation = null) => {
			await ItemClass.saveSyncDisabled(syncTargetId, item, cannotSyncReason, itemLocation);
			this.dispatch({ type: 'SYNC_HAS_DISABLED_SYNC_ITEMS' });
		};
		try {
			await this.api().mkdir(this.syncDirName_);
			this.api().setTempDirName(this.syncDirName_);
			await this.api().mkdir(this.resourceDirName_);

			// ========================================================================
			// 1. UPLOAD
			// ------------------------------------------------------------------------
			// First, find all the items that have been changed since the
			// last sync and apply the changes to remote.
			// ========================================================================

			if (syncSteps.indexOf('update_remote') >= 0) {
				let donePaths = [];

				const completeItemProcessing = (path) => {
					donePaths.push(path);
				};
				while (true) {
					if (this.cancelling()) break;

					let result = await BaseItem.itemsThatNeedSync(syncTargetId);
					let locals = result.items;

					for (let i = 0; i < locals.length; i++) {
						if (this.cancelling()) break;

						let local = locals[i];
						let ItemClass = BaseItem.itemClass(local);
						let path = BaseItem.systemPath(local);

						// Safety check to avoid infinite loops.
						// - This error is possible if an item is marked for sync (via sync_time or force_sync) while synchronisation is in
						//   progress. In that case, exit anyway to be sure we aren't in a loop; the item will be re-synced next time.
						// - It can also happen if the item was directly modified on the sync target and given an updated_time in the future.
						//   In that case the local sync_time will be updated to Date.now(), but on the next loop the remote item will still
						//   have a date ahead and a conflict will be detected. There is currently no automatic fix for this - the remote item
						//   on the sync target must be fixed manually (by setting an updated_time less than the current time).
						if (donePaths.indexOf(path) >= 0) throw new JoplinError(sprintf('Processing a path that has already been done: %s. sync_time was not updated? Remote item has an updated_time in the future?', path), 'processingPathTwice');
						let remote = await this.api().stat(path);
						let action = null;
						let reason = '';
						let remoteContent = null;

						if (!remote) {
							if (!local.sync_time) {
								action = 'createRemote';
								reason = 'remote does not exist, and local is new and has never been synced';
							} else {
								// Note or item was modified after having been deleted remotely.
								// "itemConflict" is for all items except notes, which are dealt with in a special way.
								action = local.type_ == BaseModel.TYPE_NOTE ? 'noteConflict' : 'itemConflict';
								reason = 'remote has been deleted, but local has changes';
							}
						} else {
							// Note: in order to know the real updated_time value, we need to load the content. In theory we could
							// rely on the file timestamp (in remote.updated_time) but in practice it's not accurate enough and
							// can lead to conflicts (for example when the file timestamp is slightly ahead of its real
							// updated_time). updated_time is set and managed by clients so it's always accurate.
							// Same situation below for updateLocal.
							//
							// This is a bit inefficient because if the resulting action is "updateRemote" we don't need the whole
							// content, but for now that will do since being reliable is the priority.
							//
							// Note: if a particular sync target is guaranteed to have accurate timestamps, the driver could
							// expose this with an accurateTimestamps() method that returns "true". In that case, the test
							// could be done using the file timestamp and the potentially unnecessary content loading could be skipped.
							// OneDrive does not appear to have accurate timestamps as lastModifiedDateTime would occasionally be
							// a few seconds ahead of what it was set to with setTimestamp().
							try {
								remoteContent = await this.api().get(path);
							} catch (error) {
								if (error.code === 'rejectedByTarget') {
									this.progressReport_.errors.push(error);
									this.logger().warn('Rejected by target: ' + path + ': ' + error.message);
									completeItemProcessing(path);
									continue;
								} else {
									throw error;
								}
							}

							if (!remoteContent) throw new Error('Got metadata for path but could not fetch content: ' + path);
							remoteContent = await BaseItem.unserialize(remoteContent);

							if (remoteContent.updated_time > local.sync_time) {
								// Since, in this loop, we are only dealing with items that require sync, if the
								// remote has been modified after the sync time, it means both items have been
								// modified and so there's a conflict.
								action = local.type_ == BaseModel.TYPE_NOTE ? 'noteConflict' : 'itemConflict';
								reason = 'both remote and local have changes';
							} else {
								action = 'updateRemote';
								reason = 'local has changes';
							}
						}
						this.logSyncOperation(action, local, remote, reason);

						if (local.type_ == BaseModel.TYPE_RESOURCE && (action == 'createRemote' || action == 'updateRemote' || (action == 'itemConflict' && remote))) {
							const localState = await Resource.localState(local.id);
							if (localState.fetch_status !== Resource.FETCH_STATUS_DONE) {
								action = null;
							} else {
								try {
									const remoteContentPath = this.resourceDirName_ + '/' + local.id;
									const result = await Resource.fullPathForSyncUpload(local);
									local = result.resource;
									const localResourceContentPath = result.path;
									await this.api().put(remoteContentPath, null, { path: localResourceContentPath, source: 'file' });
								} catch (error) {
									if (error && ['rejectedByTarget', 'fileNotFound'].indexOf(error.code) >= 0) {
										await handleCannotSyncItem(ItemClass, syncTargetId, local, error.message);
										action = null;
									} else {
										throw error;
									}
								}
							}
						}
						if (action == 'createRemote' || action == 'updateRemote') {
							let canSync = true;
							try {
								if (this.testingHooks_.indexOf('notesRejectedByTarget') >= 0 && local.type_ === BaseModel.TYPE_NOTE) throw new JoplinError('Testing rejectedByTarget', 'rejectedByTarget');
								const content = await ItemClass.serializeForSync(local);
								await this.api().put(path, content);
							} catch (error) {
								if (error && error.code === 'rejectedByTarget') {
									await handleCannotSyncItem(ItemClass, syncTargetId, local, error.message);
									canSync = false;
								} else {
									throw error;
								}
							}

							// Note: Currently, we set sync_time to updated_time, which should work fine given that the resolution is the millisecond.
							// In theory though, this could happen:
							//
							// 1. t0: Editor: Note is modified
							// 2. t0: Sync: Found that note was modified so start uploading it
							// 3. t0: Editor: Note is modified again
							// 4. t1: Sync: Note has finished uploading, set sync_time to t0
							//
							// Later, any attempt to sync will not detect that the note was modified in (3) (within the same millisecond as it was
							// being uploaded) because sync_time will be t0 too.
							//
							// The solution would be to use something like an etag (a simple counter incremented on every change) to make sure each
							// change is uniquely identified. Leaving it like this for now.

							if (canSync) {
								// 2018-01-21: Setting the timestamp is not needed because the delta() logic doesn't rely
								// on it (instead it uses a more reliable `context` object) and the itemsThatNeedSync loop
								// above also doesn't use it because it fetches the whole remote object and reads the
								// more reliable `updated_time` property. Basically remote.updated_time is deprecated.
								// await this.api().setTimestamp(path, local.updated_time);
								await ItemClass.saveSyncTime(syncTargetId, local, local.updated_time);
							}
						} else if (action == 'itemConflict') {
							// ------------------------------------------------------------------------------
							// For non-note conflicts, we take the remote version (i.e. the version that was
							// synced first) and overwrite the local content.
							// ------------------------------------------------------------------------------

							if (remote) {
								local = remoteContent;
								const syncTimeQueries = BaseItem.updateSyncTimeQueries(syncTargetId, local, time.unixMs());
								await ItemClass.save(local, { autoTimestamp: false, changeSource: ItemChange.SOURCE_SYNC, nextQueries: syncTimeQueries });
							} else {
								await ItemClass.delete(local.id, { changeSource: ItemChange.SOURCE_SYNC });
							}
						}
						else if (action == 'noteConflict') {
							// ------------------------------------------------------------------------------
							// First find out if the conflict matters. For example, if the conflict is on the title or body
							// we want to preserve all the changes. If it's on todo_completed it doesn't really matter,
							// so in that case we just take the remote content.
							// ------------------------------------------------------------------------------

							let mustHandleConflict = true;
							if (remoteContent) {
								mustHandleConflict = Note.mustHandleConflict(local, remoteContent);
							}

							// ------------------------------------------------------------------------------
							// Create a duplicate of the local note in the Conflicts folder
							// (to preserve the user's changes).
							// ------------------------------------------------------------------------------

							if (mustHandleConflict) {
								let conflictedNote = Object.assign({}, local);
								delete conflictedNote.id;
								conflictedNote.is_conflict = 1;
								await Note.save(conflictedNote, { autoTimestamp: false, changeSource: ItemChange.SOURCE_SYNC });
							}

							// ------------------------------------------------------------------------------
							// Either copy the remote content to local or, if the remote content has
							// been deleted, delete the local content.
							// ------------------------------------------------------------------------------

							if (remote) {
								local = remoteContent;
								const syncTimeQueries = BaseItem.updateSyncTimeQueries(syncTargetId, local, time.unixMs());
								await ItemClass.save(local, { autoTimestamp: false, changeSource: ItemChange.SOURCE_SYNC, nextQueries: syncTimeQueries });

								if (!!local.encryption_applied) this.dispatch({ type: 'SYNC_GOT_ENCRYPTED_ITEM' });
							} else {
								// Remote no longer exists (note deleted) so delete the local one too.
								await ItemClass.delete(local.id, { changeSource: ItemChange.SOURCE_SYNC });
							}
						}

						completeItemProcessing(path);
					}

					if (!result.hasMore) break;
				}
			} // UPLOAD STEP
			// ========================================================================
			// 2. DELETE_REMOTE
			// ------------------------------------------------------------------------
			// Delete the remote items that have been deleted locally.
			// ========================================================================

			if (syncSteps.indexOf('delete_remote') >= 0) {
				let deletedItems = await BaseItem.deletedItems(syncTargetId);
				for (let i = 0; i < deletedItems.length; i++) {
					if (this.cancelling()) break;

					let item = deletedItems[i];
					let path = BaseItem.systemPath(item.item_id);
					this.logSyncOperation('deleteRemote', null, { id: item.item_id }, 'local has been deleted');
					await this.api().delete(path);
					await BaseItem.remoteDeletedItem(syncTargetId, item.item_id);
				}
			} // DELETE_REMOTE STEP
			// ========================================================================
			// 3. DELTA
			// ------------------------------------------------------------------------
			// Loop through all the remote items, find those that
			// have been created or updated, and apply the changes to local.
			// ========================================================================

			if (this.downloadQueue_) await this.downloadQueue_.stop();
			this.downloadQueue_ = new TaskQueue('syncDownload');
			this.downloadQueue_.logger_ = this.logger();

			if (syncSteps.indexOf('delta') >= 0) {
				// At this point all the local items that have changed have been pushed to remote
				// or handled as conflicts, so no conflict is possible after this.

				let context = null;
				let newDeltaContext = null;
				let localFoldersToDelete = [];
				let hasCancelled = false;
				if (lastContext.delta) context = lastContext.delta;

				while (true) {
					if (this.cancelling() || hasCancelled) break;

					let listResult = await this.api().delta('', {
						context: context,

						// allItemIdsHandler() provides a way for drivers that don't have a delta API to
						// still provide delta functionality, by comparing the items they have to the items
						// the client has. Very inefficient, but that's the only possible workaround.
						// It's a function so that it is only called if the driver needs these IDs. For
						// drivers with delta functionality it's a no-op.
						allItemIdsHandler: async () => {
							return BaseItem.syncedItemIds(syncTargetId);
						},
					});
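					// Sketch of what a delta-less driver might do with the handler
					// (hypothetical driver-side code, not part of this class):
					//
					//     const knownIds = await options.allItemIdsHandler();
					//     // ...list every remote item, then report as deleted any ID in
					//     // knownIds that no longer exists on the sync target.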
					let remotes = listResult.items;

					this.logSyncOperation('fetchingTotal', null, null, 'Fetching delta items from sync target', remotes.length);

					for (const remote of remotes) {
						if (this.cancelling()) break;
						this.downloadQueue_.push(remote.path, async () => {
							return this.api().get(remote.path);
						});
					}

					for (let i = 0; i < remotes.length; i++) {
						if (this.cancelling() || this.testingHooks_.indexOf('cancelDeltaLoop2') >= 0) {
							hasCancelled = true;
							break;
						}

						this.logSyncOperation('fetchingProcessed', null, null, 'Processing fetched item');

						let remote = remotes[i];
						if (!BaseItem.isSystemPath(remote.path)) continue; // The delta API might return things like the .sync or .resource directories, or the root folder

						let path = remote.path;

						const loadContent = async () => {
							let task = await this.downloadQueue_.waitForResult(path);
							if (task.error) throw task.error;
							if (!task.result) return null;
							return await BaseItem.unserialize(task.result);
						};

						let action = null;
						let reason = '';
						let local = await BaseItem.loadItemByPath(path);
						let ItemClass = null;
						let content = null;
						try {
							if (!local) {
								if (remote.isDeleted !== true) {
									action = 'createLocal';
									reason = 'remote exists but local does not';
									content = await loadContent();
									ItemClass = content ? BaseItem.itemClass(content) : null;
								}
							} else {
								ItemClass = BaseItem.itemClass(local);
								local = ItemClass.filter(local);
								if (remote.isDeleted) {
									action = 'deleteLocal';
									reason = 'remote has been deleted';
								} else {
									content = await loadContent();
									if (content && content.updated_time > local.updated_time) {
										action = 'updateLocal';
										reason = 'remote is more recent than local';
									}
								}
							}
						} catch (error) {
							if (error.code === 'rejectedByTarget') {
								this.progressReport_.errors.push(error);
								this.logger().warn('Rejected by target: ' + path + ': ' + error.message);
								action = null;
							} else {
								error.message = 'On file ' + path + ': ' + error.message;
								throw error;
							}
						}

						if (this.testingHooks_.indexOf('skipRevisions') >= 0 && content && content.type_ === BaseModel.TYPE_REVISION) action = null;

						if (!action) continue;

						this.logSyncOperation(action, local, remote, reason);
						if (action == 'createLocal' || action == 'updateLocal') {
							if (content === null) {
								this.logger().warn('Remote has been deleted between now and the delta() call? In that case it will be handled during the next sync: ' + path);
								continue;
							}
							content = ItemClass.filter(content);

							// 2017-12-03: This was added because the new user_updated_time and user_created_time properties were added
							// to the items. However, changing the database is not enough since remote items that haven't been synced yet
							// will not have these properties and, since they are required, it would cause a problem. So this checks
							// whether they are present and, if not, sets them to a reasonable default.
							// Let's leave these two lines for 6 months, by which time all the clients should have been synced.
							if (!content.user_updated_time) content.user_updated_time = content.updated_time;
							if (!content.user_created_time) content.user_created_time = content.created_time;

							let options = {
								autoTimestamp: false,
								nextQueries: BaseItem.updateSyncTimeQueries(syncTargetId, content, time.unixMs()),
								changeSource: ItemChange.SOURCE_SYNC,
							};
							if (action == 'createLocal') options.isNew = true;
							if (action == 'updateLocal') options.oldItem = local;

							const creatingNewResource = content.type_ == BaseModel.TYPE_RESOURCE && action == 'createLocal';

							if (creatingNewResource) {
								if (content.size >= this.maxResourceSize()) {
									await handleCannotSyncItem(ItemClass, syncTargetId, content, 'File "' + content.title + '" is larger than allowed ' + this.maxResourceSize() + ' bytes. Beyond this limit, the mobile app would crash.', BaseItem.SYNC_ITEM_LOCATION_REMOTE);
									continue;
								}

								await ResourceLocalState.save({ resource_id: content.id, fetch_status: Resource.FETCH_STATUS_IDLE });
							}

							await ItemClass.save(content, options);

							if (creatingNewResource) this.dispatch({ type: 'SYNC_CREATED_RESOURCE', id: content.id });

							if (!hasAutoEnabledEncryption && content.type_ === BaseModel.TYPE_MASTER_KEY && !masterKeysBefore) {
								hasAutoEnabledEncryption = true;
								this.logger().info('One master key was downloaded and none was previously available: automatically enabling encryption');
								this.logger().info('Using master key: ', content);
								await this.encryptionService().enableEncryption(content);
								await this.encryptionService().loadMasterKeysFromSettings();
								this.logger().info('Encryption has been enabled with the downloaded master key as the active key. However, note that no password was initially supplied. It will need to be provided by the user.');
							}

							if (!!content.encryption_applied) this.dispatch({ type: 'SYNC_GOT_ENCRYPTED_ITEM' });
						} else if (action == 'deleteLocal') {
							if (local.type_ == BaseModel.TYPE_FOLDER) {
								localFoldersToDelete.push(local);
								continue;
							}

							let ItemClass = BaseItem.itemClass(local.type_);
							await ItemClass.delete(local.id, { trackDeleted: false, changeSource: ItemChange.SOURCE_SYNC });
						}
					}
					// If the user has cancelled, don't record the new context (2) so that synchronisation
					// can start again from the previous context (1) next time. It is ok if some items
					// have been synced between (1) and (2) because the loop above will handle the same
					// items being synced twice as an update. If the local and remote items are identical
					// the update will simply be skipped.
					if (!hasCancelled) {
						if (options.saveContextHandler) {
							const deltaToSave = Object.assign({}, listResult.context);
							// Remove these two properties because they can be large and can be rebuilt
							// the next time the sync is started.
							delete deltaToSave.statsCache;
							delete deltaToSave.statIdsCache;
							options.saveContextHandler({ delta: deltaToSave });
						}

						if (!listResult.hasMore) {
							newDeltaContext = listResult.context;
							break;
						}
						context = listResult.context;
					}
				}

				outputContext.delta = newDeltaContext ? newDeltaContext : lastContext.delta;
				// ------------------------------------------------------------------------
				// Delete the folders that have been collected in the loop above.
				// Folders are always deleted last, and only if they are empty.
				// If they are not empty it's considered a conflict, since whatever deleted
				// them should have deleted their content too. In that case, all their notes
				// are marked as "is_conflict".
				// ------------------------------------------------------------------------

				if (!this.cancelling()) {
					for (let i = 0; i < localFoldersToDelete.length; i++) {
						const item = localFoldersToDelete[i];
						const noteIds = await Folder.noteIds(item.id);
						if (noteIds.length) {
							// CONFLICT
							await Folder.markNotesAsConflict(item.id);
						}
						await Folder.delete(item.id, { deleteChildren: false, changeSource: ItemChange.SOURCE_SYNC, trackDeleted: false });
					}
				}

				if (!this.cancelling()) {
					await BaseItem.deleteOrphanSyncItems();
				}
			} // DELTA STEP
2017-06-29 20:03:16 +02:00
} catch ( error ) {
2019-04-29 19:27:32 +02:00
if ( error && [ "cannotEncryptEncrypted" , "noActiveMasterKey" , "processingPathTwice" ] . indexOf ( error . code ) >= 0 ) {
2017-12-20 21:45:25 +02:00
// Only log an info statement for this since this is a common condition that is reported
2019-04-29 19:27:32 +02:00
// in the application, and needs to be resolved by the user.
// Or it's a temporary issue that will be resolved on next sync.
2017-12-17 21:51:45 +02:00
this . logger ( ) . info ( error . message ) ;
2019-05-01 00:42:06 +02:00
} else if ( error . code === 'unknownItemType' ) {
this . progressReport _ . errors . push ( _ ( 'Unknown item type downloaded - please upgrade Joplin to the latest version' ) ) ;
this . logger ( ) . error ( error ) ;
2017-12-17 21:51:45 +02:00
} else {
this . logger ( ) . error ( error ) ;
2018-03-09 21:51:01 +02:00
// Don't save to the report errors that are due to things like temporary network errors or timeout.
if ( ! shim . fetchRequestCanBeRetried ( error ) ) this . progressReport _ . errors . push ( error ) ;
2017-12-17 21:51:45 +02:00
}
2017-06-20 21:18:19 +02:00
}
		if (this.cancelling()) {
			this.logger().info('Synchronisation was cancelled.');
			this.cancelling_ = false;
		}

		this.progressReport_.completedTime = time.unixMs();

		this.logSyncOperation('finished', null, null, 'Synchronisation finished [' + synchronizationId + ']');

		await this.logSyncSummary(this.progressReport_);

		this.onProgress_ = function(s) {};
		this.progressReport_ = {};

		this.dispatch({ type: 'SYNC_COMPLETED' });

		this.state_ = 'idle';

		return outputContext;
	}
}
module.exports = { Synchronizer };