const { isHidden } = require('lib/path-utils.js');
const { Logger } = require('lib/logger.js');
const { shim } = require('lib/shim');
const BaseItem = require('lib/models/BaseItem.js');
const JoplinError = require('lib/JoplinError');
const ArrayUtils = require('lib/ArrayUtils');
const { time } = require('lib/time-utils.js');
const { sprintf } = require('sprintf-js');

function requestCanBeRepeated(error) {
	const errorCode = typeof error === 'object' && error.code ? error.code : null;

	// The target is explicitly rejecting the item so repeating wouldn't make a difference.
	if (errorCode === 'rejectedByTarget') return false;

	// We don't repeat failSafe errors because they indicate a server-level issue
	// which usually cannot be fixed by repeating the request.
	// Also we print the previous requests and responses to the log in this case,
	// so not repeating means there will be less noise in the log.
	if (errorCode === 'failSafe') return false;

	return true;
}

async function tryAndRepeat(fn, count) {
	let retryCount = 0;

	// Don't use the internal fetch retry mechanism since we
	// are already retrying here.
	const shimFetchMaxRetryPrevious = shim.fetchMaxRetrySet(0);
	const defer = () => {
		shim.fetchMaxRetrySet(shimFetchMaxRetryPrevious);
	};

	while (true) {
		try {
			const result = await fn();
			defer();
			return result;
		} catch (error) {
			if (retryCount >= count || !requestCanBeRepeated(error)) {
				defer();
				throw error;
			}
			retryCount++;
			await time.sleep(1 + retryCount * 3);
		}
	}
}
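
// Usage sketch (illustrative only, not called directly in this module): retry a
// flaky driver call up to 3 times, waiting a bit longer between each attempt.
// Errors tagged 'rejectedByTarget' or 'failSafe' are rethrown immediately.
//
//     const result = await tryAndRepeat(() => driver.get(path), 3);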

class FileApi {
	constructor(baseDir, driver) {
		this.baseDir_ = baseDir;
		this.driver_ = driver;
		this.logger_ = new Logger();
		this.syncTargetId_ = null;
		this.tempDirName_ = null;
		this.driver_.fileApi_ = this;
		this.requestRepeatCount_ = null; // For testing purposes only - normally this value should come from the driver
	}

	// Ideally all request repeating should be done at the FileApi level to remove duplicate code in the drivers, but
	// historically some drivers (e.g. OneDrive) already handle request repeating, so this is optional, per driver,
	// and it defaults to no repeating.
	requestRepeatCount() {
		if (this.requestRepeatCount_ !== null) return this.requestRepeatCount_;
		if (this.driver_.requestRepeatCount) return this.driver_.requestRepeatCount();
		return 0;
	}
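
	// Example (illustrative): a driver opts into FileApi-level repeating by
	// exposing its own requestRepeatCount(), e.g.:
	//
	//     requestRepeatCount() { return 3; }
	//
	// Drivers that handle retries internally simply omit the method, so the
	// accessor above returns 0 for them.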

	lastRequests() {
		return this.driver_.lastRequests ? this.driver_.lastRequests() : [];
	}

	clearLastRequests() {
		if (this.driver_.clearLastRequests) this.driver_.clearLastRequests();
	}

	baseDir() {
		return this.baseDir_;
	}

	tempDirName() {
		if (this.tempDirName_ === null) throw Error('Temp dir not set!');
		return this.tempDirName_;
	}

	setTempDirName(v) {
		this.tempDirName_ = v;
	}

	fsDriver() {
		return shim.fsDriver();
	}

	driver() {
		return this.driver_;
	}

	setSyncTargetId(v) {
		this.syncTargetId_ = v;
	}

	syncTargetId() {
		if (this.syncTargetId_ === null) throw new Error('syncTargetId has not been set!!');
		return this.syncTargetId_;
	}

	setLogger(l) {
		if (!l) l = new Logger();
		this.logger_ = l;
	}

	logger() {
		return this.logger_;
	}

	fullPath_(path) {
		const output = [];
		if (this.baseDir()) output.push(this.baseDir());
		if (path) output.push(path);
		return output.join('/');
	}

	// DRIVER MUST RETURN PATHS RELATIVE TO `path`
	// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
	async list(path = '', options = null) {
		if (!options) options = {};
		if (!('includeHidden' in options)) options.includeHidden = false;
		if (!('context' in options)) options.context = null;
		if (!('includeDirs' in options)) options.includeDirs = true;

		this.logger().debug(`list ${this.baseDir()}`);

		const result = await tryAndRepeat(() => this.driver_.list(this.fullPath_(path), options), this.requestRepeatCount());

		if (!options.includeHidden) {
			const temp = [];
			for (let i = 0; i < result.items.length; i++) {
				if (!isHidden(result.items[i].path)) temp.push(result.items[i]);
			}
			result.items = temp;
		}

		if (!options.includeDirs) {
			result.items = result.items.filter(f => !f.isDir);
		}

		return result;
	}
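
	// Example (illustrative): listing the sync target root without directories:
	//
	//     const result = await fileApi.list('', { includeDirs: false });
	//     for (const item of result.items) { /* item.path is relative to the base dir */ }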

	// Deprecated
	setTimestamp(path, timestampMs) {
		this.logger().debug(`setTimestamp ${this.fullPath_(path)}`);
		return tryAndRepeat(() => this.driver_.setTimestamp(this.fullPath_(path), timestampMs), this.requestRepeatCount());
		// return this.driver_.setTimestamp(this.fullPath_(path), timestampMs);
	}

	mkdir(path) {
		this.logger().debug(`mkdir ${this.fullPath_(path)}`);
		return tryAndRepeat(() => this.driver_.mkdir(this.fullPath_(path)), this.requestRepeatCount());
	}

	async stat(path) {
		this.logger().debug(`stat ${this.fullPath_(path)}`);
		const output = await tryAndRepeat(() => this.driver_.stat(this.fullPath_(path)), this.requestRepeatCount());
		if (!output) return output;
		output.path = path;
		return output;

		// return this.driver_.stat(this.fullPath_(path)).then((output) => {
		// 	if (!output) return output;
		// 	output.path = path;
		// 	return output;
		// });
	}

	get(path, options = null) {
		if (!options) options = {};
		if (!options.encoding) options.encoding = 'utf8';
		this.logger().debug(`get ${this.fullPath_(path)}`);
		return tryAndRepeat(() => this.driver_.get(this.fullPath_(path), options), this.requestRepeatCount());
	}

	async put(path, content, options = null) {
		this.logger().debug(`put ${this.fullPath_(path)}`, options);

		if (options && options.source === 'file') {
			if (!(await this.fsDriver().exists(options.path))) throw new JoplinError(`File not found: ${options.path}`, 'fileNotFound');
		}

		return tryAndRepeat(() => this.driver_.put(this.fullPath_(path), content, options), this.requestRepeatCount());
	}

	delete(path) {
		this.logger().debug(`delete ${this.fullPath_(path)}`);
		return tryAndRepeat(() => this.driver_.delete(this.fullPath_(path)), this.requestRepeatCount());
	}

	// Deprecated
	move(oldPath, newPath) {
		this.logger().debug(`move ${this.fullPath_(oldPath)} => ${this.fullPath_(newPath)}`);
		return tryAndRepeat(() => this.driver_.move(this.fullPath_(oldPath), this.fullPath_(newPath)), this.requestRepeatCount());
	}

	// Deprecated
	format() {
		return tryAndRepeat(() => this.driver_.format(), this.requestRepeatCount());
	}

	clearRoot() {
		return tryAndRepeat(() => this.driver_.clearRoot(this.baseDir()), this.requestRepeatCount());
	}

	delta(path, options = null) {
		this.logger().debug(`delta ${this.fullPath_(path)}`);
		return tryAndRepeat(() => this.driver_.delta(this.fullPath_(path), options), this.requestRepeatCount());
	}
}
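
// Example (illustrative sketch; the driver and helper variables shown are
// hypothetical): a FileApi wraps a base directory plus a driver object that
// implements the low-level operations (list, get, put, delete, stat, delta, ...):
//
//     const api = new FileApi('/path/to/sync/dir', someDriver);
//     api.setLogger(logger);
//     api.setSyncTargetId(syncTargetId);
//     await api.put('info.json', '{}');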

function basicDeltaContextFromOptions_(options) {
	const output = {
		timestamp: 0,
		filesAtTimestamp: [],
		statsCache: null,
		statIdsCache: null,
		deletedItemsProcessed: false,
	};

	if (!options || !options.context) return output;

	const d = new Date(options.context.timestamp);

	output.timestamp = isNaN(d.getTime()) ? 0 : options.context.timestamp;
	output.filesAtTimestamp = Array.isArray(options.context.filesAtTimestamp) ? options.context.filesAtTimestamp.slice() : [];
	output.statsCache = options.context && options.context.statsCache ? options.context.statsCache : null;
	output.statIdsCache = options.context && options.context.statIdsCache ? options.context.statIdsCache : null;
	output.deletedItemsProcessed = options.context && 'deletedItemsProcessed' in options.context ? options.context.deletedItemsProcessed : false;

	return output;
}

// This is the basic delta algorithm, which can be used in case the cloud service does not have
// a built-in delta API. OneDrive and Dropbox have one for example, but Nextcloud and obviously
// the file system do not.
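//
// Illustrative sketch of how a driver without a native delta API might delegate
// to basicDelta (the getDirStats helper name is made up here):
//
//     async delta(path, options) {
//         const getDirStats = async path => (await this.list(path)).items;
//         return basicDelta(path, getDirStats, options);
//     }
//
// `options` is expected to provide at least allItemIdsHandler() (returning the IDs
// of the items we have locally), and optionally logger, wipeOutFailSafe and the
// context returned by a previous call.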
async function basicDelta(path, getDirStatFn, options) {
	const outputLimit = 50;

	const itemIds = await options.allItemIdsHandler();
	if (!Array.isArray(itemIds)) throw new Error('Delta API not supported - local IDs must be provided');

	const logger = options && options.logger ? options.logger : new Logger();

	const context = basicDeltaContextFromOptions_(options);

	if (context.timestamp > Date.now()) {
		logger.warn(`BasicDelta: Context timestamp is greater than current time: ${context.timestamp}`);
		logger.warn('BasicDelta: Sync will continue but it is likely that nothing will be synced');
	}

	const newContext = {
		timestamp: context.timestamp,
		filesAtTimestamp: context.filesAtTimestamp.slice(),
		statsCache: context.statsCache,
		statIdsCache: context.statIdsCache,
		deletedItemsProcessed: context.deletedItemsProcessed,
	};

	// Stats are cached until all items have been processed (until hasMore is false)
	if (newContext.statsCache === null) {
		newContext.statsCache = await getDirStatFn(path);
		newContext.statsCache.sort(function(a, b) {
			return a.updated_time - b.updated_time;
		});
		newContext.statIdsCache = newContext.statsCache.filter(item => BaseItem.isSystemPath(item.path)).map(item => BaseItem.pathToId(item.path));
		newContext.statIdsCache.sort(); // Items must be sorted to use binary search below
	}

	let output = [];

	const updateReport = {
		timestamp: context.timestamp,
		older: 0,
		newer: 0,
		equal: 0,
	};

	// Find out which files have been changed since the last time. Note that we keep
	// both the timestamp of the most recent change, *and* the items that exactly match
	// this timestamp. This is to handle cases where an item is modified while this delta
	// function is running. For example:
	// t0: Item 1 is changed
	// t0: Sync items - run delta function
	// t0: While delta() is running, modify Item 2
	// Since item 2 was modified within the same millisecond, it would be skipped in the
	// next sync if we relied exclusively on a timestamp.
	for (let i = 0; i < newContext.statsCache.length; i++) {
		const stat = newContext.statsCache[i];

		if (stat.isDir) continue;

		if (stat.updated_time < context.timestamp) {
			updateReport.older++;
			continue;
		}

		// Special case for items that exactly match the timestamp
		if (stat.updated_time === context.timestamp) {
			if (context.filesAtTimestamp.indexOf(stat.path) >= 0) {
				updateReport.equal++;
				continue;
			}
		}

		if (stat.updated_time > newContext.timestamp) {
			newContext.timestamp = stat.updated_time;
			newContext.filesAtTimestamp = [];
			updateReport.newer++;
		}

		newContext.filesAtTimestamp.push(stat.path);
		output.push(stat);

		if (output.length >= outputLimit) break;
	}

	logger.info(`BasicDelta: Report: ${JSON.stringify(updateReport)}`);

	if (!newContext.deletedItemsProcessed) {
		// Find out which items have been deleted on the sync target by comparing the items
		// we have to the items on the target.
		// Note that when deleted items are processed it might result in the output having
		// more items than outputLimit. This is acceptable since delete operations are cheap.
		const deletedItems = [];
		for (let i = 0; i < itemIds.length; i++) {
			const itemId = itemIds[i];

			if (ArrayUtils.binarySearch(newContext.statIdsCache, itemId) < 0) {
				deletedItems.push({
					path: BaseItem.systemPath(itemId),
					isDeleted: true,
				});
			}
		}

		const percentDeleted = itemIds.length ? deletedItems.length / itemIds.length : 0;

		// If 90% or more of the notes are going to be deleted, it's most likely a
		// configuration error or bug. For example, if the user moves their Nextcloud
		// directory, or if a network drive gets disconnected and returns an empty dir
		// instead of an error. In that case, we don't wipe out the user data, unless
		// they have switched off the fail-safe.
		if (options.wipeOutFailSafe && percentDeleted >= 0.90) throw new JoplinError(sprintf('Fail-safe: Sync was interrupted because %d%% of the data (%d items) is about to be deleted. To override this behaviour disable the fail-safe in the sync settings.', Math.round(percentDeleted * 100), deletedItems.length), 'failSafe');

		output = output.concat(deletedItems);
	}

	newContext.deletedItemsProcessed = true;

	const hasMore = output.length >= outputLimit;

	if (!hasMore) {
		// Clear temporary info from context. It's especially important to remove deletedItemsProcessed
		// so that deleted items are processed again on the next sync.
		newContext.statsCache = null;
		newContext.statIdsCache = null;
		delete newContext.deletedItemsProcessed;
	}

	return {
		hasMore: hasMore,
		context: newContext,
		items: output,
	};
}
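
// Example (illustrative): a caller pages through delta results by feeding the
// returned context back into the next call until hasMore is false:
//
//     let context = null;
//     while (true) {
//         const page = await basicDelta('', getDirStatFn, { ...options, context });
//         // ...process page.items (entries with isDeleted: true were removed remotely)...
//         if (!page.hasMore) break;
//         context = page.context;
//     }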

module.exports = { FileApi, basicDelta };