You've already forked joplin
							
							
				mirror of
				https://github.com/laurent22/joplin.git
				synced 2025-10-31 00:07:48 +02:00 
			
		
		
		
	All: Improved first sync speed when synchronising with Joplin Server
This commit is contained in:
		| @@ -50,7 +50,7 @@ done | ||||
|  | ||||
| cd "$ROOT_DIR/packages/app-cli" | ||||
| npm start -- --profile "$PROFILE_DIR" batch "$CMD_FILE" | ||||
| npm start -- --profile "$PROFILE_DIR" import ~/Desktop/Joplin_17_06_2021.jex | ||||
| # npm start -- --profile "$PROFILE_DIR" import ~/Desktop/Tout_18_06_2021.jex | ||||
| # npm start -- --profile "$PROFILE_DIR" import ~/Desktop/Joplin_17_06_2021.jex | ||||
| npm start -- --profile "$PROFILE_DIR" import ~/Desktop/Tout_18_06_2021.jex | ||||
| npm start -- --profile "$PROFILE_DIR" sync | ||||
|  | ||||
|   | ||||
| @@ -20,6 +20,7 @@ import JoplinError from './JoplinError'; | ||||
| import ShareService from './services/share/ShareService'; | ||||
| import TaskQueue from './TaskQueue'; | ||||
| import ItemUploader from './services/synchronizer/ItemUploader'; | ||||
| import { FileApi } from './file-api'; | ||||
| const { sprintf } = require('sprintf-js'); | ||||
| const { Dirnames } = require('./services/synchronizer/utils/types'); | ||||
|  | ||||
| @@ -27,6 +28,18 @@ interface RemoteItem { | ||||
| 	id: string; | ||||
| 	path?: string; | ||||
| 	type_?: number; | ||||
| 	isDeleted?: boolean; | ||||
|  | ||||
| 	// This is the time when the file was created on the server. It is used for | ||||
| 	// example for the locking mechanism or any file that's not an actual Joplin | ||||
| 	// item. | ||||
| 	updated_time?: number; | ||||
|  | ||||
| 	// This is the time that corresponds to the actual Joplin item updated_time | ||||
| 	// value. A note is always uploaded with a delay so the server updated_time | ||||
| 	// value will always be ahead. However for synchronising we need to know the | ||||
| 	// exact Joplin item updated_time value. | ||||
| 	jop_updated_time?: number; | ||||
| } | ||||
|  | ||||
| function isCannotSyncError(error: any): boolean { | ||||
| @@ -50,7 +63,7 @@ export default class Synchronizer { | ||||
| 	public static verboseMode: boolean = true; | ||||
|  | ||||
| 	private db_: any; | ||||
| 	private api_: any; | ||||
| 	private api_: FileApi; | ||||
| 	private appType_: string; | ||||
| 	private logger_: Logger = new Logger(); | ||||
| 	private state_: string = 'idle'; | ||||
| @@ -74,7 +87,7 @@ export default class Synchronizer { | ||||
|  | ||||
| 	public dispatch: Function; | ||||
|  | ||||
| 	public constructor(db: any, api: any, appType: string) { | ||||
| 	public constructor(db: any, api: FileApi, appType: string) { | ||||
| 		this.db_ = db; | ||||
| 		this.api_ = api; | ||||
| 		this.appType_ = appType; | ||||
| @@ -307,7 +320,7 @@ export default class Synchronizer { | ||||
| 		if (this.syncTargetIsLocked_) throw new JoplinError('Sync target is locked - aborting API call', 'lockError'); | ||||
|  | ||||
| 		try { | ||||
| 			const output = await this.api()[fnName](...args); | ||||
| 			const output = await (this.api() as any)[fnName](...args); | ||||
| 			return output; | ||||
| 		} catch (error) { | ||||
| 			const lockStatus = await this.lockErrorStatus_(); | ||||
| @@ -769,16 +782,27 @@ export default class Synchronizer { | ||||
| 						logger: this.logger(), | ||||
| 					}); | ||||
|  | ||||
| 					const remotes = listResult.items; | ||||
| 					const remotes: RemoteItem[] = listResult.items; | ||||
|  | ||||
| 					this.logSyncOperation('fetchingTotal', null, null, 'Fetching delta items from sync target', remotes.length); | ||||
|  | ||||
| 					const remoteIds = remotes.map(r => BaseItem.pathToId(r.path)); | ||||
| 					const locals = await BaseItem.loadItemsByIds(remoteIds); | ||||
|  | ||||
| 					for (const remote of remotes) { | ||||
| 						if (this.cancelling()) break; | ||||
|  | ||||
| 						this.downloadQueue_.push(remote.path, async () => { | ||||
| 							return this.apiCall('get', remote.path); | ||||
| 						}); | ||||
| 						let needsToDownload = true; | ||||
| 						if (this.api().supportsAccurateTimestamp) { | ||||
| 							const local = locals.find(l => l.id === BaseItem.pathToId(remote.path)); | ||||
| 							if (local && local.updated_time === remote.jop_updated_time) needsToDownload = false; | ||||
| 						} | ||||
|  | ||||
| 						if (needsToDownload) { | ||||
| 							this.downloadQueue_.push(remote.path, async () => { | ||||
| 								return this.apiCall('get', remote.path); | ||||
| 							}); | ||||
| 						} | ||||
| 					} | ||||
|  | ||||
| 					for (let i = 0; i < remotes.length; i++) { | ||||
| @@ -800,9 +824,10 @@ export default class Synchronizer { | ||||
| 						}; | ||||
|  | ||||
| 						const path = remote.path; | ||||
| 						const remoteId = BaseItem.pathToId(path); | ||||
| 						let action = null; | ||||
| 						let reason = ''; | ||||
| 						let local = await BaseItem.loadItemByPath(path); | ||||
| 						let local = locals.find(l => l.id === remoteId); | ||||
| 						let ItemClass = null; | ||||
| 						let content = null; | ||||
|  | ||||
| @@ -821,10 +846,14 @@ export default class Synchronizer { | ||||
| 									action = 'deleteLocal'; | ||||
| 									reason = 'remote has been deleted'; | ||||
| 								} else { | ||||
| 									content = await loadContent(); | ||||
| 									if (content && content.updated_time > local.updated_time) { | ||||
| 										action = 'updateLocal'; | ||||
| 										reason = 'remote is more recent than local'; | ||||
| 									if (this.api().supportsAccurateTimestamp && remote.jop_updated_time === local.updated_time) { | ||||
| 										// Nothing to do, and no need to fetch the content | ||||
| 									} else { | ||||
| 										content = await loadContent(); | ||||
| 										if (content && content.updated_time > local.updated_time) { | ||||
| 											action = 'updateLocal'; | ||||
| 											reason = 'remote is more recent than local'; | ||||
| 										} | ||||
| 									} | ||||
| 								} | ||||
| 							} | ||||
|   | ||||
| @@ -36,6 +36,10 @@ export default class FileApiDriverJoplinServer { | ||||
| 		return true; | ||||
| 	} | ||||
|  | ||||
| 	// Joplin Server unserializes items server-side and reads updated_time | ||||
| 	// directly from them, so the timestamps it returns in delta calls are | ||||
| 	// accurate (see FileApi.supportsAccurateTimestamp). | ||||
| 	public get supportsAccurateTimestamp() { | ||||
| 		return true; | ||||
| 	} | ||||
|  | ||||
| 	public requestRepeatCount() { | ||||
| 		return 3; | ||||
| 	} | ||||
| @@ -44,7 +48,8 @@ export default class FileApiDriverJoplinServer { | ||||
| 		const output = { | ||||
| 			path: rootPath ? path.substr(rootPath.length + 1) : path, | ||||
| 			updated_time: md.updated_time, | ||||
| 			isDir: false, // !!md.is_directory, | ||||
| 			jop_updated_time: md.jop_updated_time, | ||||
| 			isDir: false, | ||||
| 			isDeleted: isDeleted, | ||||
| 		}; | ||||
|  | ||||
|   | ||||
| @@ -24,6 +24,10 @@ export default class FileApiDriverMemory { | ||||
| 		return true; | ||||
| 	} | ||||
|  | ||||
| 	// Delta timestamps from the in-memory driver are treated as accurate, so | ||||
| 	// the synchronizer may skip downloading unchanged items (see | ||||
| 	// FileApi.supportsAccurateTimestamp). | ||||
| 	public get supportsAccurateTimestamp() { | ||||
| 		return true; | ||||
| 	} | ||||
|  | ||||
| 	decodeContent_(content: any) { | ||||
| 		return Buffer.from(content, 'base64').toString('utf-8'); | ||||
| 	} | ||||
|   | ||||
| @@ -86,10 +86,26 @@ class FileApi { | ||||
| 		if (this.driver_.initialize) return this.driver_.initialize(this.fullPath('')); | ||||
| 	} | ||||
|  | ||||
| 	// This can be true if the driver implements uploading items in batch. Will | ||||
| 	// probably only be supported by Joplin Server. | ||||
| 	public get supportsMultiPut(): boolean { | ||||
| 		return !!this.driver().supportsMultiPut; | ||||
| 	} | ||||
|  | ||||
| 	// This can be true when the sync target timestamps (updated_time) provided | ||||
| 	// in the delta call are guaranteed to be accurate. That requires | ||||
| 	// explicitly setting the timestamp, which is not done anymore on any sync | ||||
| 	// target as it wasn't accurate (for example, the file system can't be | ||||
| 	// relied on, and even OneDrive for some reason doesn't guarantee that the | ||||
| 	// timestamp you set is what you get back). | ||||
| 	// | ||||
| 	// The only reliable one at the moment is Joplin Server since it reads the | ||||
| 	// updated_time property directly from the item (it unserializes it | ||||
| 	// server-side). | ||||
| 	public get supportsAccurateTimestamp(): boolean { | ||||
| 		return !!this.driver().supportsAccurateTimestamp; | ||||
| 	} | ||||
|  | ||||
| 	async fetchRemoteDateOffset_() { | ||||
| 		const tempFile = `${this.tempDirName()}/timeCheck${Math.round(Math.random() * 1000000)}.txt`; | ||||
| 		const startTime = Date.now(); | ||||
|   | ||||
										
											Binary file not shown.
										
									
								
							| @@ -356,6 +356,7 @@ export interface Item extends WithDates, WithUuid { | ||||
| 	jop_share_id?: Uuid; | ||||
| 	jop_type?: number; | ||||
| 	jop_encryption_applied?: number; | ||||
| 	jop_updated_time?: number; | ||||
| } | ||||
|  | ||||
| export interface UserItem extends WithDates { | ||||
| @@ -503,6 +504,7 @@ export const databaseSchema: DatabaseTables = { | ||||
| 		jop_share_id: { type: 'string' }, | ||||
| 		jop_type: { type: 'number' }, | ||||
| 		jop_encryption_applied: { type: 'number' }, | ||||
| 		jop_updated_time: { type: 'number' }, | ||||
| 	}, | ||||
| 	user_items: { | ||||
| 		id: { type: 'number' }, | ||||
|   | ||||
| @@ -0,0 +1,29 @@ | ||||
| import { Knex } from 'knex'; | ||||
| import { DbConnection } from '../db'; | ||||
|  | ||||
| // Adds the jop_updated_time column, which stores the actual Joplin item | ||||
| // updated_time (as opposed to the server-side updated_time, which is set at | ||||
| // upload time and therefore always ahead), then backfills it for all existing | ||||
| // Joplin items by unserializing their content. | ||||
| export async function up(db: DbConnection): Promise<any> { | ||||
| 	await db.schema.alterTable('items', function(table: Knex.CreateTableBuilder) { | ||||
| 		// Timestamps are Unix epoch in milliseconds, which overflows a 32-bit | ||||
| 		// "integer" column (int4 on Postgres) - a 64-bit column is required. | ||||
| 		table.bigInteger('jop_updated_time').defaultTo(0).notNullable(); | ||||
| 	}); | ||||
|  | ||||
| 	// Backfill in batches of 1000 to keep memory usage and transaction size | ||||
| 	// bounded. Rows still at the default value 0 are the ones not yet | ||||
| 	// processed, so each iteration makes progress and the loop terminates | ||||
| 	// once every item has been updated. | ||||
| 	// | ||||
| 	// NOTE(review): assumes every Joplin item has a non-zero updated_time - | ||||
| 	// an item whose unserialized updated_time were 0 would be re-selected | ||||
| 	// forever. TODO confirm this invariant holds for all stored items. | ||||
| 	while (true) { | ||||
| 		const items = await db('items') | ||||
| 			.select('id', 'content') | ||||
| 			.where('jop_type', '>', 0) | ||||
| 			.andWhere('jop_updated_time', '=', 0) | ||||
| 			.limit(1000); | ||||
|  | ||||
| 		if (!items.length) break; | ||||
|  | ||||
| 		await db.transaction(async trx => { | ||||
| 			for (const item of items) { | ||||
| 				const unserialized = JSON.parse(item.content); | ||||
| 				await trx('items').update({ jop_updated_time: unserialized.updated_time }).where('id', '=', item.id); | ||||
| 			} | ||||
| 		}); | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Reverts the migration by dropping the column added in up(). The previous | ||||
| // version was a no-op, which made the migration irreversible. | ||||
| export async function down(db: DbConnection): Promise<any> { | ||||
| 	await db.schema.alterTable('items', function(table: Knex.CreateTableBuilder) { | ||||
| 		table.dropColumn('jop_updated_time'); | ||||
| 	}); | ||||
| } | ||||
| @@ -5,14 +5,12 @@ import { ErrorResyncRequired } from '../utils/errors'; | ||||
| import BaseModel, { SaveOptions } from './BaseModel'; | ||||
| import { PaginatedResults, Pagination, PaginationOrderDir } from './utils/pagination'; | ||||
|  | ||||
| export interface ChangeWithItem { | ||||
| 	item: Item; | ||||
| 	updated_time: number; | ||||
| 	type: ChangeType; | ||||
| export interface DeltaChange extends Change { | ||||
| 	jop_updated_time?: number; | ||||
| } | ||||
|  | ||||
| export interface PaginatedChanges extends PaginatedResults { | ||||
| 	items: Change[]; | ||||
| 	items: DeltaChange[]; | ||||
| } | ||||
|  | ||||
| export interface ChangePagination { | ||||
| @@ -158,9 +156,20 @@ export default class ChangeModel extends BaseModel<Change> { | ||||
| 			.orderBy('counter', 'asc') | ||||
| 			.limit(pagination.limit) as any[]; | ||||
|  | ||||
| 		const changes = await query; | ||||
| 		const changes: Change[] = await query; | ||||
|  | ||||
| 		const finalChanges = await this.removeDeletedItems(this.compressChanges(changes)); | ||||
| 		const items: Item[] = await this.db('items').select('id', 'jop_updated_time').whereIn('items.id', changes.map(c => c.item_id)); | ||||
|  | ||||
| 		let finalChanges: DeltaChange[] = this.compressChanges(changes); | ||||
| 		finalChanges = await this.removeDeletedItems(finalChanges, items); | ||||
| 		finalChanges = finalChanges.map(c => { | ||||
| 			const item = items.find(item => item.id === c.item_id); | ||||
| 			if (!item) return c; | ||||
| 			return { | ||||
| 				...c, | ||||
| 				jop_updated_time: item.jop_updated_time, | ||||
| 			}; | ||||
| 		}); | ||||
|  | ||||
| 		return { | ||||
| 			items: finalChanges, | ||||
| @@ -171,14 +180,14 @@ export default class ChangeModel extends BaseModel<Change> { | ||||
| 		}; | ||||
| 	} | ||||
|  | ||||
| 	private async removeDeletedItems(changes: Change[]): Promise<Change[]> { | ||||
| 	private async removeDeletedItems(changes: Change[], items: Item[] = null): Promise<Change[]> { | ||||
| 		const itemIds = changes.map(c => c.item_id); | ||||
|  | ||||
| 		// We skip permission check here because, when an item is shared, we need | ||||
| 		// to fetch files that don't belong to the current user. This check | ||||
| 		// would not be needed anyway because the change items are generated in | ||||
| 		// a context where permissions have already been checked. | ||||
| 		const items: Item[] = await this.db('items').select('id').whereIn('items.id', itemIds); | ||||
| 		items = items === null ? await this.db('items').select('id').whereIn('items.id', itemIds) : items; | ||||
|  | ||||
| 		const output: Change[] = []; | ||||
|  | ||||
|   | ||||
| @@ -285,6 +285,7 @@ export default class ItemModel extends BaseModel<Item> { | ||||
| 		item.share_id = itemRow.jop_share_id; | ||||
| 		item.type_ = itemRow.jop_type; | ||||
| 		item.encryption_applied = itemRow.jop_encryption_applied; | ||||
| 		item.updated_time = itemRow.jop_updated_time; | ||||
|  | ||||
| 		return item; | ||||
| 	} | ||||
| @@ -336,6 +337,7 @@ export default class ItemModel extends BaseModel<Item> { | ||||
| 					item.jop_type = joplinItem.type_; | ||||
| 					item.jop_encryption_applied = joplinItem.encryption_applied || 0; | ||||
| 					item.jop_share_id = joplinItem.share_id || ''; | ||||
| 					item.jop_updated_time = joplinItem.updated_time; | ||||
|  | ||||
| 					const joplinItemToSave = { ...joplinItem }; | ||||
|  | ||||
| @@ -344,6 +346,7 @@ export default class ItemModel extends BaseModel<Item> { | ||||
| 					delete joplinItemToSave.share_id; | ||||
| 					delete joplinItemToSave.type_; | ||||
| 					delete joplinItemToSave.encryption_applied; | ||||
| 					delete joplinItemToSave.updated_time; | ||||
|  | ||||
| 					item.content = Buffer.from(JSON.stringify(joplinItemToSave)); | ||||
| 				} else { | ||||
|   | ||||
		Reference in New Issue
	
	Block a user