	All: Handling of impossible-to-sync items (such as when they are over the size limit of the cloud provider)
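In outline: when the sync target rejects an item for a reason that will never go away (for example, the file is larger than the provider allows), the file API driver throws an error whose `code` is set to `'cannotSync'`. The synchronizer catches that code, records the item in `sync_items` with `sync_disabled = 1` and the error message as `sync_disabled_reason`, and the query that selects items needing sync now filters on `sync_disabled = 0`, so the item is no longer retried on every run. A condensed sketch of that flow follows; the wrapper function is illustrative only, while the method names match the diff below.

// Condensed, illustrative sketch of the mechanism introduced by this commit.
async function uploadItem(api, ItemClass, syncTargetId, local, path, content) {
	try {
		await api.put(path, content);
	} catch (error) {
		if (error && error.code === 'cannotSync') {
			// Record the item as permanently un-syncable, with the reason,
			// so that the "items that need sync" query skips it from now on.
			await ItemClass.saveSyncDisabled(syncTargetId, local, error.message);
			return; // do not update the remote timestamp or sync_time for this item
		}
		throw error; // any other error still aborts this sync pass
	}
	await api.setTimestamp(path, local.updated_time);
	await ItemClass.saveSyncTime(syncTargetId, local, Date.now()); // the diff uses time.unixMs()
}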
@@ -629,4 +629,32 @@ describe('Synchronizer', function() {
		done();
	});

	it('items should skip items that cannot be synced', async (done) => {
		let folder1 = await Folder.save({ title: "folder1" });
		let note1 = await Note.save({ title: "un", is_todo: 1, parent_id: folder1.id });
		const noteId = note1.id;
		await synchronizer().start();
		let disabledItems = await BaseItem.syncDisabledItems();
		expect(disabledItems.length).toBe(0);
		await Note.save({ id: noteId, title: "un mod", });
		synchronizer().debugFlags_ = ['cannotSync'];
		await synchronizer().start();
		synchronizer().debugFlags_ = [];
		await synchronizer().start(); // Another sync to check that this item is now excluded from sync

		await switchClient(2);

		await synchronizer().start();
		let notes = await Note.all();
		expect(notes.length).toBe(1);
		expect(notes[0].title).toBe('un');

		await switchClient(1);

		disabledItems = await BaseItem.syncDisabledItems();
		expect(disabledItems.length).toBe(1);

		done();
	});

});
@@ -43,8 +43,9 @@ const syncDir = __dirname + '/../tests/sync';
const sleepTime = syncTargetId_ == SyncTargetRegistry.nameToId('filesystem') ? 1001 : 400;

const logger = new Logger();
logger.addTarget('console');
logger.addTarget('file', { path: logDir + '/log.txt' });
logger.setLevel(Logger.LEVEL_DEBUG);
logger.setLevel(Logger.LEVEL_WARN);

BaseItem.loadClass('Note', Note);
BaseItem.loadClass('Folder', Folder);

@@ -123,15 +123,27 @@ class FileApiDriverOneDrive {
		return this.makeItem_(item);
	}

	put(path, content, options = null) {
	async put(path, content, options = null) {
		if (!options) options = {};

		if (options.source == 'file') {
			return this.api_.exec('PUT', this.makePath_(path) + ':/content', null, null, options);
		} else {
			options.headers = { 'Content-Type': 'text/plain' };
			return this.api_.exec('PUT', this.makePath_(path) + ':/content', null, content, options);
		let response = null;

		try {
			if (options.source == 'file') {
				response = await this.api_.exec('PUT', this.makePath_(path) + ':/content', null, null, options);
			} else {
				options.headers = { 'Content-Type': 'text/plain' };
				response = await this.api_.exec('PUT', this.makePath_(path) + ':/content', null, content, options);
			}
		} catch (error) {
			if (error && error.code === 'BadRequest' && error.message === 'Maximum request length exceeded.') {
				error.code = 'cannotSync';
				error.message = 'Resource exceeds OneDrive max file size (4MB)';
			}
			throw error;
		}

		return response;
	}

	delete(path) {

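The OneDrive driver above translates its provider-specific failure ('BadRequest' with "Maximum request length exceeded.") into the generic `cannotSync` code. Other drivers can opt in the same way. Below is a hypothetical sketch for a backend that reports size limits via an HTTP 413 status; the driver name, the `httpStatus` field and the limit are assumptions for illustration, not part of this commit.

// Hypothetical driver showing the same pattern as FileApiDriverOneDrive.put():
// map the backend's own "too large" failure onto error.code = 'cannotSync'
// so the synchronizer disables the item instead of retrying it on every sync.
class FileApiDriverExample {
	constructor(api) {
		this.api_ = api;
	}

	async put(path, content, options = null) {
		try {
			return await this.api_.exec('PUT', path, null, content, options);
		} catch (error) {
			if (error && error.httpStatus === 413) { // 413 Payload Too Large (assumed field)
				error.code = 'cannotSync';
				error.message = 'Resource exceeds the sync target file size limit';
			}
			throw error;
		}
	}
}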
@@ -202,7 +202,7 @@ class JoplinDatabase extends Database {
		// default value and thus might cause problems. In that case, the default value
		// must be set in the synchronizer too.

		const existingDatabaseVersions = [0, 1, 2, 3, 4, 5, 6, 7];
		const existingDatabaseVersions = [0, 1, 2, 3, 4, 5, 6, 7, 8];

		let currentVersionIndex = existingDatabaseVersions.indexOf(fromVersion);
		// currentVersionIndex < 0 if for the case where an old version of Joplin used with a newer
@@ -265,6 +265,11 @@ class JoplinDatabase extends Database {
				queries.push('ALTER TABLE resources ADD COLUMN file_extension TEXT NOT NULL DEFAULT ""');
			}

			if (targetVersion == 8) {
				queries.push('ALTER TABLE sync_items ADD COLUMN sync_disabled INT NOT NULL DEFAULT "0"');
				queries.push('ALTER TABLE sync_items ADD COLUMN sync_disabled_reason TEXT NOT NULL DEFAULT ""');
			}

			queries.push({ sql: 'UPDATE version SET version = ?', params: [targetVersion] });
			await this.transactionExecBatch(queries);

@@ -339,6 +339,7 @@ class BaseItem extends BaseModel {
					JOIN sync_items s ON s.item_id = items.id
					WHERE sync_target = %d
					AND s.sync_time < items.updated_time
					AND s.sync_disabled = 0
					%s
					LIMIT %d
				`,
@@ -382,7 +383,20 @@ class BaseItem extends BaseModel {
		throw new Error('Invalid type: ' + type);
	}

	static updateSyncTimeQueries(syncTarget, item, syncTime) {
	static async syncDisabledItems() {
		const rows = await this.db().selectAll('SELECT * FROM sync_items WHERE sync_disabled = 1');
		let output = [];
		for (let i = 0; i < rows.length; i++) {
			const item = await this.loadItem(rows[i].item_type, rows[i].id);
			output.push({
				syncInfo: rows[i],
				item: item,
			});
		}
		return output;
	}

	static updateSyncTimeQueries(syncTarget, item, syncTime, syncDisabled = false, syncDisabledReason = '') {
		const itemType = item.type_;
		const itemId = item.id;
		if (!itemType || !itemId || syncTime === undefined) throw new Error('Invalid parameters in updateSyncTimeQueries()');
@@ -393,8 +407,8 @@ class BaseItem extends BaseModel {
				params: [syncTarget, itemType, itemId],
			},
			{
				sql: 'INSERT INTO sync_items (sync_target, item_type, item_id, sync_time) VALUES (?, ?, ?, ?)',
				params: [syncTarget, itemType, itemId, syncTime],
				sql: 'INSERT INTO sync_items (sync_target, item_type, item_id, sync_time, sync_disabled, sync_disabled_reason) VALUES (?, ?, ?, ?, ?, ?)',
				params: [syncTarget, itemType, itemId, syncTime, syncDisabled ? 1 : 0, syncDisabledReason + ''],
			}
		];
	}
@@ -404,6 +418,12 @@ class BaseItem extends BaseModel {
		return this.db().transactionExecBatch(queries);
	}

	static async saveSyncDisabled(syncTargetId, item, syncDisabledReason) {
		const syncTime = 'sync_time' in item ? item.sync_time : 0;
		const queries = this.updateSyncTimeQueries(syncTargetId, item, syncTime, true, syncDisabledReason);
		return this.db().transactionExecBatch(queries);
	}

	// When an item is deleted, its associated sync_items data is not immediately deleted for
	// performance reason. So this function is used to look for these remaining sync_items and
	// delete them.

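With `syncDisabledItems()` and `saveSyncDisabled()` in place, application code can surface the skipped items to the user rather than failing silently, which is what the new test above checks. A small usage sketch, assuming the model classes are already loaded as in the test setup; the report format itself is illustrative.

// Sketch: list every item that has been excluded from sync, together with the
// reason stored in sync_items.sync_disabled_reason.
async function syncDisabledReport() {
	const disabledItems = await BaseItem.syncDisabledItems();
	const lines = [];
	for (let i = 0; i < disabledItems.length; i++) {
		const entry = disabledItems[i];
		const title = entry.item && entry.item.title ? entry.item.title : entry.syncInfo.item_id;
		lines.push(title + ': ' + entry.syncInfo.sync_disabled_reason);
	}
	return lines.join('\n');
}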
@@ -126,7 +126,11 @@ class Note extends BaseItem {
			let r = null;
			r = noteFieldComp(a.user_updated_time, b.user_updated_time); if (r) return r;
			r = noteFieldComp(a.user_created_time, b.user_created_time); if (r) return r;
			r = noteFieldComp(a.title.toLowerCase(), b.title.toLowerCase()); if (r) return r;

			const titleA = a.title ? a.title.toLowerCase() : '';
			const titleB = b.title ? b.title.toLowerCase() : '';
			r = noteFieldComp(titleA, titleB); if (r) return r;

			return noteFieldComp(a.id, b.id);
		}

@@ -253,22 +253,35 @@ class Synchronizer {

					this.logSyncOperation(action, local, remote, reason);

					async function handleCannotSyncItem(syncTargetId, item, cannotSyncReason) {
						await ItemClass.saveSyncDisabled(syncTargetId, item, cannotSyncReason);
					}

					if (local.type_ == BaseModel.TYPE_RESOURCE && (action == 'createRemote' || (action == 'itemConflict' && remote))) {
						let remoteContentPath = this.resourceDirName_ + '/' + local.id;
						// TODO: handle node and mobile in the same way
						if (shim.isNode()) {
							let resourceContent = '';
							try {
								resourceContent = await Resource.content(local);
							} catch (error) {
								error.message = 'Cannot read resource content: ' + local.id + ': ' + error.message;
								this.logger().error(error);
								this.progressReport_.errors.push(error);
						try {
							// TODO: handle node and mobile in the same way
							if (shim.isNode()) {
								let resourceContent = '';
								try {
									resourceContent = await Resource.content(local);
								} catch (error) {
									error.message = 'Cannot read resource content: ' + local.id + ': ' + error.message;
									this.logger().error(error);
									this.progressReport_.errors.push(error);
								}
								await this.api().put(remoteContentPath, resourceContent);
							} else {
								const localResourceContentPath = Resource.fullPath(local);
								await this.api().put(remoteContentPath, null, { path: localResourceContentPath, source: 'file' });
							}
						} catch (error) {
							if (error && error.code === 'cannotSync') {
								await handleCannotSyncItem(syncTargetId, local, error.message);
								action = null;
							} else {
								throw error;
							}
							await this.api().put(remoteContentPath, resourceContent);
						} else {
							const localResourceContentPath = Resource.fullPath(local);
							await this.api().put(remoteContentPath, null, { path: localResourceContentPath, source: 'file' });
						}
					}

@@ -285,9 +298,27 @@ class Synchronizer {
						// await this.api().setTimestamp(tempPath, local.updated_time);
						// await this.api().move(tempPath, path);

						await this.api().put(path, content);
						await this.api().setTimestamp(path, local.updated_time);
						await ItemClass.saveSyncTime(syncTargetId, local, time.unixMs());
						let canSync = true;
						try {
							if (this.debugFlags_.indexOf('cannotSync') >= 0) {
								const error = new Error('Testing cannotSync');
								error.code = 'cannotSync';
								throw error;
							}
							await this.api().put(path, content);
						} catch (error) {
							if (error && error.code === 'cannotSync') {
								await handleCannotSyncItem(syncTargetId, local, error.message);
								canSync = false;
							} else {
								throw error;
							}
						}

						if (canSync) {
							await this.api().setTimestamp(path, local.updated_time);
							await ItemClass.saveSyncTime(syncTargetId, local, time.unixMs());
						}

					} else if (action == 'itemConflict') {
