You've already forked joplin
							
							
				mirror of
				https://github.com/laurent22/joplin.git
				synced 2025-10-31 00:07:48 +02:00 
			
		
		
		
	Tools: Cleaned up tests and splitted sync tests into smaller parts
This commit is contained in:
		| @@ -74,6 +74,24 @@ packages/app-cli/tests/InMemoryCache.js.map | ||||
| packages/app-cli/tests/MdToHtml.d.ts | ||||
| packages/app-cli/tests/MdToHtml.js | ||||
| packages/app-cli/tests/MdToHtml.js.map | ||||
| packages/app-cli/tests/Synchronizer.basics.d.ts | ||||
| packages/app-cli/tests/Synchronizer.basics.js | ||||
| packages/app-cli/tests/Synchronizer.basics.js.map | ||||
| packages/app-cli/tests/Synchronizer.conflicts.d.ts | ||||
| packages/app-cli/tests/Synchronizer.conflicts.js | ||||
| packages/app-cli/tests/Synchronizer.conflicts.js.map | ||||
| packages/app-cli/tests/Synchronizer.e2ee.d.ts | ||||
| packages/app-cli/tests/Synchronizer.e2ee.js | ||||
| packages/app-cli/tests/Synchronizer.e2ee.js.map | ||||
| packages/app-cli/tests/Synchronizer.resources.d.ts | ||||
| packages/app-cli/tests/Synchronizer.resources.js | ||||
| packages/app-cli/tests/Synchronizer.resources.js.map | ||||
| packages/app-cli/tests/Synchronizer.revisions.d.ts | ||||
| packages/app-cli/tests/Synchronizer.revisions.js | ||||
| packages/app-cli/tests/Synchronizer.revisions.js.map | ||||
| packages/app-cli/tests/Synchronizer.tags.d.ts | ||||
| packages/app-cli/tests/Synchronizer.tags.js | ||||
| packages/app-cli/tests/Synchronizer.tags.js.map | ||||
| packages/app-cli/tests/fsDriver.d.ts | ||||
| packages/app-cli/tests/fsDriver.js | ||||
| packages/app-cli/tests/fsDriver.js.map | ||||
| @@ -242,6 +260,9 @@ packages/app-cli/tests/synchronizer_LockHandler.js.map | ||||
| packages/app-cli/tests/synchronizer_MigrationHandler.d.ts | ||||
| packages/app-cli/tests/synchronizer_MigrationHandler.js | ||||
| packages/app-cli/tests/synchronizer_MigrationHandler.js.map | ||||
| packages/app-cli/tests/test-utils-synchronizer.d.ts | ||||
| packages/app-cli/tests/test-utils-synchronizer.js | ||||
| packages/app-cli/tests/test-utils-synchronizer.js.map | ||||
| packages/app-desktop/ElectronAppWrapper.d.ts | ||||
| packages/app-desktop/ElectronAppWrapper.js | ||||
| packages/app-desktop/ElectronAppWrapper.js.map | ||||
| @@ -872,6 +893,9 @@ packages/lib/commands/historyForward.js.map | ||||
| packages/lib/commands/synchronize.d.ts | ||||
| packages/lib/commands/synchronize.js | ||||
| packages/lib/commands/synchronize.js.map | ||||
| packages/lib/dummy.test.d.ts | ||||
| packages/lib/dummy.test.js | ||||
| packages/lib/dummy.test.js.map | ||||
| packages/lib/errorUtils.d.ts | ||||
| packages/lib/errorUtils.js | ||||
| packages/lib/errorUtils.js.map | ||||
|   | ||||
| @@ -24,6 +24,7 @@ module.exports = { | ||||
| 		'afterAll': 'readonly', | ||||
| 		'beforeEach': 'readonly', | ||||
| 		'afterEach': 'readonly', | ||||
| 		'jest': 'readonly', | ||||
|  | ||||
| 		// React Native variables | ||||
| 		'__DEV__': 'readonly', | ||||
|   | ||||
							
								
								
									
										24
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										24
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -65,6 +65,24 @@ packages/app-cli/tests/InMemoryCache.js.map | ||||
| packages/app-cli/tests/MdToHtml.d.ts | ||||
| packages/app-cli/tests/MdToHtml.js | ||||
| packages/app-cli/tests/MdToHtml.js.map | ||||
| packages/app-cli/tests/Synchronizer.basics.d.ts | ||||
| packages/app-cli/tests/Synchronizer.basics.js | ||||
| packages/app-cli/tests/Synchronizer.basics.js.map | ||||
| packages/app-cli/tests/Synchronizer.conflicts.d.ts | ||||
| packages/app-cli/tests/Synchronizer.conflicts.js | ||||
| packages/app-cli/tests/Synchronizer.conflicts.js.map | ||||
| packages/app-cli/tests/Synchronizer.e2ee.d.ts | ||||
| packages/app-cli/tests/Synchronizer.e2ee.js | ||||
| packages/app-cli/tests/Synchronizer.e2ee.js.map | ||||
| packages/app-cli/tests/Synchronizer.resources.d.ts | ||||
| packages/app-cli/tests/Synchronizer.resources.js | ||||
| packages/app-cli/tests/Synchronizer.resources.js.map | ||||
| packages/app-cli/tests/Synchronizer.revisions.d.ts | ||||
| packages/app-cli/tests/Synchronizer.revisions.js | ||||
| packages/app-cli/tests/Synchronizer.revisions.js.map | ||||
| packages/app-cli/tests/Synchronizer.tags.d.ts | ||||
| packages/app-cli/tests/Synchronizer.tags.js | ||||
| packages/app-cli/tests/Synchronizer.tags.js.map | ||||
| packages/app-cli/tests/fsDriver.d.ts | ||||
| packages/app-cli/tests/fsDriver.js | ||||
| packages/app-cli/tests/fsDriver.js.map | ||||
| @@ -233,6 +251,9 @@ packages/app-cli/tests/synchronizer_LockHandler.js.map | ||||
| packages/app-cli/tests/synchronizer_MigrationHandler.d.ts | ||||
| packages/app-cli/tests/synchronizer_MigrationHandler.js | ||||
| packages/app-cli/tests/synchronizer_MigrationHandler.js.map | ||||
| packages/app-cli/tests/test-utils-synchronizer.d.ts | ||||
| packages/app-cli/tests/test-utils-synchronizer.js | ||||
| packages/app-cli/tests/test-utils-synchronizer.js.map | ||||
| packages/app-desktop/ElectronAppWrapper.d.ts | ||||
| packages/app-desktop/ElectronAppWrapper.js | ||||
| packages/app-desktop/ElectronAppWrapper.js.map | ||||
| @@ -863,6 +884,9 @@ packages/lib/commands/historyForward.js.map | ||||
| packages/lib/commands/synchronize.d.ts | ||||
| packages/lib/commands/synchronize.js | ||||
| packages/lib/commands/synchronize.js.map | ||||
| packages/lib/dummy.test.d.ts | ||||
| packages/lib/dummy.test.js | ||||
| packages/lib/dummy.test.js.map | ||||
| packages/lib/errorUtils.d.ts | ||||
| packages/lib/errorUtils.js | ||||
| packages/lib/errorUtils.js.map | ||||
|   | ||||
| @@ -12,10 +12,6 @@ const Setting = require('@joplin/lib/models/Setting').default; | ||||
| const { sprintf } = require('sprintf-js'); | ||||
| const exec = require('child_process').exec; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.error('Unhandled promise rejection', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| const baseDir = `${dirname(__dirname)}/tests/cli-integration`; | ||||
| const joplinAppPath = `${__dirname}/main.js`; | ||||
|  | ||||
|   | ||||
| @@ -22,10 +22,6 @@ const logger = new Logger(); | ||||
| logger.addTarget('console'); | ||||
| logger.setLevel(Logger.LEVEL_DEBUG); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.error('Unhandled promise rejection', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| function createClient(id) { | ||||
| 	return { | ||||
| 		id: id, | ||||
|   | ||||
| @@ -34,6 +34,7 @@ module.exports = { | ||||
| 		'<rootDir>/tests/support/', | ||||
| 		'<rootDir>/build/', | ||||
| 		'<rootDir>/tests/test-utils.js', | ||||
| 		'<rootDir>/tests/test-utils-synchronizer.js', | ||||
| 		'<rootDir>/tests/file_api_driver.js', | ||||
| 		'<rootDir>/tests/tmp/', | ||||
| 	], | ||||
|   | ||||
| @@ -2,20 +2,16 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const ArrayUtils = require('@joplin/lib/ArrayUtils'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('ArrayUtils', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should remove array elements', asyncTest(async () => { | ||||
| 	it('should remove array elements', (async () => { | ||||
| 		let a = ['un', 'deux', 'trois']; | ||||
| 		a = ArrayUtils.removeElement(a, 'deux'); | ||||
|  | ||||
| @@ -28,7 +24,7 @@ describe('ArrayUtils', function() { | ||||
| 		expect(a.length).toBe(3); | ||||
| 	})); | ||||
|  | ||||
| 	it('should find items using binary search', asyncTest(async () => { | ||||
| 	it('should find items using binary search', (async () => { | ||||
| 		let items = ['aaa', 'ccc', 'bbb']; | ||||
| 		expect(ArrayUtils.binarySearch(items, 'bbb')).toBe(-1); // Array not sorted! | ||||
| 		items.sort(); | ||||
| @@ -41,14 +37,14 @@ describe('ArrayUtils', function() { | ||||
| 		expect(ArrayUtils.binarySearch(items, 'aaa')).toBe(-1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should compare arrays', asyncTest(async () => { | ||||
| 	it('should compare arrays', (async () => { | ||||
| 		expect(ArrayUtils.contentEquals([], [])).toBe(true); | ||||
| 		expect(ArrayUtils.contentEquals(['a'], ['a'])).toBe(true); | ||||
| 		expect(ArrayUtils.contentEquals(['b', 'a'], ['a', 'b'])).toBe(true); | ||||
| 		expect(ArrayUtils.contentEquals(['b'], ['a', 'b'])).toBe(false); | ||||
| 	})); | ||||
|  | ||||
| 	it('should merge overlapping intervals', asyncTest(async () => { | ||||
| 	it('should merge overlapping intervals', (async () => { | ||||
| 		const testCases = [ | ||||
| 			[ | ||||
| 				[], | ||||
|   | ||||
| @@ -1,13 +1,9 @@ | ||||
|  | ||||
| const { asyncTest, setupDatabaseAndSynchronizer, switchClient } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient } = require('./test-utils.js'); | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
| const { enexXmlToHtml } = require('@joplin/lib/import-enex-html-gen.js'); | ||||
| const cleanHtml = require('clean-html'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.warn('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| const fileWithPath = (filename) => | ||||
| 	`${__dirname}/enex_to_html/${filename}`; | ||||
|  | ||||
| @@ -49,7 +45,7 @@ const compareOutputToExpected = (options) => { | ||||
| 	const outputFile = fileWithPath(`${options.testName}.html`); | ||||
| 	const testTitle = `should convert from Enex to Html: ${options.testName}`; | ||||
|  | ||||
| 	it(testTitle, asyncTest(async () => { | ||||
| 	it(testTitle, (async () => { | ||||
| 		const enexInput = await shim.fsDriver().readFile(inputFile); | ||||
| 		const expectedOutput = await shim.fsDriver().readFile(outputFile); | ||||
| 		const actualOutput = await beautifyHtml(await enexXmlToHtml(enexInput, options.resources)); | ||||
| @@ -101,7 +97,7 @@ describe('EnexToHtml', function() { | ||||
| 		}], | ||||
| 	}); | ||||
|  | ||||
| 	// it('fails when not given a matching resource', asyncTest(async () => { | ||||
| 	// it('fails when not given a matching resource', (async () => { | ||||
| 	// 	// To test the promise-unexpectedly-resolved case, add `audioResource` to the array. | ||||
| 	// 	const resources = []; | ||||
| 	// 	const inputFile = fileWithPath('en-media--image.enex'); | ||||
|   | ||||
| @@ -4,7 +4,7 @@ | ||||
| const os = require('os'); | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { filename } = require('@joplin/lib/path-utils'); | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| @@ -12,10 +12,6 @@ const shim = require('@joplin/lib/shim').default; | ||||
| const HtmlToHtml = require('@joplin/renderer/HtmlToHtml').default; | ||||
| const { enexXmlToMd } = require('@joplin/lib/import-enex-md-gen.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('HtmlToHtml', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| @@ -24,7 +20,7 @@ describe('HtmlToHtml', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should convert from Html to Html', asyncTest(async () => { | ||||
| 	it('should convert from Html to Html', (async () => { | ||||
| 		const basePath = `${__dirname}/html_to_html`; | ||||
| 		const files = await shim.fsDriver().readDirStats(basePath); | ||||
| 		const htmlToHtml = new HtmlToHtml(); | ||||
|   | ||||
| @@ -4,7 +4,7 @@ | ||||
| const os = require('os'); | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { filename } = require('@joplin/lib/path-utils'); | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| @@ -12,10 +12,6 @@ const shim = require('@joplin/lib/shim').default; | ||||
| const HtmlToMd = require('@joplin/lib/HtmlToMd'); | ||||
| const { enexXmlToMd } = require('@joplin/lib/import-enex-md-gen.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('HtmlToMd', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| @@ -24,7 +20,7 @@ describe('HtmlToMd', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should convert from Html to Markdown', asyncTest(async () => { | ||||
| 	it('should convert from Html to Markdown', (async () => { | ||||
| 		const basePath = `${__dirname}/html_to_md`; | ||||
| 		const files = await shim.fsDriver().readDirStats(basePath); | ||||
| 		const htmlToMd = new HtmlToMd(); | ||||
|   | ||||
| @@ -1,10 +1,9 @@ | ||||
|  | ||||
| const { asyncTest } = require('./test-utils.js'); | ||||
| const MarkupToHtml = require('@joplin/renderer/MarkupToHtml').default; | ||||
|  | ||||
| describe('MarkupToHtml', function() { | ||||
|  | ||||
| 	it('should strip markup', asyncTest(async () => { | ||||
| 	it('should strip markup', (async () => { | ||||
| 		const service = new MarkupToHtml(); | ||||
|  | ||||
| 		const testCases = { | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| import MdToHtml from '@joplin/renderer/MdToHtml'; | ||||
| const os = require('os'); | ||||
| const { filename } = require('@joplin/lib/path-utils'); | ||||
| const { asyncTest, setupDatabaseAndSynchronizer, switchClient } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient } = require('./test-utils.js'); | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
| const { themeStyle } = require('@joplin/lib/theme'); | ||||
|  | ||||
| @@ -25,7 +25,7 @@ describe('MdToHtml', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should convert from Markdown to Html', asyncTest(async () => { | ||||
| 	it('should convert from Markdown to Html', (async () => { | ||||
| 		const basePath = `${__dirname}/md_to_html`; | ||||
| 		const files = await shim.fsDriver().readDirStats(basePath); | ||||
| 		const mdToHtml = newTestMdToHtml(); | ||||
| @@ -82,7 +82,7 @@ describe('MdToHtml', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should return enabled plugin assets', asyncTest(async () => { | ||||
| 	it('should return enabled plugin assets', (async () => { | ||||
| 		const pluginOptions: any = {}; | ||||
| 		const pluginNames = MdToHtml.pluginNames(); | ||||
|  | ||||
| @@ -107,7 +107,7 @@ describe('MdToHtml', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should wrapped the rendered Markdown', asyncTest(async () => { | ||||
| 	it('should wrapped the rendered Markdown', (async () => { | ||||
| 		const mdToHtml = newTestMdToHtml(); | ||||
|  | ||||
| 		// In this case, the HTML contains both the style and | ||||
| @@ -117,7 +117,7 @@ describe('MdToHtml', function() { | ||||
| 		expect(result.html.indexOf('rendered-md') >= 0).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return the rendered body only', asyncTest(async () => { | ||||
| 	it('should return the rendered body only', (async () => { | ||||
| 		const mdToHtml = newTestMdToHtml(); | ||||
|  | ||||
| 		// In this case, the HTML contains only the rendered markdown, with | ||||
| @@ -137,7 +137,7 @@ describe('MdToHtml', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should split HTML and CSS', asyncTest(async () => { | ||||
| 	it('should split HTML and CSS', (async () => { | ||||
| 		const mdToHtml = newTestMdToHtml(); | ||||
|  | ||||
| 		// It is similar to the bodyOnly option, excepts that the rendered | ||||
| @@ -147,7 +147,7 @@ describe('MdToHtml', function() { | ||||
| 		expect(result.html.trim()).toBe('<div id="rendered-md"><p>just <strong>testing</strong></p>\n</div>'); | ||||
| 	})); | ||||
|  | ||||
| 	// it('should render links correctly', asyncTest(async () => { | ||||
| 	// it('should render links correctly', (async () => { | ||||
| 	// 	const mdToHtml = newTestMdToHtml(); | ||||
|  | ||||
| 	// 	const testCases = [ | ||||
|   | ||||
| @@ -1,20 +1,14 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
|  | ||||
|  | ||||
| const { asyncTest } = require('./test-utils.js'); | ||||
| const StringUtils = require('@joplin/lib/string-utils'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('StringUtils', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should surround keywords with strings', asyncTest(async () => { | ||||
| 	it('should surround keywords with strings', (async () => { | ||||
| 		const testCases = [ | ||||
| 			[[], 'test', 'a', 'b', 'test'], | ||||
| 			[['test'], 'test', 'a', 'b', 'atestb'], | ||||
| @@ -40,7 +34,7 @@ describe('StringUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should find the next whitespace character', asyncTest(async () => { | ||||
| 	it('should find the next whitespace character', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['', [[0, 0]]], | ||||
| 			['Joplin', [[0, 6], [3, 6], [6, 6]]], | ||||
|   | ||||
							
								
								
									
										398
									
								
								packages/app-cli/tests/Synchronizer.basics.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										398
									
								
								packages/app-cli/tests/Synchronizer.basics.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,398 @@ | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
| import { allNotesFolders, remoteNotesAndFolders, localNotesFoldersSameAsRemote } from './test-utils-synchronizer'; | ||||
|  | ||||
| const { syncTargetName, synchronizerStart, setupDatabaseAndSynchronizer, synchronizer, sleep, switchClient, syncTargetId, fileApi } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const BaseItem = require('@joplin/lib/models/BaseItem.js'); | ||||
| const WelcomeUtils = require('@joplin/lib/WelcomeUtils'); | ||||
|  | ||||
| describe('Synchronizer.basics', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		await setupDatabaseAndSynchronizer(1); | ||||
| 		await setupDatabaseAndSynchronizer(2); | ||||
| 		await switchClient(1); | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should create remote items', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder1' }); | ||||
| 		await Note.save({ title: 'un', parent_id: folder.id }); | ||||
|  | ||||
| 		const all = await allNotesFolders(); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await localNotesFoldersSameAsRemote(all, expect); | ||||
| 	})); | ||||
|  | ||||
| 	it('should update remote items', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder1' }); | ||||
| 		const note = await Note.save({ title: 'un', parent_id: folder.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await Note.save({ title: 'un UPDATE', id: note.id }); | ||||
|  | ||||
| 		const all = await allNotesFolders(); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await localNotesFoldersSameAsRemote(all, expect); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create local items', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder1' }); | ||||
| 		await Note.save({ title: 'un', parent_id: folder.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const all = await allNotesFolders(); | ||||
|  | ||||
| 		await localNotesFoldersSameAsRemote(all, expect); | ||||
| 	})); | ||||
|  | ||||
| 	it('should update local items', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		let note2 = await Note.load(note1.id); | ||||
| 		note2.title = 'Updated on client 2'; | ||||
| 		await Note.save(note2); | ||||
| 		note2 = await Note.load(note2.id); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const all = await allNotesFolders(); | ||||
|  | ||||
| 		await localNotesFoldersSameAsRemote(all, expect); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete remote notes', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		await Note.delete(note1.id); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const remotes = await remoteNotesAndFolders(); | ||||
| 		expect(remotes.length).toBe(1); | ||||
| 		expect(remotes[0].id).toBe(folder1.id); | ||||
|  | ||||
| 		const deletedItems = await BaseItem.deletedItems(syncTargetId()); | ||||
| 		expect(deletedItems.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not created deleted_items entries for items deleted via sync', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		await Folder.delete(folder1.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		const deletedItems = await BaseItem.deletedItems(syncTargetId()); | ||||
| 		expect(deletedItems.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete local notes', (async () => { | ||||
| 		// For these tests we pass the context around for each user. This is to make sure that the "deletedItemsProcessed" | ||||
| 		// property of the basicDelta() function is cleared properly at the end of a sync operation. If it is not cleared | ||||
| 		// it means items will no longer be deleted locally via sync. | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		const note2 = await Note.save({ title: 'deux', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		await Note.delete(note1.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		const items = await allNotesFolders(); | ||||
| 		expect(items.length).toBe(2); | ||||
| 		const deletedItems = await BaseItem.deletedItems(syncTargetId()); | ||||
| 		expect(deletedItems.length).toBe(0); | ||||
| 		await Note.delete(note2.id); | ||||
| 		await synchronizerStart(); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete remote folder', (async () => { | ||||
| 		await Folder.save({ title: 'folder1' }); | ||||
| 		const folder2 = await Folder.save({ title: 'folder2' }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		await Folder.delete(folder2.id); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const all = await allNotesFolders(); | ||||
| 		await localNotesFoldersSameAsRemote(all, expect); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete local folder', (async () => { | ||||
| 		await Folder.save({ title: 'folder1' }); | ||||
| 		const folder2 = await Folder.save({ title: 'folder2' }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		await Folder.delete(folder2.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		const items = await allNotesFolders(); | ||||
| 		await localNotesFoldersSameAsRemote(items, expect); | ||||
| 	})); | ||||
|  | ||||
| 	it('should cross delete all folders', (async () => { | ||||
| 		// If client1 and 2 have two folders, client 1 deletes item 1 and client | ||||
| 		// 2 deletes item 2, they should both end up with no items after sync. | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const folder2 = await Folder.save({ title: 'folder2' }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		await sleep(0.1); | ||||
| 		await Folder.delete(folder1.id); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await Folder.delete(folder2.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		const items2 = await allNotesFolders(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		const items1 = await allNotesFolders(); | ||||
| 		expect(items1.length).toBe(0); | ||||
| 		expect(items1.length).toBe(items2.length); | ||||
| 	})); | ||||
|  | ||||
| 	it('items should be downloaded again when user cancels in the middle of delta operation', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		synchronizer().testingHooks_ = ['cancelDeltaLoop2']; | ||||
| 		await synchronizerStart(); | ||||
| 		let notes = await Note.all(); | ||||
| 		expect(notes.length).toBe(0); | ||||
|  | ||||
| 		synchronizer().testingHooks_ = []; | ||||
| 		await synchronizerStart(); | ||||
| 		notes = await Note.all(); | ||||
| 		expect(notes.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should skip items that cannot be synced', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id }); | ||||
| 		const noteId = note1.id; | ||||
| 		await synchronizerStart(); | ||||
| 		let disabledItems = await BaseItem.syncDisabledItems(syncTargetId()); | ||||
| 		expect(disabledItems.length).toBe(0); | ||||
| 		await Note.save({ id: noteId, title: 'un mod' }); | ||||
| 		synchronizer().testingHooks_ = ['notesRejectedByTarget']; | ||||
| 		await synchronizerStart(); | ||||
| 		synchronizer().testingHooks_ = []; | ||||
| 		await synchronizerStart(); // Another sync to check that this item is now excluded from sync | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		const notes = await Note.all(); | ||||
| 		expect(notes.length).toBe(1); | ||||
| 		expect(notes[0].title).toBe('un'); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		disabledItems = await BaseItem.syncDisabledItems(syncTargetId()); | ||||
| 		expect(disabledItems.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow duplicate folder titles', (async () => { | ||||
| 		await Folder.save({ title: 'folder' }); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		let remoteF2 = await Folder.save({ title: 'folder' }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const localF2 = await Folder.load(remoteF2.id); | ||||
|  | ||||
| 		expect(localF2.title == remoteF2.title).toBe(true); | ||||
|  | ||||
| 		// Then that folder that has been renamed locally should be set in such a way | ||||
| 		// that synchronizing it applies the title change remotely, and that new title | ||||
| 		// should be retrieved by client 2. | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		remoteF2 = await Folder.load(remoteF2.id); | ||||
|  | ||||
| 		expect(remoteF2.title == localF2.title).toBe(true); | ||||
| 	})); | ||||
|  | ||||
	it('should create remote items with UTF-8 content', (async () => {
		// Non-ASCII (UTF-8) titles and bodies must round-trip through the sync target unchanged.
		const folder = await Folder.save({ title: 'Fahrräder' });
		await Note.save({ title: 'Fahrräder', body: 'Fahrräder', parent_id: folder.id });
		const all = await allNotesFolders();

		await synchronizerStart();

		await localNotesFoldersSameAsRemote(all, expect);
	}));

	it('should update remote items but not pull remote changes', (async () => {
		const folder = await Folder.save({ title: 'folder1' });
		const note = await Note.save({ title: 'un', parent_id: folder.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Note.save({ title: 'deux', parent_id: folder.id });
		await synchronizerStart();

		await switchClient(1);

		await Note.save({ title: 'un UPDATE', id: note.id });
		// Run only the "update_remote" sync step: the local change is pushed, but the
		// note created on client 2 must not be pulled down, so only 2 items exist locally.
		await synchronizerStart(null, { syncSteps: ['update_remote'] });
		const all = await allNotesFolders();
		expect(all.length).toBe(2);

		await switchClient(2);

		// A full sync on client 2 should still receive the update pushed above.
		await synchronizerStart();
		const note2 = await Note.load(note.id);
		expect(note2.title).toBe('un UPDATE');
	}));

	it('should create a new Welcome notebook on each client', (async () => {
		// Create the Welcome items on two separate clients

		await WelcomeUtils.createWelcomeItems();
		await synchronizerStart();

		await switchClient(2);

		await WelcomeUtils.createWelcomeItems();
		const beforeFolderCount = (await Folder.all()).length;
		const beforeNoteCount = (await Note.all()).length;
		expect(beforeFolderCount === 1).toBe(true);
		expect(beforeNoteCount > 1).toBe(true);

		await synchronizerStart();

		// Each client keeps its own copy of the Welcome items - they are not
		// deduplicated across clients, so the counts double after syncing.
		const afterFolderCount = (await Folder.all()).length;
		const afterNoteCount = (await Note.all()).length;

		expect(afterFolderCount).toBe(beforeFolderCount * 2);
		expect(afterNoteCount).toBe(beforeNoteCount * 2);

		// Changes to the Welcome items should be synced to all clients

		const f1 = (await Folder.all())[0];
		await Folder.save({ id: f1.id, title: 'Welcome MOD' });

		await synchronizerStart();

		await switchClient(1);

		await synchronizerStart();

		const f1_1 = await Folder.load(f1.id);
		expect(f1_1.title).toBe('Welcome MOD');
	}));

	it('should not wipe out user data when syncing with an empty target', (async () => {
		// Only these targets support the wipeOutFailSafe flag (in other words, the targets that use basicDelta)
		if (!['nextcloud', 'memory', 'filesystem', 'amazon_s3'].includes(syncTargetName())) return;

		for (let i = 0; i < 10; i++) await Note.save({ title: 'note' });

		Setting.setValue('sync.wipeOutFailSafe', true);
		await synchronizerStart();
		await fileApi().clearRoot(); // oops
		await synchronizerStart();
		expect((await Note.all()).length).toBe(10); // but since the fail-safe is on, the notes have not been deleted

		Setting.setValue('sync.wipeOutFailSafe', false); // Now switch it off
		await synchronizerStart();
		expect((await Note.all()).length).toBe(0); // Since the fail-safe was off, the data has been cleared

		// Handle case where the sync target has been wiped out, then the user creates one note and sync.

		for (let i = 0; i < 10; i++) await Note.save({ title: 'note' });
		Setting.setValue('sync.wipeOutFailSafe', true);
		await synchronizerStart();
		await fileApi().clearRoot();
		await Note.save({ title: 'ma note encore' });
		// The fail-safe again protects the 10 existing notes, and the new note is kept: 10 + 1 = 11.
		await synchronizerStart();
		expect((await Note.all()).length).toBe(11);
	}));
|  | ||||
| }); | ||||
							
								
								
									
										294
									
								
								packages/app-cli/tests/Synchronizer.conflicts.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										294
									
								
								packages/app-cli/tests/Synchronizer.conflicts.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,294 @@ | ||||
| import time from '@joplin/lib/time'; | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
| import { allNotesFolders, localNotesFoldersSameAsRemote } from './test-utils-synchronizer'; | ||||
|  | ||||
| const { synchronizerStart, setupDatabaseAndSynchronizer, sleep, switchClient, syncTargetId, loadEncryptionMasterKey, decryptionWorker } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const BaseItem = require('@joplin/lib/models/BaseItem.js'); | ||||
|  | ||||
| describe('Synchronizer.conflicts', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		await setupDatabaseAndSynchronizer(1); | ||||
| 		await setupDatabaseAndSynchronizer(2); | ||||
| 		await switchClient(1); | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should resolve note conflicts', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		let note2 = await Note.load(note1.id); | ||||
| 		note2.title = 'Updated on client 2'; | ||||
| 		await Note.save(note2); | ||||
| 		note2 = await Note.load(note2.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		let note2conf = await Note.load(note1.id); | ||||
| 		note2conf.title = 'Updated on client 1'; | ||||
| 		await Note.save(note2conf); | ||||
| 		note2conf = await Note.load(note1.id); | ||||
| 		await synchronizerStart(); | ||||
| 		const conflictedNotes = await Note.conflictedNotes(); | ||||
| 		expect(conflictedNotes.length).toBe(1); | ||||
|  | ||||
| 		// Other than the id (since the conflicted note is a duplicate), and the is_conflict property | ||||
| 		// the conflicted and original note must be the same in every way, to make sure no data has been lost. | ||||
| 		const conflictedNote = conflictedNotes[0]; | ||||
| 		expect(conflictedNote.id == note2conf.id).toBe(false); | ||||
| 		for (const n in conflictedNote) { | ||||
| 			if (!conflictedNote.hasOwnProperty(n)) continue; | ||||
| 			if (n == 'id' || n == 'is_conflict') continue; | ||||
| 			expect(conflictedNote[n]).toBe(note2conf[n]); | ||||
| 		} | ||||
|  | ||||
| 		const noteUpdatedFromRemote = await Note.load(note1.id); | ||||
| 		for (const n in noteUpdatedFromRemote) { | ||||
| 			if (!noteUpdatedFromRemote.hasOwnProperty(n)) continue; | ||||
| 			expect(noteUpdatedFromRemote[n]).toBe(note2[n]); | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should resolve folders conflicts', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); // ---------------------------------- | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		let folder1_modRemote = await Folder.load(folder1.id); | ||||
| 		folder1_modRemote.title = 'folder1 UPDATE CLIENT 2'; | ||||
| 		await Folder.save(folder1_modRemote); | ||||
| 		folder1_modRemote = await Folder.load(folder1_modRemote.id); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); // ---------------------------------- | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		let folder1_modLocal = await Folder.load(folder1.id); | ||||
| 		folder1_modLocal.title = 'folder1 UPDATE CLIENT 1'; | ||||
| 		await Folder.save(folder1_modLocal); | ||||
| 		folder1_modLocal = await Folder.load(folder1.id); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const folder1_final = await Folder.load(folder1.id); | ||||
| 		expect(folder1_final.title).toBe(folder1_modRemote.title); | ||||
| 	})); | ||||
|  | ||||
| 	it('should resolve conflict if remote folder has been deleted, but note has been added to folder locally', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		await Folder.delete(folder1.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await Note.save({ title: 'note1', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
| 		const items = await allNotesFolders(); | ||||
| 		expect(items.length).toBe(1); | ||||
| 		expect(items[0].title).toBe('note1'); | ||||
| 		expect(items[0].is_conflict).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should resolve conflict if note has been deleted remotely and locally', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder' }); | ||||
| 		const note = await Note.save({ title: 'note', parent_id: folder.title }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		await Note.delete(note.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await Note.delete(note.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const items = await allNotesFolders(); | ||||
| 		expect(items.length).toBe(1); | ||||
| 		expect(items[0].title).toBe('folder'); | ||||
|  | ||||
| 		await localNotesFoldersSameAsRemote(items, expect); | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle conflict when remote note is deleted then local note is modified', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		await Note.delete(note1.id); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		const newTitle = 'Modified after having been deleted'; | ||||
| 		await Note.save({ id: note1.id, title: newTitle }); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const conflictedNotes = await Note.conflictedNotes(); | ||||
|  | ||||
| 		expect(conflictedNotes.length).toBe(1); | ||||
| 		expect(conflictedNotes[0].title).toBe(newTitle); | ||||
|  | ||||
| 		const unconflictedNotes = await Note.unconflictedNotes(); | ||||
|  | ||||
| 		expect(unconflictedNotes.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle conflict when remote folder is deleted then local folder is renamed', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		await Folder.save({ title: 'folder2' }); | ||||
| 		await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		await Folder.delete(folder1.id); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await sleep(0.1); | ||||
|  | ||||
| 		const newTitle = 'Modified after having been deleted'; | ||||
| 		await Folder.save({ id: folder1.id, title: newTitle }); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		const items = await allNotesFolders(); | ||||
|  | ||||
| 		expect(items.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not sync notes with conflicts', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder' }); | ||||
| 		await Note.save({ title: 'mynote', parent_id: f1.id, is_conflict: 1 }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		const notes = await Note.all(); | ||||
| 		const folders = await Folder.all(); | ||||
| 		expect(notes.length).toBe(0); | ||||
| 		expect(folders.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not try to delete on remote conflicted notes that have been deleted', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder' }); | ||||
| 		const n1 = await Note.save({ title: 'mynote', parent_id: f1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		await Note.save({ id: n1.id, is_conflict: 1 }); | ||||
| 		await Note.delete(n1.id); | ||||
| 		const deletedItems = await BaseItem.deletedItems(syncTargetId()); | ||||
|  | ||||
| 		expect(deletedItems.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	async function ignorableNoteConflictTest(withEncryption: boolean) { | ||||
| 		if (withEncryption) { | ||||
| 			Setting.setValue('encryption.enabled', true); | ||||
| 			await loadEncryptionMasterKey(); | ||||
| 		} | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		if (withEncryption) { | ||||
| 			await loadEncryptionMasterKey(null, true); | ||||
| 			await decryptionWorker().start(); | ||||
| 		} | ||||
| 		let note2 = await Note.load(note1.id); | ||||
| 		note2.todo_completed = time.unixMs() - 1; | ||||
| 		await Note.save(note2); | ||||
| 		note2 = await Note.load(note2.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		let note2conf = await Note.load(note1.id); | ||||
| 		note2conf.todo_completed = time.unixMs(); | ||||
| 		await Note.save(note2conf); | ||||
| 		note2conf = await Note.load(note1.id); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		if (!withEncryption) { | ||||
| 			// That was previously a common conflict: | ||||
| 			// - Client 1 mark todo as "done", and sync | ||||
| 			// - Client 2 doesn't sync, mark todo as "done" todo. Then sync. | ||||
| 			// In theory it is a conflict because the todo_completed dates are different | ||||
| 			// but in practice it doesn't matter, we can just take the date when the | ||||
| 			// todo was marked as "done" the first time. | ||||
|  | ||||
| 			const conflictedNotes = await Note.conflictedNotes(); | ||||
| 			expect(conflictedNotes.length).toBe(0); | ||||
|  | ||||
| 			const notes = await Note.all(); | ||||
| 			expect(notes.length).toBe(1); | ||||
| 			expect(notes[0].id).toBe(note1.id); | ||||
| 			expect(notes[0].todo_completed).toBe(note2.todo_completed); | ||||
| 		} else { | ||||
| 			// If the notes are encrypted however it's not possible to do this kind of | ||||
| 			// smart conflict resolving since we don't know the content, so in that | ||||
| 			// case it's handled as a regular conflict. | ||||
|  | ||||
| 			const conflictedNotes = await Note.conflictedNotes(); | ||||
| 			expect(conflictedNotes.length).toBe(1); | ||||
|  | ||||
| 			const notes = await Note.all(); | ||||
| 			expect(notes.length).toBe(2); | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	it('should not consider it is a conflict if neither the title nor body of the note have changed', (async () => { | ||||
| 		await ignorableNoteConflictTest(false); | ||||
| 	})); | ||||
|  | ||||
| 	it('should always handle conflict if local or remote are encrypted', (async () => { | ||||
| 		await ignorableNoteConflictTest(true); | ||||
| 	})); | ||||
|  | ||||
| }); | ||||
							
								
								
									
										403
									
								
								packages/app-cli/tests/Synchronizer.e2ee.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										403
									
								
								packages/app-cli/tests/Synchronizer.e2ee.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,403 @@ | ||||
| import time from '@joplin/lib/time'; | ||||
| import shim from '@joplin/lib/shim'; | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
|  | ||||
| const { synchronizerStart, allSyncTargetItemsEncrypted, kvStore, setupDatabaseAndSynchronizer, synchronizer, fileApi, switchClient, encryptionService, loadEncryptionMasterKey, decryptionWorker, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Resource = require('@joplin/lib/models/Resource.js'); | ||||
| const ResourceFetcher = require('@joplin/lib/services/ResourceFetcher'); | ||||
| const MasterKey = require('@joplin/lib/models/MasterKey'); | ||||
| const BaseItem = require('@joplin/lib/models/BaseItem.js'); | ||||
|  | ||||
| let insideBeforeEach = false; | ||||
|  | ||||
| describe('Synchronizer.e2ee', function() { | ||||
|  | ||||
	beforeEach(async (done) => {
		// Flag used by some tests below to wait until setup has completed, because
		// they have been observed to start before beforeEach has finished.
		insideBeforeEach = true;

		await setupDatabaseAndSynchronizer(1);
		await setupDatabaseAndSynchronizer(2);
		await switchClient(1);
		done();

		// NOTE(review): the flag is cleared after done() is called, so a test could in
		// principle start while it is still true - that is exactly what the waiting
		// loops in the tests below guard against.
		insideBeforeEach = false;
	});
|  | ||||
	it('notes and folders should get encrypted when encryption is enabled', (async () => {
		// End-to-end round trip: client 1 encrypts on upload, client 2 receives
		// ciphertext only, then decrypts once it has the master key and password.
		Setting.setValue('encryption.enabled', true);
		const masterKey = await loadEncryptionMasterKey();
		const folder1 = await Folder.save({ title: 'folder1' });
		let note1 = await Note.save({ title: 'un', body: 'to be encrypted', parent_id: folder1.id });
		await synchronizerStart();
		// After synchronisation, remote items should be encrypted but local ones remain plain text
		note1 = await Note.load(note1.id);
		expect(note1.title).toBe('un');

		await switchClient(2);

		await synchronizerStart();
		let folder1_2 = await Folder.load(folder1.id);
		let note1_2 = await Note.load(note1.id);
		const masterKey_2 = await MasterKey.load(masterKey.id);
		// On this side however it should be received encrypted
		expect(!note1_2.title).toBe(true);
		expect(!folder1_2.title).toBe(true);
		expect(!!note1_2.encryption_cipher_text).toBe(true);
		expect(!!folder1_2.encryption_cipher_text).toBe(true);
		// Master key is already encrypted so it does not get re-encrypted during sync
		expect(masterKey_2.content).toBe(masterKey.content);
		expect(masterKey_2.checksum).toBe(masterKey.checksum);
		// Now load the master key we got from client 1 and try to decrypt
		await encryptionService().loadMasterKey_(masterKey_2, '123456', true);
		// Get the decrypted items back
		await Folder.decrypt(folder1_2);
		await Note.decrypt(note1_2);
		folder1_2 = await Folder.load(folder1.id);
		note1_2 = await Note.load(note1.id);
		// Check that properties match the original items. Also check
		// the encryption did not affect the updated_time timestamp.
		expect(note1_2.title).toBe(note1.title);
		expect(note1_2.body).toBe(note1.body);
		expect(note1_2.updated_time).toBe(note1.updated_time);
		expect(!note1_2.encryption_cipher_text).toBe(true);
		expect(folder1_2.title).toBe(folder1.title);
		expect(folder1_2.updated_time).toBe(folder1.updated_time);
		expect(!folder1_2.encryption_cipher_text).toBe(true);
	}));

	it('should enable encryption automatically when downloading new master key (and none was previously available)',(async () => {
		// Enable encryption on client 1 and sync an item
		Setting.setValue('encryption.enabled', true);
		await loadEncryptionMasterKey();
		let folder1 = await Folder.save({ title: 'folder1' });
		await synchronizerStart();

		await switchClient(2);

		// Synchronising should enable encryption since we're going to get a master key
		expect(Setting.value('encryption.enabled')).toBe(false);
		await synchronizerStart();
		expect(Setting.value('encryption.enabled')).toBe(true);

		// Check that we got the master key from client 1
		const masterKey = (await MasterKey.all())[0];
		expect(!!masterKey).toBe(true);

		// Since client 2 hasn't supplied a password yet, no master key is currently loaded
		expect(encryptionService().loadedMasterKeyIds().length).toBe(0);

		// If we sync now, nothing should be sent to target since we don't have a password.
		// Technically it's incorrect to set the property of an encrypted variable but it allows confirming
		// that encryption doesn't work if user hasn't supplied a password.
		await BaseItem.forceSync(folder1.id);
		await synchronizerStart();

		await switchClient(1);

		await synchronizerStart();
		folder1 = await Folder.load(folder1.id);
		expect(folder1.title).toBe('folder1'); // Still at old value

		await switchClient(2);

		// Now client 2 set the master key password
		Setting.setObjectValue('encryption.passwordCache', masterKey.id, '123456');
		await encryptionService().loadMasterKeysFromSettings();

		// Now that master key should be loaded
		expect(encryptionService().loadedMasterKeyIds()[0]).toBe(masterKey.id);

		// Decrypt all the data. Now change the title and sync again - this time the changes should be transmitted
		await decryptionWorker().start();
		await Folder.save({ id: folder1.id, title: 'change test' });

		// If we sync now, this time client 1 should get the changes we did earlier
		await synchronizerStart();

		await switchClient(1);

		await synchronizerStart();
		// Decrypt the data we just got
		await decryptionWorker().start();
		folder1 = await Folder.load(folder1.id);
		expect(folder1.title).toBe('change test'); // Got title from client 2
	}));
|  | ||||
	it('should encrypt existing notes too when enabling E2EE', (async () => {
		// First create a folder, without encryption enabled, and sync it
		await Folder.save({ title: 'folder1' });
		await synchronizerStart();
		let files = await fileApi().list('', { includeDirs: false, syncItemsOnly: true });
		let content = await fileApi().get(files.items[0].path);
		expect(content.indexOf('folder1') >= 0).toBe(true);

		// Then enable encryption and sync again
		let masterKey = await encryptionService().generateMasterKey('123456');
		masterKey = await MasterKey.save(masterKey);
		await encryptionService().enableEncryption(masterKey, '123456');
		await encryptionService().loadMasterKeysFromSettings();
		await synchronizerStart();

		// Even though the folder has not been changed it should have been synced again so that
		// an encrypted version of it replaces the decrypted version.
		files = await fileApi().list('', { includeDirs: false, syncItemsOnly: true });
		expect(files.items.length).toBe(2);
		// By checking that the folder title is not present, we can confirm that the item has indeed been encrypted
		// One of the two items is the master key
		content = await fileApi().get(files.items[0].path);
		expect(content.indexOf('folder1') < 0).toBe(true);
		content = await fileApi().get(files.items[1].path);
		expect(content.indexOf('folder1') < 0).toBe(true);
	}));

	it('should upload decrypted items to sync target after encryption disabled', (async () => {
		Setting.setValue('encryption.enabled', true);
		await loadEncryptionMasterKey();

		await Folder.save({ title: 'folder1' });
		await synchronizerStart();

		let allEncrypted = await allSyncTargetItemsEncrypted();
		expect(allEncrypted).toBe(true);

		// Disabling E2EE must re-upload the items as plain text.
		await encryptionService().disableEncryption();

		await synchronizerStart();
		allEncrypted = await allSyncTargetItemsEncrypted();
		expect(allEncrypted).toBe(false);
	}));

	it('should not upload any item if encryption was enabled, and items have not been decrypted, and then encryption disabled', (async () => {
		// For some reason I can't explain, this test is sometimes executed before beforeEach is finished
		// which means it's going to fail in unexpected way. So the loop below wait for beforeEach to be done.
		while (insideBeforeEach) await time.msleep(100);

		Setting.setValue('encryption.enabled', true);
		const masterKey = await loadEncryptionMasterKey();

		await Folder.save({ title: 'folder1' });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		expect(Setting.value('encryption.enabled')).toBe(true);

		// If we try to disable encryption now, it should throw an error because some items are
		// currently encrypted. They must be decrypted first so that they can be sent as
		// plain text to the sync target.
		// let hasThrown = await checkThrowAsync(async () => await encryptionService().disableEncryption());
		// expect(hasThrown).toBe(true);

		// Now supply the password, and decrypt the items
		Setting.setObjectValue('encryption.passwordCache', masterKey.id, '123456');
		await encryptionService().loadMasterKeysFromSettings();
		await decryptionWorker().start();

		// Try to disable encryption again
		const hasThrown = await checkThrowAsync(async () => await encryptionService().disableEncryption());
		expect(hasThrown).toBe(false);

		// If we sync now the target should receive the decrypted items
		await synchronizerStart();
		const allEncrypted = await allSyncTargetItemsEncrypted();
		expect(allEncrypted).toBe(false);
	}));
|  | ||||
	it('should set the resource file size after decryption', (async () => {
		Setting.setValue('encryption.enabled', true);
		const masterKey = await loadEncryptionMasterKey();

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		const resource1 = (await Resource.all())[0];
		// Force the stored size to an invalid value so decryption has to restore it.
		await Resource.setFileSizeOnly(resource1.id, -1);
		Resource.fullPath(resource1);
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		Setting.setObjectValue('encryption.passwordCache', masterKey.id, '123456');
		await encryptionService().loadMasterKeysFromSettings();

		const fetcher = new ResourceFetcher(() => { return synchronizer().api(); });
		// NOTE(review): queueDownload_ is not awaited here; waitForAllFinished() below
		// appears to be what guarantees the download completed - confirm.
		fetcher.queueDownload_(resource1.id);
		await fetcher.waitForAllFinished();
		await decryptionWorker().start();

		// 2720 is the known byte size of tests/support/photo.jpg.
		const resource1_2 = await Resource.load(resource1.id);
		expect(resource1_2.size).toBe(2720);
	}));

	it('should encrypt remote resources after encryption has been enabled', (async () => {
		while (insideBeforeEach) await time.msleep(100);

		// Attach and sync a resource before E2EE is enabled...
		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		await synchronizerStart();

		expect(await allSyncTargetItemsEncrypted()).toBe(false);

		// ...then enabling E2EE and syncing again must re-upload it encrypted.
		const masterKey = await loadEncryptionMasterKey();
		await encryptionService().enableEncryption(masterKey, '123456');
		await encryptionService().loadMasterKeysFromSettings();

		await synchronizerStart();

		expect(await allSyncTargetItemsEncrypted()).toBe(true);
	}));

	it('should upload encrypted resource, but it should not mark the blob as encrypted locally', (async () => {
		while (insideBeforeEach) await time.msleep(100);

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		const masterKey = await loadEncryptionMasterKey();
		await encryptionService().enableEncryption(masterKey, '123456');
		await encryptionService().loadMasterKeysFromSettings();
		await synchronizerStart();

		// The uploading client keeps its local blob in plain text.
		const resource1 = (await Resource.all())[0];
		expect(resource1.encryption_blob_encrypted).toBe(0);
	}));

	it('should decrypt the resource metadata, but not try to decrypt the file, if it is not present', (async () => {
		const note1 = await Note.save({ title: 'note' });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		const masterKey = await loadEncryptionMasterKey();
		await encryptionService().enableEncryption(masterKey, '123456');
		await encryptionService().loadMasterKeysFromSettings();
		await synchronizerStart();
		expect(await allSyncTargetItemsEncrypted()).toBe(true);

		await switchClient(2);

		await synchronizerStart();
		Setting.setObjectValue('encryption.passwordCache', masterKey.id, '123456');
		await encryptionService().loadMasterKeysFromSettings();
		await decryptionWorker().start();

		// Metadata is decrypted, but the blob itself has not been downloaded yet
		// so it must still be flagged as encrypted.
		let resource = (await Resource.all())[0];

		expect(!!resource.encryption_applied).toBe(false);
		expect(!!resource.encryption_blob_encrypted).toBe(true);

		const resourceFetcher = new ResourceFetcher(() => { return synchronizer().api(); });
		await resourceFetcher.start();
		await resourceFetcher.waitForAllFinished();

		const ls = await Resource.localState(resource);
		expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE);

		// Once the blob is fetched, a second decryption pass clears the flag.
		await decryptionWorker().start();
		resource = (await Resource.all())[0];

		expect(!!resource.encryption_blob_encrypted).toBe(false);
	}));
|  | ||||
| 	it('should stop trying to decrypt item after a few attempts', (async () => { | ||||
| 		let hasThrown; | ||||
|  | ||||
| 		const note = await Note.save({ title: 'ma note' }); | ||||
| 		const masterKey = await loadEncryptionMasterKey(); | ||||
| 		await encryptionService().enableEncryption(masterKey, '123456'); | ||||
| 		await encryptionService().loadMasterKeysFromSettings(); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		// First, simulate a broken note and check that the decryption worker | ||||
| 		// gives up decrypting after a number of tries. This is mainly relevant | ||||
| 		// for data that crashes the mobile application - we don't want to keep | ||||
| 		// decrypting these. | ||||
|  | ||||
| 		const encryptedNote = await Note.load(note.id); | ||||
| 		const goodCipherText = encryptedNote.encryption_cipher_text; | ||||
| 		await Note.save({ id: note.id, encryption_cipher_text: 'doesntlookright' }); | ||||
|  | ||||
| 		Setting.setObjectValue('encryption.passwordCache', masterKey.id, '123456'); | ||||
| 		await encryptionService().loadMasterKeysFromSettings(); | ||||
|  | ||||
| 		hasThrown = await checkThrowAsync(async () => await decryptionWorker().start({ errorHandler: 'throw' })); | ||||
| 		expect(hasThrown).toBe(true); | ||||
|  | ||||
| 		hasThrown = await checkThrowAsync(async () => await decryptionWorker().start({ errorHandler: 'throw' })); | ||||
| 		expect(hasThrown).toBe(true); | ||||
|  | ||||
| 		// Third time, an error is logged and no error is thrown | ||||
| 		hasThrown = await checkThrowAsync(async () => await decryptionWorker().start({ errorHandler: 'throw' })); | ||||
| 		expect(hasThrown).toBe(false); | ||||
|  | ||||
| 		const disabledItems = await decryptionWorker().decryptionDisabledItems(); | ||||
| 		expect(disabledItems.length).toBe(1); | ||||
| 		expect(disabledItems[0].id).toBe(note.id); | ||||
|  | ||||
| 		expect((await kvStore().all()).length).toBe(1); | ||||
| 		await kvStore().clear(); | ||||
|  | ||||
| 		// Now check that if it fails once but succeed the second time, the note | ||||
| 		// is correctly decrypted and the counters are cleared. | ||||
|  | ||||
| 		hasThrown = await checkThrowAsync(async () => await decryptionWorker().start({ errorHandler: 'throw' })); | ||||
| 		expect(hasThrown).toBe(true); | ||||
|  | ||||
| 		await Note.save({ id: note.id, encryption_cipher_text: goodCipherText }); | ||||
|  | ||||
| 		hasThrown = await checkThrowAsync(async () => await decryptionWorker().start({ errorHandler: 'throw' })); | ||||
| 		expect(hasThrown).toBe(false); | ||||
|  | ||||
| 		const decryptedNote = await Note.load(note.id); | ||||
| 		expect(decryptedNote.title).toBe('ma note'); | ||||
|  | ||||
| 		expect((await kvStore().all()).length).toBe(0); | ||||
| 		expect((await decryptionWorker().decryptionDisabledItems()).length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not encrypt notes that are shared', (async () => { | ||||
| 		Setting.setValue('encryption.enabled', true); | ||||
| 		await loadEncryptionMasterKey(); | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'un', parent_id: folder1.id }); | ||||
| 		let note2 = await Note.save({ title: 'deux', parent_id: folder1.id }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		const origNote2 = Object.assign({}, note2); | ||||
| 		await BaseItem.updateShareStatus(note2, true); | ||||
| 		note2 = await Note.load(note2.id); | ||||
|  | ||||
| 		// Sharing a note should not modify the timestamps | ||||
| 		expect(note2.user_updated_time).toBe(origNote2.user_updated_time); | ||||
| 		expect(note2.user_created_time).toBe(origNote2.user_created_time); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		// The shared note should be decrypted | ||||
| 		const note2_2 = await Note.load(note2.id); | ||||
| 		expect(note2_2.title).toBe('deux'); | ||||
| 		expect(note2_2.is_shared).toBe(1); | ||||
|  | ||||
| 		// The non-shared note should be encrypted | ||||
| 		const note1_2 = await Note.load(note1.id); | ||||
| 		expect(note1_2.title).toBe(''); | ||||
| 	})); | ||||
|  | ||||
| }); | ||||
							
								
								
									
										357
									
								
								packages/app-cli/tests/Synchronizer.resources.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										357
									
								
								packages/app-cli/tests/Synchronizer.resources.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,357 @@ | ||||
| import time from '@joplin/lib/time'; | ||||
| import shim from '@joplin/lib/shim'; | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
| import { NoteEntity } from '@joplin/lib/services/database/types'; | ||||
| import { remoteNotesFoldersResources, remoteResources } from './test-utils-synchronizer'; | ||||
|  | ||||
| const { synchronizerStart, tempFilePath, resourceFetcher, setupDatabaseAndSynchronizer, synchronizer, fileApi, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Resource = require('@joplin/lib/models/Resource.js'); | ||||
| const ResourceFetcher = require('@joplin/lib/services/ResourceFetcher'); | ||||
| const BaseItem = require('@joplin/lib/models/BaseItem.js'); | ||||
|  | ||||
// True while beforeEach() is still running. Tests in this suite poll this
// flag before doing anything, so that they never overlap with the async
// profile setup.
let insideBeforeEach = false;

describe('Synchronizer.resources', function() {

	beforeEach(async (done) => {
		insideBeforeEach = true;

		// Set up two client profiles so the tests can simulate syncing
		// between two devices, then make client 1 the active one.
		await setupDatabaseAndSynchronizer(1);
		await setupDatabaseAndSynchronizer(2);
		await switchClient(1);
		done();

		insideBeforeEach = false;
	});

	// A resource created on client 1 should sync to client 2, and the
	// downloaded blob should be identical to the original file.
	it('should sync resources', (async () => {
		while (insideBeforeEach) await time.msleep(500);

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		const resource1 = (await Resource.all())[0];
		const resourcePath1 = Resource.fullPath(resource1);
		await synchronizerStart();
		// Three remote items: the folder, the note and the resource metadata.
		expect((await remoteNotesFoldersResources()).length).toBe(3);

		await switchClient(2);

		await synchronizerStart();
		const allResources = await Resource.all();
		expect(allResources.length).toBe(1);
		let resource1_2 = allResources[0];
		let ls = await Resource.localState(resource1_2);
		expect(resource1_2.id).toBe(resource1.id);
		// Sync only transfers the metadata - the blob itself is not fetched yet.
		expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_IDLE);

		// Explicitly download the blob (queueDownload_ is normally internal,
		// but calling it directly gives the test control over timing).
		const fetcher = new ResourceFetcher(() => { return synchronizer().api(); });
		fetcher.queueDownload_(resource1_2.id);
		await fetcher.waitForAllFinished();

		resource1_2 = await Resource.load(resource1.id);
		ls = await Resource.localState(resource1_2);
		expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE);

		const resourcePath1_2 = Resource.fullPath(resource1_2);
		expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
	}));

	// A failed blob download should be recorded in the resource's local
	// state, along with the error message.
	it('should handle resource download errors', (async () => {
		while (insideBeforeEach) await time.msleep(500);

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		let resource1 = (await Resource.all())[0];
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();

		// Give the fetcher a fake API whose get() always rejects, to
		// simulate a failed download.
		const fetcher = new ResourceFetcher(() => {
			return {
				get: () => { return new Promise((_resolve: Function, reject: Function) => { reject(new Error('did not work')); }); },
			};
		});
		fetcher.queueDownload_(resource1.id);
		await fetcher.waitForAllFinished();

		resource1 = await Resource.load(resource1.id);
		const ls = await Resource.localState(resource1);
		expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_ERROR);
		expect(ls.fetch_error).toBe('did not work');
	}));

	// When the resource size is unknown (-1), fetching the blob should fill
	// it in from the downloaded file.
	it('should set the resource file size if it is missing', (async () => {
		while (insideBeforeEach) await time.msleep(500);

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		let r1 = (await Resource.all())[0];
		// Force the size to the "unknown" marker value.
		await Resource.setFileSizeOnly(r1.id, -1);
		r1 = await Resource.load(r1.id);
		expect(r1.size).toBe(-1);

		const fetcher = new ResourceFetcher(() => { return synchronizer().api(); });
		fetcher.queueDownload_(r1.id);
		await fetcher.waitForAllFinished();
		r1 = await Resource.load(r1.id);
		// Size of the downloaded photo.jpg blob.
		expect(r1.size).toBe(2720);
	}));

	// Deleting a resource on one client should remove both the metadata and
	// the remote blob, and propagate the deletion (including the local file)
	// to the other client.
	it('should delete resources', (async () => {
		while (insideBeforeEach) await time.msleep(500);

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		const resource1 = (await Resource.all())[0];
		const resourcePath1 = Resource.fullPath(resource1);
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		let allResources = await Resource.all();
		expect(allResources.length).toBe(1);
		expect((await remoteNotesFoldersResources()).length).toBe(3);
		await Resource.delete(resource1.id);
		await synchronizerStart();
		expect((await remoteNotesFoldersResources()).length).toBe(2);

		// The remote blob must be gone too, not just the metadata.
		const remoteBlob = await fileApi().stat(`.resource/${resource1.id}`);
		expect(!remoteBlob).toBe(true);

		await switchClient(1);

		expect(await shim.fsDriver().exists(resourcePath1)).toBe(true);
		await synchronizerStart();
		allResources = await Resource.all();
		expect(allResources.length).toBe(0);
		// The local blob file is deleted along with the resource.
		expect(await shim.fsDriver().exists(resourcePath1)).toBe(false);
	}));

	// With E2EE enabled on client 1, client 2 should be able to download the
	// encrypted blob and decrypt it back to the original content.
	it('should encrypt resources', (async () => {
		Setting.setValue('encryption.enabled', true);
		const masterKey = await loadEncryptionMasterKey();

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		const resource1 = (await Resource.all())[0];
		const resourcePath1 = Resource.fullPath(resource1);
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		// Provide the master key password on client 2 so decryption is possible.
		Setting.setObjectValue('encryption.passwordCache', masterKey.id, '123456');
		await encryptionService().loadMasterKeysFromSettings();

		const fetcher = new ResourceFetcher(() => { return synchronizer().api(); });
		fetcher.queueDownload_(resource1.id);
		await fetcher.waitForAllFinished();

		let resource1_2 = (await Resource.all())[0];
		resource1_2 = await Resource.decrypt(resource1_2);
		const resourcePath1_2 = Resource.fullPath(resource1_2);

		expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
	}));

	// Modifying the blob content on one client should update both the size
	// and the content on the other client after a sync + fetch round trip.
	it('should sync resource blob changes', (async () => {
		const tempFile = tempFilePath('txt');
		await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, tempFile);
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await resourceFetcher().start();
		await resourceFetcher().waitForAllFinished();
		let resource1_2 = (await Resource.all())[0];
		const modFile = tempFilePath('txt');
		await shim.fsDriver().writeFile(modFile, '1234 MOD', 'utf8');
		await Resource.updateResourceBlobContent(resource1_2.id, modFile);
		// resource1_2 still holds the row loaded *before* the update, so this
		// is the original size.
		const originalSize = resource1_2.size;
		resource1_2 = (await Resource.all())[0];
		const newSize = resource1_2.size;
		expect(originalSize).toBe(4);
		expect(newSize).toBe(8);
		await synchronizerStart();

		await switchClient(1);

		await synchronizerStart();
		await resourceFetcher().start();
		await resourceFetcher().waitForAllFinished();
		const resource1_1 = (await Resource.all())[0];
		expect(resource1_1.size).toBe(newSize);
		expect(await Resource.resourceBlobContent(resource1_1.id, 'utf8')).toBe('1234 MOD');
	}));

	// When both clients modify the same resource blob, the remote version
	// should win locally and the local version should be preserved via a
	// Conflict note.
	it('should handle resource conflicts', (async () => {
		{
			const tempFile = tempFilePath('txt');
			await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
			const folder1 = await Folder.save({ title: 'folder1' });
			const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
			await shim.attachFileToNote(note1, tempFile);
			await synchronizerStart();
		}

		await switchClient(2);

		{
			await synchronizerStart();
			await resourceFetcher().start();
			await resourceFetcher().waitForAllFinished();
			const resource = (await Resource.all())[0];
			const modFile2 = tempFilePath('txt');
			await shim.fsDriver().writeFile(modFile2, '1234 MOD 2', 'utf8');
			await Resource.updateResourceBlobContent(resource.id, modFile2);
			await synchronizerStart();
		}

		await switchClient(1);

		{
			// Going to modify a resource without syncing first, which will cause a conflict
			const resource = (await Resource.all())[0];
			const modFile1 = tempFilePath('txt');
			await shim.fsDriver().writeFile(modFile1, '1234 MOD 1', 'utf8');
			await Resource.updateResourceBlobContent(resource.id, modFile1);
			await synchronizerStart(); // CONFLICT

			// If we try to read the resource content now, it should throw because the local
			// content has been moved to the conflict notebook, and the new local content
			// has not been downloaded yet.
			await checkThrowAsync(async () => await Resource.resourceBlobContent(resource.id));

			// Now download resources, and our local content would have been overwritten by
			// the content from client 2
			await resourceFetcher().start();
			await resourceFetcher().waitForAllFinished();
			const localContent =  await Resource.resourceBlobContent(resource.id, 'utf8');
			expect(localContent).toBe('1234 MOD 2');

			// Check that the Conflict note has been generated, with the conflict resource
			// attached to it, and check that it has the original content.
			const allNotes = await Note.all();
			expect(allNotes.length).toBe(2);
			const conflictNote = allNotes.find((v: NoteEntity) => {
				return !!v.is_conflict;
			});
			expect(!!conflictNote).toBe(true);
			const resourceIds = await Note.linkedResourceIds(conflictNote.body);
			expect(resourceIds.length).toBe(1);
			const conflictContent =  await Resource.resourceBlobContent(resourceIds[0], 'utf8');
			expect(conflictContent).toBe('1234 MOD 1');
		}
	}));

	// Local change vs remote deletion: the original resource is gone, but
	// the locally modified copy is preserved under a new ID.
	it('should handle resource conflicts if a resource is changed locally but deleted remotely', (async () => {
		{
			const tempFile = tempFilePath('txt');
			await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
			const folder1 = await Folder.save({ title: 'folder1' });
			const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
			await shim.attachFileToNote(note1, tempFile);
			await synchronizerStart();
		}

		await switchClient(2);

		{
			await synchronizerStart();
			await resourceFetcher().start();
			await resourceFetcher().waitForAllFinished();
		}

		await switchClient(1);

		{
			// Delete the resource on client 1 and push the deletion.
			const resource = (await Resource.all())[0];
			await Resource.delete(resource.id);
			await synchronizerStart();

		}

		await switchClient(2);

		{
			// Meanwhile client 2 modifies the (now remotely deleted) resource.
			const originalResource = (await Resource.all())[0];
			await Resource.save({ id: originalResource.id, title: 'modified resource' });
			await synchronizerStart(); // CONFLICT

			// The original resource has been deleted...
			const deletedResource = await Resource.load(originalResource.id);
			expect(!deletedResource).toBe(true);

			// ...but the local modification survives as a new resource.
			const allResources = await Resource.all();
			expect(allResources.length).toBe(1);
			const conflictResource = allResources[0];
			expect(originalResource.id).not.toBe(conflictResource.id);
			expect(conflictResource.title).toBe('modified resource');
		}
	}));

	it('should not upload a resource if it has not been fetched yet', (async () => {
		// In some rare cases, the synchronizer might try to upload a resource even though it
		// doesn't have the resource file. It can happen in this situation:
		// - C1 create resource
		// - C1 sync
		// - C2 sync
		// - C2 resource metadata is received but ResourceFetcher hasn't downloaded the file yet
		// - C2 enables E2EE - all the items are marked for forced sync
		// - C2 sync
		// The synchronizer will try to upload the resource, even though it doesn't have the file,
		// so we need to make sure it doesn't. But also that once it gets the file, the resource
		// does get uploaded.

		const note1 = await Note.save({ title: 'note' });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		const resource = (await Resource.all())[0];
		// Pretend the blob has not been fetched yet.
		await Resource.setLocalState(resource.id, { fetch_status: Resource.FETCH_STATUS_IDLE });
		await synchronizerStart();

		expect((await remoteResources()).length).toBe(0);

		await Resource.setLocalState(resource.id, { fetch_status: Resource.FETCH_STATUS_DONE });
		await synchronizerStart();

		expect((await remoteResources()).length).toBe(1);
	}));

	// Resources larger than the configured maximum should be skipped and
	// marked as sync-disabled rather than downloaded.
	it('should not download resources over the limit', (async () => {
		const note1 = await Note.save({ title: 'note' });
		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
		await synchronizer().start();

		await switchClient(2);

		// Temporarily lower the size limit so the photo exceeds it.
		const previousMax = synchronizer().maxResourceSize_;
		synchronizer().maxResourceSize_ = 1;
		await synchronizerStart();
		synchronizer().maxResourceSize_ = previousMax;

		const syncItems = await BaseItem.allSyncItems(syncTargetId());
		expect(syncItems.length).toBe(2);
		expect(syncItems[1].item_location).toBe(BaseItem.SYNC_ITEM_LOCATION_REMOTE);
		expect(syncItems[1].sync_disabled).toBe(1);
	}));

});
							
								
								
									
										185
									
								
								packages/app-cli/tests/Synchronizer.revisions.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										185
									
								
								packages/app-cli/tests/Synchronizer.revisions.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,185 @@ | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
| import BaseModel from '@joplin/lib/BaseModel'; | ||||
|  | ||||
| const { synchronizerStart, revisionService, setupDatabaseAndSynchronizer, synchronizer, switchClient, encryptionService, loadEncryptionMasterKey, decryptionWorker } = require('./test-utils.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Revision = require('@joplin/lib/models/Revision.js'); | ||||
|  | ||||
describe('Synchronizer.revisions', function() {

	beforeEach(async (done) => {
		// Two client profiles so tests can simulate syncing between devices;
		// client 1 starts active.
		await setupDatabaseAndSynchronizer(1);
		await setupDatabaseAndSynchronizer(2);
		await switchClient(1);
		done();
	});

	it('should not save revisions when updating a note via sync', (async () => {
		// When a note is updated, a revision of the original is created.
		// Here, on client 1, the note is updated for the first time, however since it is
		// via sync, we don't create a revision - that revision has already been created on client
		// 2 and is going to be synced.

		const n1 = await Note.save({ title: 'testing' });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Note.save({ id: n1.id, title: 'mod from client 2' });
		await revisionService().collectRevisions();
		const allRevs1 = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
		expect(allRevs1.length).toBe(1);
		await synchronizerStart();

		await switchClient(1);

		await synchronizerStart();
		// Client 1 received the change via sync: still exactly the one
		// revision that client 2 created.
		const allRevs2 = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
		expect(allRevs2.length).toBe(1);
		expect(allRevs2[0].id).toBe(allRevs1[0].id);
	}));

	it('should not save revisions when deleting a note via sync', (async () => {
		const n1 = await Note.save({ title: 'testing' });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Note.delete(n1.id);
		await revisionService().collectRevisions(); // REV 1
		{
			const allRevs = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
			expect(allRevs.length).toBe(1);
		}
		await synchronizerStart();

		await switchClient(1);

		await synchronizerStart(); // The local note gets deleted here, however a new rev is *not* created
		{
			const allRevs = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
			expect(allRevs.length).toBe(1);
		}

		const notes = await Note.all();
		expect(notes.length).toBe(0);
	}));

	it('should not save revisions when an item_change has been generated as a result of a sync', (async () => {
		// When a note is modified an item_change object is going to be created. This
		// is used for example to tell the search engine, when note should be indexed. It is
		// also used by the revision service to tell what note should get a new revision.
		// When a note is modified via sync, this item_change object is also created. The issue
		// is that we don't want to create revisions for these particular item_changes, because
		// such revision has already been created on another client (whatever client initially
		// modified the note), and that rev is going to be synced.
		//
		// So in the end we need to make sure that we don't create these unecessary additional revisions.

		const n1 = await Note.save({ title: 'testing' });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Note.save({ id: n1.id, title: 'mod from client 2' });
		await revisionService().collectRevisions();
		await synchronizerStart();

		await switchClient(1);

		await synchronizerStart();

		{
			const allRevs = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
			expect(allRevs.length).toBe(1);
		}

		// Collecting revisions again on client 1 must not create a second
		// revision out of the sync-generated item_change.
		await revisionService().collectRevisions();

		{
			const allRevs = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
			expect(allRevs.length).toBe(1);
		}
	}));

	it('should handle case when new rev is created on client, then older rev arrives later via sync', (async () => {
		// - C1 creates note 1
		// - C1 modifies note 1 - REV1 created
		// - C1 sync
		// - C2 sync
		// - C2 receives note 1
		// - C2 modifies note 1 - REV2 created (but not based on REV1)
		// - C2 receives REV1
		//
		// In that case, we need to make sure that REV1 and REV2 are both valid and can be retrieved.
		// Even though REV1 was created before REV2, REV2 is *not* based on REV1. This is not ideal
		// due to unecessary data being saved, but a possible edge case and we simply need to check
		// all the data is valid.

		// Note: this test seems to be a bit shaky because it doesn't work if the synchronizer
		// context is passed around (via synchronizerStart()), but it should.

		const n1 = await Note.save({ title: 'note' });
		await Note.save({ id: n1.id, title: 'note REV1' });
		await revisionService().collectRevisions(); // REV1
		expect((await Revision.allByType(BaseModel.TYPE_NOTE, n1.id)).length).toBe(1);
		await synchronizer().start();

		await switchClient(2);

		// Sync without downloading revisions, so that client 2 modifies the
		// note before it has seen REV1.
		synchronizer().testingHooks_ = ['skipRevisions'];
		await synchronizer().start();
		synchronizer().testingHooks_ = [];

		await Note.save({ id: n1.id, title: 'note REV2' });
		await revisionService().collectRevisions(); // REV2
		expect((await Revision.allByType(BaseModel.TYPE_NOTE, n1.id)).length).toBe(1);
		await synchronizer().start(); // Sync the rev that had been skipped above with skipRevisions

		const revisions = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
		expect(revisions.length).toBe(2);

		// Both revisions must reconstruct to their respective note states.
		expect((await revisionService().revisionNote(revisions, 0)).title).toBe('note REV1');
		expect((await revisionService().revisionNote(revisions, 1)).title).toBe('note REV2');
	}));

	it('should not create revisions when item is modified as a result of decryption', (async () => {
		// Handle this scenario:
		// - C1 creates note
		// - C1 never changes it
		// - E2EE is enabled
		// - C1 sync
		// - More than one week later (as defined by oldNoteCutOffDate_), C2 sync
		// - C2 enters master password and note gets decrypted
		//
		// Technically at this point the note is modified (from encrypted to non-encrypted) and thus a ItemChange
		// object is created. The note is also older than oldNoteCutOffDate. However, this should not lead to the
		// creation of a revision because that change was not the result of a user action.
		// I guess that's the general rule - changes that come from user actions should result in revisions,
		// while automated changes (sync, decryption) should not.

		const dateInPast = revisionService().oldNoteCutOffDate_() - 1000;

		// Backdate the note past the revision cut-off date.
		await Note.save({ title: 'ma note', updated_time: dateInPast, created_time: dateInPast }, { autoTimestamp: false });
		const masterKey = await loadEncryptionMasterKey();
		await encryptionService().enableEncryption(masterKey, '123456');
		await encryptionService().loadMasterKeysFromSettings();
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();

		Setting.setObjectValue('encryption.passwordCache', masterKey.id, '123456');
		await encryptionService().loadMasterKeysFromSettings();
		await decryptionWorker().start();

		await revisionService().collectRevisions();

		expect((await Revision.all()).length).toBe(0);
	}));

});
							
								
								
									
										75
									
								
								packages/app-cli/tests/Synchronizer.tags.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										75
									
								
								packages/app-cli/tests/Synchronizer.tags.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,75 @@ | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
|  | ||||
| const { synchronizerStart, setupDatabaseAndSynchronizer, switchClient, encryptionService, loadEncryptionMasterKey } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Tag = require('@joplin/lib/models/Tag.js'); | ||||
| const MasterKey = require('@joplin/lib/models/MasterKey'); | ||||
|  | ||||
| describe('Synchronizer.tags', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		await setupDatabaseAndSynchronizer(1); | ||||
| 		await setupDatabaseAndSynchronizer(2); | ||||
| 		await switchClient(1); | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	async function shoudSyncTagTest(withEncryption: boolean) { | ||||
| 		let masterKey = null; | ||||
| 		if (withEncryption) { | ||||
| 			Setting.setValue('encryption.enabled', true); | ||||
| 			masterKey = await loadEncryptionMasterKey(); | ||||
| 		} | ||||
|  | ||||
| 		await Folder.save({ title: 'folder' }); | ||||
| 		const n1 = await Note.save({ title: 'mynote' }); | ||||
| 		const n2 = await Note.save({ title: 'mynote2' }); | ||||
| 		const tag = await Tag.save({ title: 'mytag' }); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		if (withEncryption) { | ||||
| 			const masterKey_2 = await MasterKey.load(masterKey.id); | ||||
| 			await encryptionService().loadMasterKey_(masterKey_2, '123456', true); | ||||
| 			const t = await Tag.load(tag.id); | ||||
| 			await Tag.decrypt(t); | ||||
| 		} | ||||
| 		const remoteTag = await Tag.loadByTitle(tag.title); | ||||
| 		expect(!!remoteTag).toBe(true); | ||||
| 		expect(remoteTag.id).toBe(tag.id); | ||||
| 		await Tag.addNote(remoteTag.id, n1.id); | ||||
| 		await Tag.addNote(remoteTag.id, n2.id); | ||||
| 		let noteIds = await Tag.noteIds(tag.id); | ||||
| 		expect(noteIds.length).toBe(2); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(1); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		let remoteNoteIds = await Tag.noteIds(tag.id); | ||||
| 		expect(remoteNoteIds.length).toBe(2); | ||||
| 		await Tag.removeNote(tag.id, n1.id); | ||||
| 		remoteNoteIds = await Tag.noteIds(tag.id); | ||||
| 		expect(remoteNoteIds.length).toBe(1); | ||||
| 		await synchronizerStart(); | ||||
|  | ||||
| 		await switchClient(2); | ||||
|  | ||||
| 		await synchronizerStart(); | ||||
| 		noteIds = await Tag.noteIds(tag.id); | ||||
| 		expect(noteIds.length).toBe(1); | ||||
| 		expect(remoteNoteIds[0]).toBe(noteIds[0]); | ||||
| 	} | ||||
|  | ||||
| 	it('should sync tags', (async () => { | ||||
| 		await shoudSyncTagTest(false); | ||||
| 	})); | ||||
|  | ||||
| 	it('should sync encrypted tags', (async () => { | ||||
| 		await shoudSyncTagTest(true); | ||||
| 	})); | ||||
|  | ||||
| }); | ||||
| @@ -1,13 +1,9 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
|  | ||||
|  | ||||
| const { asyncTest, fileContentEqual, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const TaskQueue = require('@joplin/lib/TaskQueue.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('TaskQueue', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| @@ -16,7 +12,7 @@ describe('TaskQueue', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should queue and execute tasks', asyncTest(async () => { | ||||
| 	it('should queue and execute tasks', (async () => { | ||||
| 		const queue = new TaskQueue(); | ||||
|  | ||||
| 		queue.push(1, async () => { await sleep(0.5); return 'a'; }); | ||||
| @@ -37,7 +33,7 @@ describe('TaskQueue', function() { | ||||
| 		expect(results[2].result).toBe('c'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle errors', asyncTest(async () => { | ||||
| 	it('should handle errors', (async () => { | ||||
| 		const queue = new TaskQueue(); | ||||
|  | ||||
| 		queue.push(1, async () => { await sleep(0.5); return 'a'; }); | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { sortedIds, createNTestNotes, asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { sortedIds, createNTestNotes, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| @@ -10,10 +10,6 @@ const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const ArrayUtils = require('@joplin/lib/ArrayUtils.js'); | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('database', function() { | ||||
| 	beforeEach(async (done) => { | ||||
| 		await setupDatabaseAndSynchronizer(1); | ||||
| @@ -21,7 +17,7 @@ describe('database', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should not modify cached field names', asyncTest(async () => { | ||||
| 	it('should not modify cached field names', (async () => { | ||||
| 		const db = BaseModel.db(); | ||||
|  | ||||
| 		const fieldNames = db.tableFieldNames('notes'); | ||||
|   | ||||
| @@ -1,13 +1,9 @@ | ||||
| 'use strict'; | ||||
|  | ||||
|  | ||||
| const { asyncTest,checkThrow } = require('./test-utils.js'); | ||||
| const { checkThrow } = require('./test-utils.js'); | ||||
| const eventManager = require('@joplin/lib/eventManager').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('eventManager', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| @@ -20,7 +16,7 @@ describe('eventManager', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should watch state props', asyncTest(async () => { | ||||
| 	it('should watch state props', (async () => { | ||||
| 		let localStateName = ''; | ||||
| 		let callCount = 0; | ||||
|  | ||||
| @@ -51,7 +47,7 @@ describe('eventManager', function() { | ||||
| 		expect(callCount).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should unwatch state props', asyncTest(async () => { | ||||
| 	it('should unwatch state props', (async () => { | ||||
| 		let localStateName = ''; | ||||
|  | ||||
| 		function nameWatch(event) { | ||||
| @@ -69,7 +65,7 @@ describe('eventManager', function() { | ||||
| 		expect(localStateName).toBe(''); | ||||
| 	})); | ||||
|  | ||||
| 	it('should watch nested props', asyncTest(async () => { | ||||
| 	it('should watch nested props', (async () => { | ||||
| 		let localStateName = ''; | ||||
|  | ||||
| 		function nameWatch(event) { | ||||
| @@ -94,7 +90,7 @@ describe('eventManager', function() { | ||||
| 		expect(localStateName).toBe('paul'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not be possible to modify state props', asyncTest(async () => { | ||||
| 	it('should not be possible to modify state props', (async () => { | ||||
| 		let localUser = {}; | ||||
|  | ||||
| 		function userWatch(event) { | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| const { asyncTest, id, ids, createNTestFolders, sortedIds, createNTestNotes, TestApp } = require('./test-utils.js'); | ||||
| const { id, ids, createNTestFolders, sortedIds, createNTestNotes, TestApp } = require('./test-utils.js'); | ||||
| const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const uuid = require('@joplin/lib/uuid').default; | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| @@ -35,7 +35,7 @@ describe('feature_NoteHistory', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should save history when navigating through notes', asyncTest(async () => { | ||||
| 	it('should save history when navigating through notes', (async () => { | ||||
| 		// setup | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		await testApp.wait(); | ||||
| @@ -87,7 +87,7 @@ describe('feature_NoteHistory', function() { | ||||
| 	})); | ||||
|  | ||||
|  | ||||
| 	it('should save history when navigating through notebooks', asyncTest(async () => { | ||||
| 	it('should save history when navigating through notebooks', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		await testApp.wait(); | ||||
| 		const notes0 = await createNTestNotes(5, folders[0]); | ||||
| @@ -127,7 +127,7 @@ describe('feature_NoteHistory', function() { | ||||
| 	})); | ||||
|  | ||||
|  | ||||
| 	it('should save history when searching for a note', asyncTest(async () => { | ||||
| 	it('should save history when searching for a note', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		await testApp.wait(); | ||||
| 		const notes0 = await createNTestNotes(5, folders[0]); | ||||
| @@ -169,7 +169,7 @@ describe('feature_NoteHistory', function() { | ||||
| 		expect(ids(state.forwardHistoryNotes)).toEqual([]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should ensure no adjacent duplicates', asyncTest(async () => { | ||||
| 	it('should ensure no adjacent duplicates', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		const notes0 = await createNTestNotes(3, folders[0]); | ||||
| 		await testApp.wait(); | ||||
| @@ -207,7 +207,7 @@ describe('feature_NoteHistory', function() { | ||||
| 		expect(state.selectedFolderId).toEqual(folders[0].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should ensure history is not corrupted when notes get deleted.', asyncTest(async () => { | ||||
| 	it('should ensure history is not corrupted when notes get deleted.', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		await testApp.wait(); | ||||
| 		const notes0 = await createNTestNotes(5, folders[0]); | ||||
| @@ -237,7 +237,7 @@ describe('feature_NoteHistory', function() { | ||||
| 		expect(state.selectedFolderId).toEqual(folders[0].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should ensure history is not corrupted when notes get created.', asyncTest(async () => { | ||||
| 	it('should ensure history is not corrupted when notes get created.', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		await testApp.wait(); | ||||
| 		const notes0 = await createNTestNotes(5, folders[0]); | ||||
| @@ -298,7 +298,7 @@ describe('feature_NoteHistory', function() { | ||||
| 		expect(state.selectedFolderId).toEqual(folders[0].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should ensure history works when traversing all notes', asyncTest(async () => { | ||||
| 	it('should ensure history works when traversing all notes', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		await testApp.wait(); | ||||
| 		const notes0 = await createNTestNotes(5, folders[0]); | ||||
| @@ -356,7 +356,7 @@ describe('feature_NoteHistory', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual([notes0[4].id]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should ensure history works when traversing through conflict notes', asyncTest(async () => { | ||||
| 	it('should ensure history works when traversing through conflict notes', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		await testApp.wait(); | ||||
| 		const notes0 = await createNTestNotes(5, folders[0]); | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
| const { setupDatabaseAndSynchronizer, switchClient, asyncTest, createNTestFolders, createNTestNotes, createNTestTags, TestApp } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient, createNTestFolders, createNTestNotes, createNTestTags, TestApp } = require('./test-utils.js'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| @@ -23,7 +23,7 @@ describe('integration_NoteList', function() { | ||||
| 	}); | ||||
|  | ||||
| 	// Reference: https://github.com/laurent22/joplin/issues/2709 | ||||
| 	it('should leave a conflict note in the conflict folder when it modified', asyncTest(async () => { | ||||
| 	it('should leave a conflict note in the conflict folder when it modified', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'test' }); | ||||
| 		const note = await Note.save({ title: 'note 1', parent_id: folder.id, is_conflict: 1 }); | ||||
| 		await testApp.wait(); | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
| const { setupDatabaseAndSynchronizer, switchClient, asyncTest, id, ids, sortedIds, at, createNTestFolders, createNTestNotes, createNTestTags, TestApp } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient, id, ids, sortedIds, at, createNTestFolders, createNTestNotes, createNTestTags, TestApp } = require('./test-utils.js'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| @@ -36,7 +36,7 @@ describe('integration_ShowAllNotes', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should show all notes', asyncTest(async () => { | ||||
| 	it('should show all notes', (async () => { | ||||
| 		// setup | ||||
| 		const folders = await createNTestFolders(3); | ||||
| 		Folder.moveToFolder(id(folders[2]), id(folders[1])); // subfolder | ||||
| @@ -57,7 +57,7 @@ describe('integration_ShowAllNotes', function() { | ||||
| 		expect(sortedIds(state.notes)).toEqual(sortedIds(notes0.concat(notes1).concat(notes2))); | ||||
| 	})); | ||||
|  | ||||
| 	it('should show retain note selection when going from a folder to all-notes', asyncTest(async () => { | ||||
| 	it('should show retain note selection when going from a folder to all-notes', (async () => { | ||||
| 		// setup | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		const notes0 = await createNTestNotes(3, folders[0]); | ||||
| @@ -88,7 +88,7 @@ describe('integration_ShowAllNotes', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(ids([notes1[1]])); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support note duplication', asyncTest(async () => { | ||||
| 	it('should support note duplication', (async () => { | ||||
| 		// setup | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const folder2 = await Folder.save({ title: 'folder2' }); | ||||
| @@ -125,7 +125,7 @@ describe('integration_ShowAllNotes', function() { | ||||
| 		expect(sortedIds(state.notes)).toEqual(sortedIds([note1, note2, newNote1, newNote2])); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support changing the note parent', asyncTest(async () => { | ||||
| 	it('should support changing the note parent', (async () => { | ||||
| 		// setup | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const folder2 = await Folder.save({ title: 'folder2' }); | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
| const { setupDatabaseAndSynchronizer, switchClient, asyncTest, createNTestFolders, createNTestNotes, createNTestTags, TestApp } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient, createNTestFolders, createNTestNotes, createNTestTags, TestApp } = require('./test-utils.js'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| @@ -23,7 +23,7 @@ describe('integration_TagList', function() { | ||||
| 	}); | ||||
|  | ||||
| 	// the tag list should be cleared if the next note has no tags | ||||
| 	it('should clear tag list when a note is deleted', asyncTest(async () => { | ||||
| 	it('should clear tag list when a note is deleted', (async () => { | ||||
| 		// setup and select the note | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| @@ -54,7 +54,7 @@ describe('integration_TagList', function() { | ||||
| 	})); | ||||
|  | ||||
| 	// the tag list should be updated if the next note has tags | ||||
| 	it('should update tag list when a note is deleted', asyncTest(async () => { | ||||
| 	it('should update tag list when a note is deleted', (async () => { | ||||
| 		// set up and select the note | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
|   | ||||
| @@ -3,15 +3,11 @@ | ||||
|  | ||||
| const uuid = require('@joplin/lib/uuid').default; | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, sleep, fileApi, fileContentEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { sleep, fileApi, fileContentEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
| const fs = require('fs-extra'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| const api = null; | ||||
|  | ||||
| // Adding empty test for Jest | ||||
| @@ -42,7 +38,7 @@ it('will pass', () => { | ||||
| // 	}); | ||||
|  | ||||
| // 	describe('list', function() { | ||||
| // 		it('should return items with relative path', asyncTest(async () => { | ||||
| // 		it('should return items with relative path', (async () => { | ||||
| // 			await api.mkdir('.subfolder'); | ||||
| // 			await api.put('1', 'something on root 1'); | ||||
| // 			await api.put('.subfolder/1', 'something subfolder 1'); | ||||
| @@ -57,7 +53,7 @@ it('will pass', () => { | ||||
| // 			expect(items[0].updated_time).toMatch(/^\d+$/); // make sure it's using epoch timestamp | ||||
| // 		})); | ||||
|  | ||||
| // 		it('should default to only files on root directory', asyncTest(async () => { | ||||
| // 		it('should default to only files on root directory', (async () => { | ||||
| // 			await api.mkdir('.subfolder'); | ||||
| // 			await api.put('.subfolder/1', 'something subfolder 1'); | ||||
| // 			await api.put('file1', 'something 1'); | ||||
| @@ -70,12 +66,12 @@ it('will pass', () => { | ||||
| // 	}); // list | ||||
|  | ||||
| // 	describe('delete', function() { | ||||
| // 		it('should not error if file does not exist', asyncTest(async () => { | ||||
| // 		it('should not error if file does not exist', (async () => { | ||||
| // 			const hasThrown = await checkThrowAsync(async () => await api.delete('nonexistant_file')); | ||||
| // 			expect(hasThrown).toBe(false); | ||||
| // 		})); | ||||
|  | ||||
| // 		it('should delete specific file given full path', asyncTest(async () => { | ||||
| // 		it('should delete specific file given full path', (async () => { | ||||
| // 			await api.mkdir('deleteDir'); | ||||
| // 			await api.put('deleteDir/1', 'something 1'); | ||||
| // 			await api.put('deleteDir/2', 'something 2'); | ||||
| @@ -90,19 +86,19 @@ it('will pass', () => { | ||||
| // 	}); // delete | ||||
|  | ||||
| // 	describe('get', function() { | ||||
| // 		it('should return null if object does not exist', asyncTest(async () => { | ||||
| // 		it('should return null if object does not exist', (async () => { | ||||
| // 			const response = await api.get('nonexistant_file'); | ||||
| // 			expect(response).toBe(null); | ||||
| // 		})); | ||||
|  | ||||
| // 		it('should return UTF-8 encoded string by default', asyncTest(async () => { | ||||
| // 		it('should return UTF-8 encoded string by default', (async () => { | ||||
| // 			await api.put('testnote.md', 'something 2'); | ||||
|  | ||||
| // 			const response = await api.get('testnote.md'); | ||||
| // 			expect(response).toBe('something 2'); | ||||
| // 		})); | ||||
|  | ||||
| // 		it('should return a Response object and writes file to options.path, if options.target is "file"', asyncTest(async () => { | ||||
| // 		it('should return a Response object and writes file to options.path, if options.target is "file"', (async () => { | ||||
| // 			const localFilePath = `${Setting.value('tempDir')}/${uuid.create()}.md`; | ||||
| // 			await api.put('testnote.md', 'something 2'); | ||||
| // 			sleep(0.2); | ||||
| @@ -116,7 +112,7 @@ it('will pass', () => { | ||||
| // 	}); // get | ||||
|  | ||||
| // 	describe('put', function() { | ||||
| // 		it('should create file to remote path and content', asyncTest(async () => { | ||||
| // 		it('should create file to remote path and content', (async () => { | ||||
| // 			await api.put('putTest.md', 'I am your content'); | ||||
| // 			sleep(0.2); | ||||
|  | ||||
| @@ -124,7 +120,7 @@ it('will pass', () => { | ||||
| // 			expect(response).toBe('I am your content'); | ||||
| // 		})); | ||||
|  | ||||
| // 		it('should upload file in options.path to remote path, if options.source is "file"', asyncTest(async () => { | ||||
| // 		it('should upload file in options.path to remote path, if options.source is "file"', (async () => { | ||||
| // 			const localFilePath = `${Setting.value('tempDir')}/${uuid.create()}.md`; | ||||
| // 			fs.writeFileSync(localFilePath, 'I am the local file.'); | ||||
|  | ||||
|   | ||||
| @@ -1,20 +1,14 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
|  | ||||
|  | ||||
| const { asyncTest } = require('./test-utils.js'); | ||||
| const htmlUtils = require('@joplin/lib/htmlUtils.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('htmlUtils', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should extract image URLs', asyncTest(async () => { | ||||
| 	it('should extract image URLs', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['<img src="http://test.com/img.png"/>', ['http://test.com/img.png']], | ||||
| 			['<img src="http://test.com/img.png"/> <img src="http://test.com/img2.png"/>', ['http://test.com/img.png', 'http://test.com/img2.png']], | ||||
| @@ -32,7 +26,7 @@ describe('htmlUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should replace image URLs', asyncTest(async () => { | ||||
| 	it('should replace image URLs', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['<img src="http://test.com/img.png"/>', ['http://other.com/img2.png'], '<img src="http://other.com/img2.png"/>'], | ||||
| 			['<img src="http://test.com/img.png"/> <img src="http://test.com/img2.png"/>', ['http://other.com/img2.png', 'http://other.com/img3.png'], '<img src="http://other.com/img2.png"/> <img src="http://other.com/img3.png"/>'], | ||||
| @@ -55,7 +49,7 @@ describe('htmlUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should encode attributes', asyncTest(async () => { | ||||
| 	it('should encode attributes', (async () => { | ||||
| 		const testCases = [ | ||||
| 			[{ a: 'one', b: 'two' }, 'a="one" b="two"'], | ||||
| 			[{ a: 'one&two' }, 'a="one&two"'], | ||||
| @@ -68,7 +62,7 @@ describe('htmlUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should prepend a base URL', asyncTest(async () => { | ||||
| 	it('should prepend a base URL', (async () => { | ||||
| 		const testCases = [ | ||||
| 			[ | ||||
| 				'<a href="a.html">Something</a>', | ||||
|   | ||||
| @@ -1,20 +1,14 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
|  | ||||
|  | ||||
| const { asyncTest } = require('./test-utils.js'); | ||||
| const markdownUtils = require('@joplin/lib/markdownUtils').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at markdownUtils: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('markdownUtils', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should prepend a base URL', asyncTest(async () => { | ||||
| 	it('should prepend a base URL', (async () => { | ||||
| 		const baseUrl = 'https://test.com/site'; | ||||
|  | ||||
| 		const testCases = [ | ||||
| @@ -32,7 +26,7 @@ describe('markdownUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should extract image URLs', asyncTest(async () => { | ||||
| 	it('should extract image URLs', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['', ['http://test.com/img.png']], | ||||
| 			[' ', ['http://test.com/img.png', 'http://test.com/img2.png']], | ||||
| @@ -50,7 +44,7 @@ describe('markdownUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('escape a markdown link', asyncTest(async () => { | ||||
| 	it('escape a markdown link', (async () => { | ||||
|  | ||||
| 		const testCases = [ | ||||
| 			['file:///Users/who put spaces in their username??/.config/joplin', 'file:///Users/who%20put%20spaces%20in%20their%20username??/.config/joplin'], | ||||
| @@ -65,7 +59,7 @@ describe('markdownUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('escape a markdown link (title)', asyncTest(async () => { | ||||
| 	it('escape a markdown link (title)', (async () => { | ||||
|  | ||||
| 		const testCases = [ | ||||
| 			['Helmut K. C. Tessarek', 'Helmut K. C. Tessarek'], | ||||
| @@ -80,7 +74,7 @@ describe('markdownUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('replace markdown link with description', asyncTest(async () => { | ||||
| 	it('replace markdown link with description', (async () => { | ||||
|  | ||||
| 		const testCases = [ | ||||
| 			['Test case [one](link)', 'Test case one'], | ||||
|   | ||||
| @@ -2,27 +2,23 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const mimeUtils = require('@joplin/lib/mime-utils.js').mime; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('mimeUils', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should get the file extension from the mime type', asyncTest(async () => { | ||||
| 	it('should get the file extension from the mime type', (async () => { | ||||
| 		expect(mimeUtils.toFileExtension('image/jpeg')).toBe('jpg'); | ||||
| 		expect(mimeUtils.toFileExtension('image/jpg')).toBe('jpg'); | ||||
| 		expect(mimeUtils.toFileExtension('IMAGE/JPG')).toBe('jpg'); | ||||
| 		expect(mimeUtils.toFileExtension('')).toBe(null); | ||||
| 	})); | ||||
|  | ||||
| 	it('should get the mime type from the filename', asyncTest(async () => { | ||||
| 	it('should get the mime type from the filename', (async () => { | ||||
| 		expect(mimeUtils.fromFilename('test.jpg')).toBe('image/jpeg'); | ||||
| 		expect(mimeUtils.fromFilename('test.JPG')).toBe('image/jpeg'); | ||||
| 		expect(mimeUtils.fromFilename('test.doesntexist')).toBe(null); | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const BaseItem = require('@joplin/lib/models/BaseItem.js'); | ||||
| @@ -10,10 +10,6 @@ const Resource = require('@joplin/lib/models/Resource.js'); | ||||
| const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at models_BaseItem: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| async function allItems() { | ||||
| 	const folders = await Folder.all(); | ||||
| 	const notes = await Note.all(); | ||||
| @@ -30,7 +26,7 @@ describe('models_BaseItem', function() { | ||||
|  | ||||
| 	// This is to handle the case where a property is removed from a BaseItem table - in that case files in | ||||
| 	// the sync target will still have the old property but we don't need it locally. | ||||
| 	it('should ignore properties that are present in sync file but not in database when serialising', asyncTest(async () => { | ||||
| 	it('should ignore properties that are present in sync file but not in database when serialising', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder1' }); | ||||
|  | ||||
| 		let serialized = await Folder.serialize(folder); | ||||
| @@ -41,7 +37,7 @@ describe('models_BaseItem', function() { | ||||
| 		expect('ignore_me' in unserialized).toBe(false); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not modify title when unserializing', asyncTest(async () => { | ||||
| 	it('should not modify title when unserializing', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: '' }); | ||||
| 		const folder2 = await Folder.save({ title: 'folder1' }); | ||||
|  | ||||
| @@ -56,7 +52,7 @@ describe('models_BaseItem', function() { | ||||
| 		expect(unserialized2.title).toBe(folder2.title); | ||||
| 	})); | ||||
|  | ||||
| 	it('should correctly unserialize note timestamps', asyncTest(async () => { | ||||
| 	it('should correctly unserialize note timestamps', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder' }); | ||||
| 		const note = await Note.save({ title: 'note', parent_id: folder.id }); | ||||
|  | ||||
| @@ -69,7 +65,7 @@ describe('models_BaseItem', function() { | ||||
| 		expect(unserialized.user_updated_time).toEqual(note.user_updated_time); | ||||
| 	})); | ||||
|  | ||||
| 	it('should serialize geolocation fields', asyncTest(async () => { | ||||
| 	it('should serialize geolocation fields', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder' }); | ||||
| 		let note = await Note.save({ title: 'note', parent_id: folder.id }); | ||||
| 		note = await Note.load(note.id); | ||||
| @@ -92,7 +88,7 @@ describe('models_BaseItem', function() { | ||||
| 		expect(unserialized.altitude).toEqual(note.altitude); | ||||
| 	})); | ||||
|  | ||||
| 	it('should serialize and unserialize notes', asyncTest(async () => { | ||||
| 	it('should serialize and unserialize notes', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder' }); | ||||
| 		const note = await Note.save({ title: 'note', parent_id: folder.id }); | ||||
| 		await Note.updateGeolocation(note.id); | ||||
| @@ -104,7 +100,7 @@ describe('models_BaseItem', function() { | ||||
| 		expect(noteAfter).toEqual(noteBefore); | ||||
| 	})); | ||||
|  | ||||
| 	it('should serialize and unserialize properties that contain new lines', asyncTest(async () => { | ||||
| 	it('should serialize and unserialize properties that contain new lines', (async () => { | ||||
| 		const sourceUrl = ` | ||||
| https://joplinapp.org/ \\n | ||||
| `; | ||||
| @@ -118,7 +114,7 @@ https://joplinapp.org/ \\n | ||||
| 		expect(noteAfter).toEqual(noteBefore); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not serialize the note title and body', asyncTest(async () => { | ||||
| 	it('should not serialize the note title and body', (async () => { | ||||
| 		const note = await Note.save({ title: 'my note', body: `one line | ||||
| two line | ||||
| three line \\n no escape` }); | ||||
|   | ||||
| @@ -1,12 +1,8 @@ | ||||
| import { FolderEntity } from '@joplin/lib/services/database/types'; | ||||
| const { createNTestNotes, asyncTest, setupDatabaseAndSynchronizer, sleep, switchClient, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { createNTestNotes, setupDatabaseAndSynchronizer, sleep, switchClient, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at models_Folder: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| async function allItems() { | ||||
| 	const folders = await Folder.all(); | ||||
| 	const notes = await Note.all(); | ||||
| @@ -21,7 +17,7 @@ describe('models_Folder', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should tell if a notebook can be nested under another one', asyncTest(async () => { | ||||
| 	it('should tell if a notebook can be nested under another one', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id }); | ||||
| 		const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id }); | ||||
| @@ -37,7 +33,7 @@ describe('models_Folder', function() { | ||||
| 		expect(await Folder.canNestUnder(f2.id, '')).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should recursively delete notes and sub-notebooks', asyncTest(async () => { | ||||
| 	it('should recursively delete notes and sub-notebooks', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id }); | ||||
| 		const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id }); | ||||
| @@ -55,7 +51,7 @@ describe('models_Folder', function() { | ||||
| 		expect(all.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should sort by last modified, based on content', asyncTest(async () => { | ||||
| 	it('should sort by last modified, based on content', (async () => { | ||||
| 		let folders; | ||||
|  | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); await sleep(0.1); | ||||
| @@ -89,7 +85,7 @@ describe('models_Folder', function() { | ||||
| 		expect(folders[2].id).toBe(f2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should sort by last modified, based on content (sub-folders too)', asyncTest(async () => { | ||||
| 	it('should sort by last modified, based on content (sub-folders too)', (async () => { | ||||
| 		let folders; | ||||
|  | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); await sleep(0.1); | ||||
| @@ -128,7 +124,7 @@ describe('models_Folder', function() { | ||||
| 		expect(folders[3].id).toBe(f2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should add node counts', asyncTest(async () => { | ||||
| 	it('should add node counts', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id }); | ||||
| 		const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id }); | ||||
| @@ -163,7 +159,7 @@ describe('models_Folder', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should not count completed to-dos', asyncTest(async () => { | ||||
| 	it('should not count completed to-dos', (async () => { | ||||
|  | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id }); | ||||
| @@ -190,7 +186,7 @@ describe('models_Folder', function() { | ||||
| 		expect(foldersById[f4.id].note_count).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should recursively find folder path', asyncTest(async () => { | ||||
| 	it('should recursively find folder path', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id }); | ||||
| 		const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id }); | ||||
| @@ -204,7 +200,7 @@ describe('models_Folder', function() { | ||||
| 		expect(folderPath[2].id).toBe(f3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should sort folders alphabetically', asyncTest(async () => { | ||||
| 	it('should sort folders alphabetically', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id }); | ||||
| 		const f3 = await Folder.save({ title: 'folder3', parent_id: f1.id }); | ||||
| @@ -224,7 +220,7 @@ describe('models_Folder', function() { | ||||
| 		expect(sortedFolderTree[2].id).toBe(f6.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not allow setting a notebook parent as itself', asyncTest(async () => { | ||||
| 	it('should not allow setting a notebook parent as itself', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const hasThrown = await checkThrowAsync(() => Folder.save({ id: f1.id, parent_id: f1.id }, { userSideValidation: true })); | ||||
| 		expect(hasThrown).toBe(true); | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, revisionService, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, revisionService, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const SearchEngine = require('@joplin/lib/services/searchengine/SearchEngine'); | ||||
| const ResourceService = require('@joplin/lib/services/ResourceService').default; | ||||
| const ItemChangeUtils = require('@joplin/lib/services/ItemChangeUtils'); | ||||
| @@ -10,10 +10,6 @@ const Note = require('@joplin/lib/models/Note'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| const ItemChange = require('@joplin/lib/models/ItemChange'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| let searchEngine = null; | ||||
|  | ||||
| describe('models_ItemChange', function() { | ||||
| @@ -26,7 +22,7 @@ describe('models_ItemChange', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should delete old changes that have been processed', asyncTest(async () => { | ||||
| 	it('should delete old changes that have been processed', (async () => { | ||||
| 		const n1 = await Note.save({ title: 'abcd efgh' }); // 3 | ||||
|  | ||||
| 		await ItemChange.waitForAllSaved(); | ||||
|   | ||||
| @@ -1,15 +1,11 @@ | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
| import BaseModel from '@joplin/lib/BaseModel'; | ||||
| import shim from '@joplin/lib/shim'; | ||||
| const { sortedIds, createNTestNotes, asyncTest, setupDatabaseAndSynchronizer, switchClient, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { sortedIds, createNTestNotes, setupDatabaseAndSynchronizer, switchClient, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const ArrayUtils = require('@joplin/lib/ArrayUtils.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at models_Note: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| async function allItems() { | ||||
| 	const folders = await Folder.all(); | ||||
| 	const notes = await Note.all(); | ||||
| @@ -23,7 +19,7 @@ describe('models_Note', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should find resource and note IDs', asyncTest(async () => { | ||||
| 	it('should find resource and note IDs', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		let note2 = await Note.save({ title: 'ma deuxième note', body: `Lien vers première note : ${Note.markdownTag(note1)}`, parent_id: folder1.id }); | ||||
| @@ -47,7 +43,7 @@ describe('models_Note', function() { | ||||
| 		expect(items.length).toBe(4); | ||||
| 	})); | ||||
|  | ||||
| 	it('should find linked items', asyncTest(async () => { | ||||
| 	it('should find linked items', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['[](:/06894e83b8f84d3d8cbe0f1587f9e226)', ['06894e83b8f84d3d8cbe0f1587f9e226']], | ||||
| 			['[](:/06894e83b8f84d3d8cbe0f1587f9e226) [](:/06894e83b8f84d3d8cbe0f1587f9e226)', ['06894e83b8f84d3d8cbe0f1587f9e226']], | ||||
| @@ -69,7 +65,7 @@ describe('models_Note', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should change the type of notes', asyncTest(async () => { | ||||
| 	it('should change the type of notes', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		note1 = await Note.load(note1.id); | ||||
| @@ -90,7 +86,7 @@ describe('models_Note', function() { | ||||
| 		expect(!!changedNote.is_todo).toBe(false); | ||||
| 	})); | ||||
|  | ||||
| 	it('should serialize and unserialize without modifying data', asyncTest(async () => { | ||||
| 	it('should serialize and unserialize without modifying data', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const testCases = [ | ||||
| 			[{ title: '', body: 'Body and no title\nSecond line\nThird Line', parent_id: folder1.id }, | ||||
| @@ -115,7 +111,7 @@ describe('models_Note', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should reset fields for a duplicate', asyncTest(async () => { | ||||
| 	it('should reset fields for a duplicate', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'note', parent_id: folder1.id }); | ||||
|  | ||||
| @@ -128,7 +124,7 @@ describe('models_Note', function() { | ||||
| 		expect(duplicatedNote.user_updated_time !== note1.user_updated_time).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete a set of notes', asyncTest(async () => { | ||||
| 	it('should delete a set of notes', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const noOfNotes = 20; | ||||
| 		await createNTestNotes(noOfNotes, folder1); | ||||
| @@ -141,7 +137,7 @@ describe('models_Note', function() { | ||||
| 		expect(all[0].id).toBe(folder1.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete only the selected notes', asyncTest(async () => { | ||||
| 	it('should delete only the selected notes', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id }); | ||||
|  | ||||
| @@ -171,7 +167,7 @@ describe('models_Note', function() { | ||||
| 		expect(intersection.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete nothing', asyncTest(async () => { | ||||
| 	it('should delete nothing', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id }); | ||||
| 		const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id }); | ||||
| @@ -190,7 +186,7 @@ describe('models_Note', function() { | ||||
| 		expect(sortedIds(afterDelete)).toEqual(sortedIds(beforeDelete)); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not move to conflict folder', asyncTest(async () => { | ||||
| 	it('should not move to conflict folder', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'Folder' }); | ||||
| 		const folder2 = await Folder.save({ title: Folder.conflictFolderTitle(), id: Folder.conflictFolderId() }); | ||||
| 		const note1 = await Note.save({ title: 'note', parent_id: folder1.id }); | ||||
| @@ -202,7 +198,7 @@ describe('models_Note', function() { | ||||
| 		expect(note.parent_id).toEqual(folder1.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not copy to conflict folder', asyncTest(async () => { | ||||
| 	it('should not copy to conflict folder', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'Folder' }); | ||||
| 		const folder2 = await Folder.save({ title: Folder.conflictFolderTitle(), id: Folder.conflictFolderId() }); | ||||
| 		const note1 = await Note.save({ title: 'note', parent_id: folder1.id }); | ||||
| @@ -211,7 +207,7 @@ describe('models_Note', function() { | ||||
| 		expect(hasThrown).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should convert resource paths from internal to external paths', asyncTest(async () => { | ||||
| 	it('should convert resource paths from internal to external paths', (async () => { | ||||
| 		const resourceDirName = Setting.value('resourceDirName'); | ||||
| 		const resourceDir = Setting.value('resourceDir'); | ||||
| 		const r1 = await shim.createResourceFromPath(`${__dirname}/../tests/support/photo.jpg`); | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { sortedIds, createNTestNotes, asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { sortedIds, createNTestNotes, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| @@ -10,10 +10,6 @@ const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const ArrayUtils = require('@joplin/lib/ArrayUtils.js'); | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at models_Note_CustomSortOrder: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| async function allItems() { | ||||
| 	const folders = await Folder.all(); | ||||
| 	const notes = await Note.all(); | ||||
| @@ -27,7 +23,7 @@ describe('models_Note_CustomSortOrder', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should set the order property when saving a note', asyncTest(async () => { | ||||
| 	it('should set the order property when saving a note', (async () => { | ||||
| 		const now = Date.now(); | ||||
| 		const n1 = await Note.save({ title: 'testing' }); | ||||
| 		expect(n1.order).toBeGreaterThanOrEqual(now); | ||||
| @@ -36,7 +32,7 @@ describe('models_Note_CustomSortOrder', function() { | ||||
| 		expect(n2.order).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should insert notes at the specified position (order 0)', asyncTest(async () => { | ||||
| 	it('should insert notes at the specified position (order 0)', (async () => { | ||||
| 		// Notes always had an "order" property, but for a long time it wasn't used, and | ||||
| 		// set to 0. For custom sorting to work though, it needs to be set to some number | ||||
| 		// (which normally is the creation timestamp). So if the user tries to move notes | ||||
| @@ -90,7 +86,7 @@ describe('models_Note_CustomSortOrder', function() { | ||||
| 		expect(sortedNotes[4].id).toBe(notes1[0].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should insert notes at the specified position (targets with same orders)', asyncTest(async () => { | ||||
| 	it('should insert notes at the specified position (targets with same orders)', (async () => { | ||||
| 		// If the target notes all have the same order, inserting a note should work | ||||
| 		// anyway, because the order of the other notes will be updated as needed. | ||||
|  | ||||
| @@ -115,7 +111,7 @@ describe('models_Note_CustomSortOrder', function() { | ||||
| 		expect(sortedNotes[3].id).toBe(notes[1].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should insert notes at the specified position (insert at end)', asyncTest(async () => { | ||||
| 	it('should insert notes at the specified position (insert at end)', (async () => { | ||||
| 		const folder1 = await Folder.save({}); | ||||
|  | ||||
| 		const notes = []; | ||||
| @@ -138,7 +134,7 @@ describe('models_Note_CustomSortOrder', function() { | ||||
| 		expect(sortedNotes[3].id).toBe(notes[1].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should insert notes at the specified position (insert at beginning)', asyncTest(async () => { | ||||
| 	it('should insert notes at the specified position (insert at beginning)', (async () => { | ||||
| 		const folder1 = await Folder.save({}); | ||||
|  | ||||
| 		const notes = []; | ||||
| @@ -161,7 +157,7 @@ describe('models_Note_CustomSortOrder', function() { | ||||
| 		expect(sortedNotes[3].id).toBe(notes[0].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should insert notes even if sources are not adjacent', asyncTest(async () => { | ||||
| 	it('should insert notes even if sources are not adjacent', (async () => { | ||||
| 		const folder1 = await Folder.save({}); | ||||
|  | ||||
| 		const notes = []; | ||||
|   | ||||
| @@ -2,17 +2,13 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Resource = require('@joplin/lib/models/Resource.js'); | ||||
| const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| const testImagePath = `${__dirname}/../tests/support/photo.jpg`; | ||||
|  | ||||
| describe('models_Resource', function() { | ||||
| @@ -23,7 +19,7 @@ describe('models_Resource', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should have a "done" fetch_status when created locally', asyncTest(async () => { | ||||
| 	it('should have a "done" fetch_status when created locally', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		await shim.attachFileToNote(note1, testImagePath); | ||||
| @@ -32,7 +28,7 @@ describe('models_Resource', function() { | ||||
| 		expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE); | ||||
| 	})); | ||||
|  | ||||
| 	it('should have a default local state', asyncTest(async () => { | ||||
| 	it('should have a default local state', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		await shim.attachFileToNote(note1, testImagePath); | ||||
| @@ -43,7 +39,7 @@ describe('models_Resource', function() { | ||||
| 		expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE); | ||||
| 	})); | ||||
|  | ||||
| 	it('should save and delete local state', asyncTest(async () => { | ||||
| 	it('should save and delete local state', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		await shim.attachFileToNote(note1, testImagePath); | ||||
| @@ -59,7 +55,7 @@ describe('models_Resource', function() { | ||||
| 		expect(!ls.id).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should resize the resource if the image is below the required dimensions', asyncTest(async () => { | ||||
| 	it('should resize the resource if the image is below the required dimensions', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		const previousMax = Resource.IMAGE_MAX_DIMENSION; | ||||
| @@ -74,7 +70,7 @@ describe('models_Resource', function() { | ||||
| 		expect(newStat.size < originalStat.size).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not resize the resource if the image is below the required dimensions', asyncTest(async () => { | ||||
| 	it('should not resize the resource if the image is below the required dimensions', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		await shim.attachFileToNote(note1, testImagePath); | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const NoteTag = require('@joplin/lib/models/NoteTag.js'); | ||||
| @@ -11,10 +11,6 @@ const Revision = require('@joplin/lib/models/Revision.js'); | ||||
| const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('models_Revision', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| @@ -23,7 +19,7 @@ describe('models_Revision', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should create patches of text and apply it', asyncTest(async () => { | ||||
| 	it('should create patches of text and apply it', (async () => { | ||||
| 		const note1 = await Note.save({ body: 'my note\nsecond line' }); | ||||
|  | ||||
| 		const patch = Revision.createTextPatch(note1.body, 'my new note\nsecond line'); | ||||
| @@ -32,7 +28,7 @@ describe('models_Revision', function() { | ||||
| 		expect(merged).toBe('my new note\nsecond line'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create patches of objects and apply it', asyncTest(async () => { | ||||
| 	it('should create patches of objects and apply it', (async () => { | ||||
| 		const oldObject = { | ||||
| 			one: '123', | ||||
| 			two: '456', | ||||
| @@ -50,7 +46,7 @@ describe('models_Revision', function() { | ||||
| 		expect(JSON.stringify(merged)).toBe(JSON.stringify(newObject)); | ||||
| 	})); | ||||
|  | ||||
| 	it('should move target revision to the top', asyncTest(async () => { | ||||
| 	it('should move target revision to the top', (async () => { | ||||
| 		const revs = [ | ||||
| 			{ id: '123' }, | ||||
| 			{ id: '456' }, | ||||
| @@ -69,7 +65,7 @@ describe('models_Revision', function() { | ||||
| 		expect(newRevs[2].id).toBe('789'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create patch stats', asyncTest(async () => { | ||||
| 	it('should create patch stats', (async () => { | ||||
| 		const tests = [ | ||||
| 			{ | ||||
| 				patch: `@@ -625,16 +625,48 @@ | ||||
|   | ||||
| @@ -1,10 +1,6 @@ | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
|  | ||||
| const { asyncTest, setupDatabaseAndSynchronizer, switchClient, expectThrow, expectNotThrow } = require('./test-utils.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at models_Setting: Promise', p, 'reason:', reason); | ||||
| }); | ||||
| const { setupDatabaseAndSynchronizer, switchClient, expectThrow, expectNotThrow } = require('./test-utils.js'); | ||||
|  | ||||
| describe('models_Setting', function() { | ||||
|  | ||||
| @@ -14,7 +10,7 @@ describe('models_Setting', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should return only sub-values', asyncTest(async () => { | ||||
| 	it('should return only sub-values', (async () => { | ||||
| 		const settings = { | ||||
| 			'sync.5.path': 'http://example.com', | ||||
| 			'sync.5.username': 'testing', | ||||
| @@ -29,7 +25,7 @@ describe('models_Setting', function() { | ||||
| 		expect('username' in output).toBe(false); | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow registering new settings dynamically', asyncTest(async () => { | ||||
| 	it('should allow registering new settings dynamically', (async () => { | ||||
| 		await expectThrow(async () => Setting.setValue('myCustom', '123')); | ||||
|  | ||||
| 		await Setting.registerSetting('myCustom', { | ||||
| @@ -43,7 +39,7 @@ describe('models_Setting', function() { | ||||
| 		expect(Setting.value('myCustom')).toBe('123'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not clear old custom settings', asyncTest(async () => { | ||||
| 	it('should not clear old custom settings', (async () => { | ||||
| 		// In general the following should work: | ||||
| 		// | ||||
| 		// - Plugin register a new setting | ||||
| @@ -85,7 +81,7 @@ describe('models_Setting', function() { | ||||
| 		expect(Setting.value('myCustom')).toBe('123'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return values with correct type for custom settings', asyncTest(async () => { | ||||
| 	it('should return values with correct type for custom settings', (async () => { | ||||
| 		await Setting.registerSetting('myCustom', { | ||||
| 			public: true, | ||||
| 			value: 123, | ||||
| @@ -108,7 +104,7 @@ describe('models_Setting', function() { | ||||
| 		expect(Setting.value('myCustom')).toBe(456); | ||||
| 	})); | ||||
|  | ||||
| 	it('should validate registered keys', asyncTest(async () => { | ||||
| 	it('should validate registered keys', (async () => { | ||||
| 		const md = { | ||||
| 			public: true, | ||||
| 			value: 'default', | ||||
| @@ -124,7 +120,7 @@ describe('models_Setting', function() { | ||||
| 		await expectNotThrow(async () => await Setting.registerSetting('so-ARE-dashes_123', md)); | ||||
| 	})); | ||||
|  | ||||
| 	it('should register new sections', asyncTest(async () => { | ||||
| 	it('should register new sections', (async () => { | ||||
| 		await Setting.registerSection('mySection', { | ||||
| 			label: 'My section', | ||||
| 		}); | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const NoteTag = require('@joplin/lib/models/NoteTag.js'); | ||||
| @@ -10,10 +10,6 @@ const Tag = require('@joplin/lib/models/Tag.js'); | ||||
| const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at models_Tag: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('models_Tag', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| @@ -22,7 +18,7 @@ describe('models_Tag', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should add tags by title', asyncTest(async () => { | ||||
| 	it('should add tags by title', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
|  | ||||
| @@ -32,7 +28,7 @@ describe('models_Tag', function() { | ||||
| 		expect(noteTags.length).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not allow renaming tag to existing tag names', asyncTest(async () => { | ||||
| 	it('should not allow renaming tag to existing tag names', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
|  | ||||
| @@ -44,7 +40,7 @@ describe('models_Tag', function() { | ||||
| 		expect(hasThrown).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not return tags without notes', asyncTest(async () => { | ||||
| 	it('should not return tags without notes', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		await Tag.setNoteTagsByTitles(note1.id, ['un']); | ||||
| @@ -58,7 +54,7 @@ describe('models_Tag', function() { | ||||
| 		expect(tags.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return tags with note counts', asyncTest(async () => { | ||||
| 	it('should return tags with note counts', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		const note2 = await Note.save({ title: 'ma 2nd note', parent_id: folder1.id }); | ||||
| @@ -81,7 +77,7 @@ describe('models_Tag', function() { | ||||
| 		expect(tags.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should load individual tags with note count', asyncTest(async () => { | ||||
| 	it('should load individual tags with note count', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| 		const note2 = await Note.save({ title: 'ma 2nd note', parent_id: folder1.id }); | ||||
| @@ -96,7 +92,7 @@ describe('models_Tag', function() { | ||||
| 		expect(tagWithCount.note_count).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should get common tags for set of notes', asyncTest(async () => { | ||||
| 	it('should get common tags for set of notes', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const taga = await Tag.save({ title: 'mytaga' }); | ||||
| 		const tagb = await Tag.save({ title: 'mytagb' }); | ||||
|   | ||||
| @@ -2,11 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const { extractExecutablePath, quotePath, unquotePath, friendlySafeFilename, toFileProtocolPath } = require('@joplin/lib/path-utils'); | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
|  | ||||
| describe('pathUtils', function() { | ||||
|  | ||||
| @@ -14,7 +10,7 @@ describe('pathUtils', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should create friendly safe filename', asyncTest(async () => { | ||||
| 	it('should create friendly safe filename', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['生活', '生活'], | ||||
| 			['not/good', 'not_good'], | ||||
| @@ -35,7 +31,7 @@ describe('pathUtils', function() { | ||||
| 		expect(!!friendlySafeFilename('...')).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should quote and unquote paths', asyncTest(async () => { | ||||
| 	it('should quote and unquote paths', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['', ''], | ||||
| 			['/my/path', '/my/path'], | ||||
| @@ -52,7 +48,7 @@ describe('pathUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should extract executable path from command', asyncTest(async () => { | ||||
| 	it('should extract executable path from command', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['', ''], | ||||
| 			['/my/cmd -some -args', '/my/cmd'], | ||||
| @@ -68,7 +64,7 @@ describe('pathUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should create correct fileURL syntax', asyncTest(async () => { | ||||
| 	it('should create correct fileURL syntax', (async () => { | ||||
| 		const testCases_win32 = [ | ||||
| 			['C:\\handle\\space test', 'file:///C:/handle/space+test'], | ||||
| 			['C:\\escapeplus\\+', 'file:///C:/escapeplus/%2B'], | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
|  | ||||
| const { setupDatabaseAndSynchronizer, switchClient, asyncTest, createNTestNotes, createNTestFolders, createNTestTags } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient, createNTestNotes, createNTestFolders, createNTestTags } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Tag = require('@joplin/lib/models/Tag.js'); | ||||
| @@ -101,7 +101,7 @@ describe('reducer', function() { | ||||
| 	}); | ||||
|  | ||||
| 	// tests for NOTE_DELETE | ||||
| 	it('should delete selected note', asyncTest(async () => { | ||||
| 	it('should delete selected note', (async () => { | ||||
| 		// create 1 folder | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		// create 5 notes | ||||
| @@ -122,7 +122,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete selected note at top', asyncTest(async () => { | ||||
| 	it('should delete selected note at top', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [1]); | ||||
| @@ -136,7 +136,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete last remaining note', asyncTest(async () => { | ||||
| 	it('should delete last remaining note', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(1, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [0]); | ||||
| @@ -150,7 +150,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete selected note at bottom', asyncTest(async () => { | ||||
| 	it('should delete selected note at bottom', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [4]); | ||||
| @@ -164,7 +164,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete note when a note below is selected', asyncTest(async () => { | ||||
| 	it('should delete note when a note below is selected', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [3]); | ||||
| @@ -178,7 +178,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete note when a note above is selected', asyncTest(async () => { | ||||
| 	it('should delete note when a note above is selected', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [1]); | ||||
| @@ -192,7 +192,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete selected notes', asyncTest(async () => { | ||||
| 	it('should delete selected notes', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [1,2]); | ||||
| @@ -207,7 +207,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete note when a notes below it are selected', asyncTest(async () => { | ||||
| 	it('should delete note when a notes below it are selected', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [3,4]); | ||||
| @@ -221,7 +221,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete note when a notes above it are selected', asyncTest(async () => { | ||||
| 	it('should delete note when a notes above it are selected', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [1,2]); | ||||
| @@ -235,7 +235,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete notes at end', asyncTest(async () => { | ||||
| 	it('should delete notes at end', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [3,4]); | ||||
| @@ -250,7 +250,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete notes when non-contiguous selection', asyncTest(async () => { | ||||
| 	it('should delete notes when non-contiguous selection', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 0, notes, [0,2,4]); | ||||
| @@ -267,7 +267,7 @@ describe('reducer', function() { | ||||
| 	})); | ||||
|  | ||||
| 	// tests for FOLDER_DELETE | ||||
| 	it('should delete selected notebook', asyncTest(async () => { | ||||
| 	it('should delete selected notebook', (async () => { | ||||
| 		const folders = await createNTestFolders(5); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 2, notes, [2]); | ||||
| @@ -281,7 +281,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedFolderId).toEqual(expected.selectedIds[0]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete notebook when a book above is selected', asyncTest(async () => { | ||||
| 	it('should delete notebook when a book above is selected', (async () => { | ||||
| 		const folders = await createNTestFolders(5); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 1, notes, [2]); | ||||
| @@ -295,7 +295,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedFolderId).toEqual(expected.selectedIds[0]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete notebook when a book below is selected', asyncTest(async () => { | ||||
| 	it('should delete notebook when a book below is selected', (async () => { | ||||
| 		const folders = await createNTestFolders(5); | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
| 		let state = initTestState(folders, 4, notes, [2]); | ||||
| @@ -310,7 +310,7 @@ describe('reducer', function() { | ||||
| 	})); | ||||
|  | ||||
| 	// tests for TAG_DELETE | ||||
| 	it('should delete selected tag', asyncTest(async () => { | ||||
| 	it('should delete selected tag', (async () => { | ||||
| 		const tags = await createNTestTags(5); | ||||
| 		let state = initTestState(null, null, null, null, tags, [2]); | ||||
|  | ||||
| @@ -323,7 +323,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedTagId).toEqual(expected.selectedIds[0]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete tag when a tag above is selected', asyncTest(async () => { | ||||
| 	it('should delete tag when a tag above is selected', (async () => { | ||||
| 		const tags = await createNTestTags(5); | ||||
| 		let state = initTestState(null, null, null, null, tags, [2]); | ||||
|  | ||||
| @@ -336,7 +336,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedTagId).toEqual(expected.selectedIds[0]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete tag when a tag below is selected', asyncTest(async () => { | ||||
| 	it('should delete tag when a tag below is selected', (async () => { | ||||
| 		const tags = await createNTestTags(5); | ||||
| 		let state = initTestState(null, null, null, null, tags, [2]); | ||||
|  | ||||
| @@ -349,7 +349,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedTagId).toEqual(expected.selectedIds[0]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should select all notes', asyncTest(async () => { | ||||
| 	it('should select all notes', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		const notes = []; | ||||
| 		for (let i = 0; i < folders.length; i++) { | ||||
| @@ -372,7 +372,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual(expected.selectedIds); | ||||
| 	})); | ||||
|  | ||||
| 	it('should remove deleted note from history', asyncTest(async () => { | ||||
| 	it('should remove deleted note from history', (async () => { | ||||
|  | ||||
| 		// create 1 folder | ||||
| 		const folders = await createNTestFolders(1); | ||||
| @@ -399,7 +399,7 @@ describe('reducer', function() { | ||||
| 		expect(getIds(state.backwardHistoryNotes)).not.toContain(notes[2].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should remove all notes of a deleted notebook from history', asyncTest(async () => { | ||||
| 	it('should remove all notes of a deleted notebook from history', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		const notes = []; | ||||
| 		for (let i = 0; i < folders.length; i++) { | ||||
| @@ -421,7 +421,7 @@ describe('reducer', function() { | ||||
| 		expect(getIds(state.backwardHistoryNotes)).toEqual([]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should maintain history correctly when going backward and forward', asyncTest(async () => { | ||||
| 	it('should maintain history correctly when going backward and forward', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		const notes = []; | ||||
| 		for (let i = 0; i < folders.length; i++) { | ||||
| @@ -454,7 +454,7 @@ describe('reducer', function() { | ||||
| 		expect(getIds(state.forwardHistoryNotes)).toEqual([]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should remember the last seen note of a notebook', asyncTest(async () => { | ||||
| 	it('should remember the last seen note of a notebook', (async () => { | ||||
| 		const folders = await createNTestFolders(2); | ||||
| 		const notes = []; | ||||
| 		for (let i = 0; i < folders.length; i++) { | ||||
| @@ -483,7 +483,7 @@ describe('reducer', function() { | ||||
|  | ||||
| 	})); | ||||
|  | ||||
| 	it('should ensure that history is free of adjacent duplicates', asyncTest(async () => { | ||||
| 	it('should ensure that history is free of adjacent duplicates', (async () => { | ||||
| 		// create 1 folder | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		// create 5 notes | ||||
| @@ -552,7 +552,7 @@ describe('reducer', function() { | ||||
| 		expect(state.selectedNoteIds).toEqual([notes[3].id]); | ||||
| 	})); | ||||
|  | ||||
| 	it('should ensure history max limit is maintained', asyncTest(async () => { | ||||
| 	it('should ensure history max limit is maintained', (async () => { | ||||
| 		const folders = await createNTestFolders(1); | ||||
| 		// create 5 notes | ||||
| 		const notes = await createNTestNotes(5, folders[0]); | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| import sandboxProxy, { Target } from '@joplin/lib/services/plugins/sandboxProxy'; | ||||
|  | ||||
| const { asyncTest, setupDatabaseAndSynchronizer, switchClient } = require('../../test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient } = require('../../test-utils.js'); | ||||
|  | ||||
| describe('services_plugins_sandboxProxy', function() { | ||||
|  | ||||
| @@ -10,7 +10,7 @@ describe('services_plugins_sandboxProxy', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should create a new sandbox proxy', asyncTest(async () => { | ||||
| 	it('should create a new sandbox proxy', (async () => { | ||||
| 		interface Result { | ||||
| 			path: string; | ||||
| 			args: any[]; | ||||
| @@ -33,7 +33,7 @@ describe('services_plugins_sandboxProxy', function() { | ||||
| 		expect(results[1].args.join('_')).toBe(''); | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow importing a namespace', asyncTest(async () => { | ||||
| 	it('should allow importing a namespace', (async () => { | ||||
| 		interface Result { | ||||
| 			path: string; | ||||
| 			args: any[]; | ||||
|   | ||||
| @@ -4,7 +4,7 @@ import CommandService, { CommandDeclaration, CommandRuntime } from '@joplin/lib/ | ||||
| import stateToWhenClauseContext from '@joplin/lib/services/commands/stateToWhenClauseContext'; | ||||
| import KeymapService from '@joplin/lib/services/KeymapService'; | ||||
|  | ||||
| const { asyncTest, setupDatabaseAndSynchronizer, switchClient, expectThrow, expectNotThrow } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient, expectThrow, expectNotThrow } = require('./test-utils.js'); | ||||
|  | ||||
| interface TestCommand { | ||||
| 	declaration: CommandDeclaration; | ||||
| @@ -52,7 +52,7 @@ describe('services_CommandService', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should create toolbar button infos from commands', asyncTest(async () => { | ||||
| 	it('should create toolbar button infos from commands', (async () => { | ||||
| 		const service = newService(); | ||||
| 		const toolbarButtonUtils = new ToolbarButtonUtils(service); | ||||
|  | ||||
| @@ -80,7 +80,7 @@ describe('services_CommandService', function() { | ||||
| 		expect(toolbarInfos[1].enabled).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should enable and disable toolbar buttons depending on state', asyncTest(async () => { | ||||
| 	it('should enable and disable toolbar buttons depending on state', (async () => { | ||||
| 		const service = newService(); | ||||
| 		const toolbarButtonUtils = new ToolbarButtonUtils(service); | ||||
|  | ||||
| @@ -103,7 +103,7 @@ describe('services_CommandService', function() { | ||||
| 		expect(toolbarInfos[1].enabled).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should enable commands by default', asyncTest(async () => { | ||||
| 	it('should enable commands by default', (async () => { | ||||
| 		const service = newService(); | ||||
|  | ||||
| 		registerCommand(service, createCommand('test1', { | ||||
| @@ -113,7 +113,7 @@ describe('services_CommandService', function() { | ||||
| 		expect(service.isEnabled('test1', {})).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return the same toolbarButtons array if nothing has changed', asyncTest(async () => { | ||||
| 	it('should return the same toolbarButtons array if nothing has changed', (async () => { | ||||
| 		const service = newService(); | ||||
| 		const toolbarButtonUtils = new ToolbarButtonUtils(service); | ||||
|  | ||||
| @@ -161,7 +161,7 @@ describe('services_CommandService', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should create menu items from commands', asyncTest(async () => { | ||||
| 	it('should create menu items from commands', (async () => { | ||||
| 		const service = newService(); | ||||
| 		const utils = new MenuUtils(service); | ||||
|  | ||||
| @@ -190,7 +190,7 @@ describe('services_CommandService', function() { | ||||
| 		expect(utils.commandsToMenuItems(['test1', 'test2'], onClick)).toBe(utils.commandsToMenuItems(['test1', 'test2'], onClick)); | ||||
| 	})); | ||||
|  | ||||
| 	it('should give menu item props from state', asyncTest(async () => { | ||||
| 	it('should give menu item props from state', (async () => { | ||||
| 		const service = newService(); | ||||
| 		const utils = new MenuUtils(service); | ||||
|  | ||||
| @@ -228,7 +228,7 @@ describe('services_CommandService', function() { | ||||
| 			.toBe(utils.commandsToMenuItemProps(['test1', 'test2'], { cond1: true, cond2: true })); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create stateful menu items', asyncTest(async () => { | ||||
| 	it('should create stateful menu items', (async () => { | ||||
| 		const service = newService(); | ||||
| 		const utils = new MenuUtils(service); | ||||
|  | ||||
| @@ -246,7 +246,7 @@ describe('services_CommandService', function() { | ||||
| 		expect(propValue).toBe('hello'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should throw an error for invalid when clause keys in dev mode', asyncTest(async () => { | ||||
| 	it('should throw an error for invalid when clause keys in dev mode', (async () => { | ||||
| 		const service = newService(); | ||||
|  | ||||
| 		registerCommand(service, createCommand('test1', { | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Tag = require('@joplin/lib/models/Tag.js'); | ||||
| @@ -14,10 +14,6 @@ const MasterKey = require('@joplin/lib/models/MasterKey'); | ||||
| const SyncTargetRegistry = require('@joplin/lib/SyncTargetRegistry.js'); | ||||
| const EncryptionService = require('@joplin/lib/services/EncryptionService.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at services_EncryptionService: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| let service = null; | ||||
|  | ||||
| describe('services_EncryptionService', function() { | ||||
| @@ -31,7 +27,7 @@ describe('services_EncryptionService', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should encode and decode header', asyncTest(async () => { | ||||
| 	it('should encode and decode header', (async () => { | ||||
| 		const header = { | ||||
| 			encryptionMethod: EncryptionService.METHOD_SJCL, | ||||
| 			masterKeyId: '01234568abcdefgh01234568abcdefgh', | ||||
| @@ -44,7 +40,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(objectsEqual(header, decodedHeader)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should generate and decrypt a master key', asyncTest(async () => { | ||||
| 	it('should generate and decrypt a master key', (async () => { | ||||
| 		const masterKey = await service.generateMasterKey('123456'); | ||||
| 		expect(!!masterKey.content).toBe(true); | ||||
|  | ||||
| @@ -61,7 +57,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(decryptedMasterKey.length).toBe(512); | ||||
| 	})); | ||||
|  | ||||
| 	it('should upgrade a master key', asyncTest(async () => { | ||||
| 	it('should upgrade a master key', (async () => { | ||||
| 		// Create an old style master key | ||||
| 		let masterKey = await service.generateMasterKey('123456', { | ||||
| 			encryptionMethod: EncryptionService.METHOD_SJCL_2, | ||||
| @@ -90,7 +86,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(plainTextFromOld).toBe(plainTextFromNew); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not upgrade master key if invalid password', asyncTest(async () => { | ||||
| 	it('should not upgrade master key if invalid password', (async () => { | ||||
| 		const masterKey = await service.generateMasterKey('123456', { | ||||
| 			encryptionMethod: EncryptionService.METHOD_SJCL_2, | ||||
| 		}); | ||||
| @@ -98,7 +94,7 @@ describe('services_EncryptionService', function() { | ||||
| 		const hasThrown = await checkThrowAsync(async () => await service.upgradeMasterKey(masterKey, '777')); | ||||
| 	})); | ||||
|  | ||||
| 	it('should require a checksum only for old master keys', asyncTest(async () => { | ||||
| 	it('should require a checksum only for old master keys', (async () => { | ||||
| 		const masterKey = await service.generateMasterKey('123456', { | ||||
| 			encryptionMethod: EncryptionService.METHOD_SJCL_2, | ||||
| 		}); | ||||
| @@ -107,7 +103,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(!!masterKey.content).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not require a checksum for new master keys', asyncTest(async () => { | ||||
| 	it('should not require a checksum for new master keys', (async () => { | ||||
| 		const masterKey = await service.generateMasterKey('123456', { | ||||
| 			encryptionMethod: EncryptionService.METHOD_SJCL_4, | ||||
| 		}); | ||||
| @@ -119,7 +115,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(decryptedMasterKey.length).toBe(512); | ||||
| 	})); | ||||
|  | ||||
| 	it('should throw an error if master key decryption fails', asyncTest(async () => { | ||||
| 	it('should throw an error if master key decryption fails', (async () => { | ||||
| 		const masterKey = await service.generateMasterKey('123456', { | ||||
| 			encryptionMethod: EncryptionService.METHOD_SJCL_4, | ||||
| 		}); | ||||
| @@ -129,7 +125,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(hasThrown).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return the master keys that need an upgrade', asyncTest(async () => { | ||||
| 	it('should return the master keys that need an upgrade', (async () => { | ||||
| 		const masterKey1 = await MasterKey.save(await service.generateMasterKey('123456', { | ||||
| 			encryptionMethod: EncryptionService.METHOD_SJCL_2, | ||||
| 		})); | ||||
| @@ -146,7 +142,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(needUpgrade.map(k => k.id).sort()).toEqual([masterKey1.id, masterKey2.id].sort()); | ||||
| 	})); | ||||
|  | ||||
| 	it('should encrypt and decrypt with a master key', asyncTest(async () => { | ||||
| 	it('should encrypt and decrypt with a master key', (async () => { | ||||
| 		let masterKey = await service.generateMasterKey('123456'); | ||||
| 		masterKey = await MasterKey.save(masterKey); | ||||
|  | ||||
| @@ -168,7 +164,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(plainText2 === veryLongSecret).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should decrypt various encryption methods', asyncTest(async () => { | ||||
| 	it('should decrypt various encryption methods', (async () => { | ||||
| 		let masterKey = await service.generateMasterKey('123456'); | ||||
| 		masterKey = await MasterKey.save(masterKey); | ||||
| 		await service.loadMasterKey_(masterKey, '123456', true); | ||||
| @@ -194,7 +190,7 @@ describe('services_EncryptionService', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should fail to decrypt if master key not present', asyncTest(async () => { | ||||
| 	it('should fail to decrypt if master key not present', (async () => { | ||||
| 		let masterKey = await service.generateMasterKey('123456'); | ||||
| 		masterKey = await MasterKey.save(masterKey); | ||||
|  | ||||
| @@ -210,7 +206,7 @@ describe('services_EncryptionService', function() { | ||||
| 	})); | ||||
|  | ||||
|  | ||||
| 	it('should fail to decrypt if data tampered with', asyncTest(async () => { | ||||
| 	it('should fail to decrypt if data tampered with', (async () => { | ||||
| 		let masterKey = await service.generateMasterKey('123456'); | ||||
| 		masterKey = await MasterKey.save(masterKey); | ||||
|  | ||||
| @@ -224,7 +220,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(hasThrown).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should encrypt and decrypt notes and folders', asyncTest(async () => { | ||||
| 	it('should encrypt and decrypt notes and folders', (async () => { | ||||
| 		let masterKey = await service.generateMasterKey('123456'); | ||||
| 		masterKey = await MasterKey.save(masterKey); | ||||
| 		await service.loadMasterKey_(masterKey, '123456', true); | ||||
| @@ -255,7 +251,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(decryptedNote.parent_id).toBe(note.parent_id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should encrypt and decrypt files', asyncTest(async () => { | ||||
| 	it('should encrypt and decrypt files', (async () => { | ||||
| 		let masterKey = await service.generateMasterKey('123456'); | ||||
| 		masterKey = await MasterKey.save(masterKey); | ||||
| 		await service.loadMasterKey_(masterKey, '123456', true); | ||||
| @@ -271,7 +267,7 @@ describe('services_EncryptionService', function() { | ||||
| 		expect(fileContentEqual(sourcePath, decryptedPath)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should encrypt invalid UTF-8 data', asyncTest(async () => { | ||||
| 	it('should encrypt invalid UTF-8 data', (async () => { | ||||
| 		let masterKey = await service.generateMasterKey('123456'); | ||||
| 		masterKey = await MasterKey.save(masterKey); | ||||
|  | ||||
|   | ||||
| @@ -3,7 +3,7 @@ import { CustomExportContext, CustomImportContext, Module, ModuleType } from '@j | ||||
| import shim from '@joplin/lib/shim'; | ||||
|  | ||||
|  | ||||
| const { asyncTest, fileContentEqual, setupDatabaseAndSynchronizer, switchClient, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabaseAndSynchronizer, switchClient, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Tag = require('@joplin/lib/models/Tag.js'); | ||||
| @@ -11,10 +11,6 @@ const Resource = require('@joplin/lib/models/Resource.js'); | ||||
| const fs = require('fs-extra'); | ||||
| const ArrayUtils = require('@joplin/lib/ArrayUtils'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at services_InteropService: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| function exportDir() { | ||||
| 	return `${__dirname}/export`; | ||||
| } | ||||
| @@ -41,7 +37,7 @@ describe('services_InteropService', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should export and import folders', asyncTest(async () => { | ||||
| 	it('should export and import folders', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		let folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		folder1 = await Folder.load(folder1.id); | ||||
| @@ -76,7 +72,7 @@ describe('services_InteropService', function() { | ||||
| 		fieldsEqual(folder3, folder1, fieldNames); | ||||
| 	})); | ||||
|  | ||||
| 	it('should import folders and de-duplicate titles when needed', asyncTest(async () => { | ||||
| 	it('should import folders and de-duplicate titles when needed', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const folder1 = await Folder.save({ title: 'folder' }); | ||||
| 		const folder2 = await Folder.save({ title: 'folder' }); | ||||
| @@ -92,7 +88,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(allFolders.map((f: any) => f.title).sort().join(' - ')).toBe('folder - folder (1)'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should import folders, and only de-duplicate titles when needed', asyncTest(async () => { | ||||
| 	it('should import folders, and only de-duplicate titles when needed', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const folder2 = await Folder.save({ title: 'folder2' }); | ||||
| @@ -115,7 +111,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(importedSub2.title).toBe('Sub'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export and import folders and notes', asyncTest(async () => { | ||||
| 	it('should export and import folders and notes', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| @@ -154,7 +150,7 @@ describe('services_InteropService', function() { | ||||
| 		fieldsEqual(note2, note3, fieldNames); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export and import notes to specific folder', asyncTest(async () => { | ||||
| 	it('should export and import notes to specific folder', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id }); | ||||
| @@ -173,7 +169,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(await checkThrowAsync(async () => await service.import({ path: filePath, destinationFolderId: 'oops' }))).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export and import tags', asyncTest(async () => { | ||||
| 	it('should export and import tags', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const filePath = `${exportDir()}/test.jex`; | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -213,7 +209,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(noteIds.length).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export and import resources', asyncTest(async () => { | ||||
| 	it('should export and import resources', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const filePath = `${exportDir()}/test.jex`; | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -249,7 +245,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(fileContentEqual(resourcePath1, resourcePath2)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export and import single notes', asyncTest(async () => { | ||||
| 	it('should export and import single notes', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const filePath = `${exportDir()}/test.jex`; | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -269,7 +265,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(folder2.title).toBe('test'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export and import single folders', asyncTest(async () => { | ||||
| 	it('should export and import single folders', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const filePath = `${exportDir()}/test.jex`; | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -289,7 +285,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(folder2.title).toBe('folder1'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export and import folder and its sub-folders', asyncTest(async () => { | ||||
| 	it('should export and import folder and its sub-folders', (async () => { | ||||
|  | ||||
| 		const service = InteropService.instance(); | ||||
| 		const filePath = `${exportDir()}/test.jex`; | ||||
| @@ -324,7 +320,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(note1_2.parent_id).toBe(folder4_2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export and import links to notes', asyncTest(async () => { | ||||
| 	it('should export and import links to notes', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const filePath = `${exportDir()}/test.jex`; | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -348,7 +344,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(note2_2.body.indexOf(note1_2.id) >= 0).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export selected notes in md format', asyncTest(async () => { | ||||
| 	it('should export selected notes in md format', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		let note11 = await Note.save({ title: 'title note11', parent_id: folder1.id }); | ||||
| @@ -377,7 +373,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(await shim.fsDriver().exists(`${outDir}/folder3`)).toBe(false); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export MD with unicode filenames', asyncTest(async () => { | ||||
| 	it('should export MD with unicode filenames', (async () => { | ||||
| 		const service = InteropService.instance(); | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const folder2 = await Folder.save({ title: 'ジョプリン' }); | ||||
| @@ -402,7 +398,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(await shim.fsDriver().exists(`${outDir}/ジョプリン/ジョプリン.md`)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export a notebook as MD', asyncTest(async () => { | ||||
| 	it('should export a notebook as MD', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'testexportfolder' }); | ||||
| 		await Note.save({ title: 'textexportnote1', parent_id: folder1.id }); | ||||
| 		await Note.save({ title: 'textexportnote2', parent_id: folder1.id }); | ||||
| @@ -419,7 +415,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(await shim.fsDriver().exists(`${exportDir()}/testexportfolder/textexportnote2.md`)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should export conflict notes', asyncTest(async () => { | ||||
| 	it('should export conflict notes', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'testexportfolder' }); | ||||
| 		await Note.save({ title: 'textexportnote1', parent_id: folder1.id, is_conflict: 1 }); | ||||
| 		await Note.save({ title: 'textexportnote2', parent_id: folder1.id }); | ||||
| @@ -449,7 +445,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(await shim.fsDriver().exists(`${exportDir()}/testexportfolder/textexportnote2.md`)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not try to export folders with a non-existing parent', asyncTest(async () => { | ||||
| 	it('should not try to export folders with a non-existing parent', (async () => { | ||||
| 		// Handles and edge case where user has a folder but this folder with a parent | ||||
| 		// that doesn't exist. Can happen for example in this case: | ||||
| 		// | ||||
| @@ -471,7 +467,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(result.warnings.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow registering new import modules', asyncTest(async () => { | ||||
| 	it('should allow registering new import modules', (async () => { | ||||
| 		const testImportFilePath = `${exportDir()}/testImport${Math.random()}.test`; | ||||
| 		await shim.fsDriver().writeFile(testImportFilePath, 'test', 'utf8'); | ||||
|  | ||||
| @@ -504,7 +500,7 @@ describe('services_InteropService', function() { | ||||
| 		expect(result.sourcePath).toBe(testImportFilePath); | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow registering new export modules', asyncTest(async () => { | ||||
| 	it('should allow registering new export modules', (async () => { | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| 		const note1 = await Note.save({ title: 'note1', parent_id: folder1.id }); | ||||
| 		await Note.save({ title: 'note2', parent_id: folder1.id }); | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const fs = require('fs-extra'); | ||||
| const { asyncTest, setupDatabaseAndSynchronizer, switchClient } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient } = require('./test-utils.js'); | ||||
| const InteropService_Exporter_Md = require('@joplin/lib/services/interop/InteropService_Exporter_Md').default; | ||||
| const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| @@ -12,10 +12,6 @@ const shim = require('@joplin/lib/shim').default; | ||||
|  | ||||
| const exportDir = `${__dirname}/export`; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('services_InteropService_Exporter_Md', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| @@ -27,14 +23,14 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should create resources directory', asyncTest(async () => { | ||||
| 	it('should create resources directory', (async () => { | ||||
| 		const service = new InteropService_Exporter_Md(); | ||||
| 		await service.init(exportDir); | ||||
|  | ||||
| 		expect(await shim.fsDriver().exists(`${exportDir}/_resources/`)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create note paths and add them to context', asyncTest(async () => { | ||||
| 	it('should create note paths and add them to context', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
| @@ -76,7 +72,7 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		expect(exporter.context().notePaths[note3.id]).toBe('folder2/note3.md'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle duplicate note names', asyncTest(async () => { | ||||
| 	it('should handle duplicate note names', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
| @@ -103,7 +99,7 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		expect(exporter.context().notePaths[note1_2.id]).toBe('folder1/note1 (1).md'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not override existing files', asyncTest(async () => { | ||||
| 	it('should not override existing files', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
| @@ -130,7 +126,7 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		expect(exporter.context().notePaths[note1.id]).toBe('folder1/note1 (1).md'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should save resource files in _resource directory', asyncTest(async () => { | ||||
| 	it('should save resource files in _resource directory', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
| @@ -167,7 +163,7 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		expect(await shim.fsDriver().exists(`${exportDir}/_resources/${Resource.filename(resource2)}`)).toBe(true, 'Resource file should be copied to _resources directory.'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create folders in fs', asyncTest(async () => { | ||||
| 	it('should create folders in fs', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
| @@ -198,7 +194,7 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		expect(await shim.fsDriver().exists(`${exportDir}/folder1/folder3`)).toBe(true, 'Folder should be created in filesystem.'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should save notes in fs', asyncTest(async () => { | ||||
| 	it('should save notes in fs', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
| @@ -235,7 +231,7 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		expect(await shim.fsDriver().exists(`${exportDir}/${exporter.context().notePaths[note3.id]}`)).toBe(true, 'File should be saved in filesystem.'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should replace resource ids with relative paths', asyncTest(async () => { | ||||
| 	it('should replace resource ids with relative paths', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
| @@ -280,7 +276,7 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		expect(note2_body).toContain('](../../_resources/resource2.jpg)', 'Resource id should be replaced with a relative path.'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should replace note ids with relative paths', asyncTest(async () => { | ||||
| 	it('should replace note ids with relative paths', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
| @@ -332,7 +328,7 @@ describe('services_InteropService_Exporter_Md', function() { | ||||
| 		expect(note3_body).toContain('](../folder1/folder2/note2.md)', 'Resource id should be replaced with a relative path.'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should url encode relative note links', asyncTest(async () => { | ||||
| 	it('should url encode relative note links', (async () => { | ||||
| 		const exporter = new InteropService_Exporter_Md(); | ||||
| 		await exporter.init(exportDir); | ||||
|  | ||||
|   | ||||
| @@ -1,13 +1,9 @@ | ||||
| /* eslint-disable no-unused-vars */ | ||||
|  | ||||
|  | ||||
| const { asyncTest, fileContentEqual, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const KvStore = require('@joplin/lib/services/KvStore').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| function setupStore() { | ||||
| 	const store = KvStore.instance(); | ||||
| 	store.setDb(db()); | ||||
| @@ -22,7 +18,7 @@ describe('services_KvStore', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should set and get values', asyncTest(async () => { | ||||
| 	it('should set and get values', (async () => { | ||||
| 		const store = setupStore(); | ||||
| 		await store.setValue('a', 123); | ||||
| 		expect(await store.value('a')).toBe(123); | ||||
| @@ -41,7 +37,7 @@ describe('services_KvStore', function() { | ||||
| 		expect(await store.value('b')).toBe(789); | ||||
| 	})); | ||||
|  | ||||
| 	it('should set and get values with the right type', asyncTest(async () => { | ||||
| 	it('should set and get values with the right type', (async () => { | ||||
| 		const store = setupStore(); | ||||
| 		await store.setValue('string', 'something'); | ||||
| 		await store.setValue('int', 123); | ||||
| @@ -49,7 +45,7 @@ describe('services_KvStore', function() { | ||||
| 		expect(await store.value('int')).toBe(123); | ||||
| 	})); | ||||
|  | ||||
| 	it('should increment values', asyncTest(async () => { | ||||
| 	it('should increment values', (async () => { | ||||
| 		const store = setupStore(); | ||||
| 		await store.setValue('int', 1); | ||||
| 		const newValue = await store.incValue('int'); | ||||
| @@ -61,12 +57,12 @@ describe('services_KvStore', function() { | ||||
| 		expect(await store.countKeys()).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle non-existent values', asyncTest(async () => { | ||||
| 	it('should handle non-existent values', (async () => { | ||||
| 		const store = setupStore(); | ||||
| 		expect(await store.value('nope')).toBe(null); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete values', asyncTest(async () => { | ||||
| 	it('should delete values', (async () => { | ||||
| 		const store = setupStore(); | ||||
| 		await store.setValue('int', 1); | ||||
| 		expect(await store.countKeys()).toBe(1); | ||||
| @@ -76,7 +72,7 @@ describe('services_KvStore', function() { | ||||
| 		await store.deleteValue('int'); // That should not throw | ||||
| 	})); | ||||
|  | ||||
| 	it('should increment in an atomic way', asyncTest(async () => { | ||||
| 	it('should increment in an atomic way', (async () => { | ||||
| 		const store = setupStore(); | ||||
| 		await store.setValue('int', 0); | ||||
|  | ||||
| @@ -90,7 +86,7 @@ describe('services_KvStore', function() { | ||||
| 		expect(await store.value('int')).toBe(20); | ||||
| 	})); | ||||
|  | ||||
| 	it('should search by prefix', asyncTest(async () => { | ||||
| 	it('should search by prefix', (async () => { | ||||
| 		const store = setupStore(); | ||||
| 		await store.setValue('testing:1', 1); | ||||
| 		await store.setValue('testing:2', 2); | ||||
|   | ||||
| @@ -6,14 +6,10 @@ import shim from '@joplin/lib/shim'; | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
|  | ||||
| const fs = require('fs-extra'); | ||||
| const { asyncTest, expectNotThrow, setupDatabaseAndSynchronizer, switchClient, expectThrow, createTempDir } = require('./test-utils.js'); | ||||
| const { expectNotThrow, setupDatabaseAndSynchronizer, switchClient, expectThrow, createTempDir } = require('./test-utils.js'); | ||||
| const Note = require('@joplin/lib/models/Note'); | ||||
| const Folder = require('@joplin/lib/models/Folder'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at services_PluginService: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| const testPluginDir = `${__dirname}/../tests/support/plugins`; | ||||
|  | ||||
| function newPluginService(appVersion: string = '1.4') { | ||||
| @@ -41,7 +37,7 @@ describe('services_PluginService', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should load and run a simple plugin', asyncTest(async () => { | ||||
| 	it('should load and run a simple plugin', (async () => { | ||||
| 		const service = newPluginService(); | ||||
| 		await service.loadAndRunPlugins([`${testPluginDir}/simple`], {}); | ||||
|  | ||||
| @@ -57,13 +53,13 @@ describe('services_PluginService', function() { | ||||
| 		expect(allNotes[0].parent_id).toBe(allFolders[0].id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should load and run a simple plugin and handle trailing slash', asyncTest(async () => { | ||||
| 	it('should load and run a simple plugin and handle trailing slash', (async () => { | ||||
| 		const service = newPluginService(); | ||||
| 		await service.loadAndRunPlugins([`${testPluginDir}/simple/`], {}); | ||||
| 		expect(() => service.pluginById('org.joplinapp.plugins.Simple')).not.toThrowError(); | ||||
| 	})); | ||||
|  | ||||
| 	it('should load and run a plugin that uses external packages', asyncTest(async () => { | ||||
| 	it('should load and run a plugin that uses external packages', (async () => { | ||||
| 		const service = newPluginService(); | ||||
| 		await service.loadAndRunPlugins([`${testPluginDir}/withExternalModules`], {}); | ||||
| 		expect(() => service.pluginById('org.joplinapp.plugins.ExternalModuleDemo')).not.toThrowError(); | ||||
| @@ -76,7 +72,7 @@ describe('services_PluginService', function() { | ||||
| 		expect(allFolders[0].title).toBe('  foo'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should load multiple plugins from a directory', asyncTest(async () => { | ||||
| 	it('should load multiple plugins from a directory', (async () => { | ||||
| 		const service = newPluginService(); | ||||
| 		await service.loadAndRunPlugins(`${testPluginDir}/multi_plugins`, {}); | ||||
|  | ||||
| @@ -90,7 +86,7 @@ describe('services_PluginService', function() { | ||||
| 		expect(allFolders.map((f: any) => f.title).sort().join(', ')).toBe('multi - simple1, multi - simple2'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should load plugins from JS bundles', asyncTest(async () => { | ||||
| 	it('should load plugins from JS bundles', (async () => { | ||||
| 		const service = newPluginService(); | ||||
|  | ||||
| 		const plugin = await service.loadPluginFromJsBundle('/tmp', ` | ||||
| @@ -123,21 +119,21 @@ describe('services_PluginService', function() { | ||||
| 		expect(allFolders.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should load plugins from JS bundle files', asyncTest(async () => { | ||||
| 	it('should load plugins from JS bundle files', (async () => { | ||||
| 		const service = newPluginService(); | ||||
| 		await service.loadAndRunPlugins(`${testPluginDir}/jsbundles`, {}); | ||||
| 		expect(!!service.pluginById('org.joplinapp.plugins.JsBundleDemo')).toBe(true); | ||||
| 		expect((await Folder.all()).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should load plugins from JPL archive', asyncTest(async () => { | ||||
| 	it('should load plugins from JPL archive', (async () => { | ||||
| 		const service = newPluginService(); | ||||
| 		await service.loadAndRunPlugins([`${testPluginDir}/jpl_test/org.joplinapp.FirstJplPlugin.jpl`], {}); | ||||
| 		expect(!!service.pluginById('org.joplinapp.FirstJplPlugin')).toBe(true); | ||||
| 		expect((await Folder.all()).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should validate JS bundles', asyncTest(async () => { | ||||
| 	it('should validate JS bundles', (async () => { | ||||
| 		const invalidJsBundles = [ | ||||
| 			` | ||||
| 				/* joplin-manifest: | ||||
| @@ -170,7 +166,7 @@ describe('services_PluginService', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should register a Markdown-it plugin', asyncTest(async () => { | ||||
| 	it('should register a Markdown-it plugin', (async () => { | ||||
| 		const tempDir = await createTempDir(); | ||||
|  | ||||
| 		const contentScriptPath = `${tempDir}/markdownItTestPlugin.js`; | ||||
| @@ -222,7 +218,7 @@ describe('services_PluginService', function() { | ||||
| 		await shim.fsDriver().remove(tempDir); | ||||
| 	})); | ||||
|  | ||||
| 	it('should enable and disable plugins depending on what app version they support', asyncTest(async () => { | ||||
| 	it('should enable and disable plugins depending on what app version they support', (async () => { | ||||
| 		const pluginScript = ` | ||||
| 			/* joplin-manifest: | ||||
| 			{ | ||||
| @@ -260,7 +256,7 @@ describe('services_PluginService', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should install a plugin', asyncTest(async () => { | ||||
| 	it('should install a plugin', (async () => { | ||||
| 		const service = newPluginService(); | ||||
| 		const pluginPath = `${testPluginDir}/jpl_test/org.joplinapp.FirstJplPlugin.jpl`; | ||||
| 		await service.installPlugin(pluginPath); | ||||
| @@ -268,7 +264,7 @@ describe('services_PluginService', function() { | ||||
| 		expect(await fs.existsSync(installedPluginPath)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should rename the plugin archive to the right name', asyncTest(async () => { | ||||
| 	it('should rename the plugin archive to the right name', (async () => { | ||||
| 		const tempDir = await createTempDir(); | ||||
| 		const service = newPluginService(); | ||||
| 		const pluginPath = `${testPluginDir}/jpl_test/org.joplinapp.FirstJplPlugin.jpl`; | ||||
|   | ||||
| @@ -3,7 +3,7 @@ import NoteResource from '@joplin/lib/models/NoteResource'; | ||||
| import ResourceService from '@joplin/lib/services/ResourceService'; | ||||
| import shim from '@joplin/lib/shim'; | ||||
|  | ||||
| const { asyncTest, resourceService, decryptionWorker, encryptionService, loadEncryptionMasterKey, allSyncTargetItemsEncrypted, setupDatabaseAndSynchronizer, db, synchronizer, switchClient } = require('./test-utils.js'); | ||||
| const { resourceService, decryptionWorker, encryptionService, loadEncryptionMasterKey, allSyncTargetItemsEncrypted, setupDatabaseAndSynchronizer, db, synchronizer, switchClient } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const Resource = require('@joplin/lib/models/Resource.js'); | ||||
| @@ -18,7 +18,7 @@ describe('services_ResourceService', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should delete orphaned resources', asyncTest(async () => { | ||||
| 	it('should delete orphaned resources', (async () => { | ||||
| 		const service = new ResourceService(); | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -49,7 +49,7 @@ describe('services_ResourceService', function() { | ||||
| 		expect(!(await NoteResource.all()).length).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not delete resource if still associated with at least one note', asyncTest(async () => { | ||||
| 	it('should not delete resource if still associated with at least one note', (async () => { | ||||
| 		const service = new ResourceService(); | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -73,7 +73,7 @@ describe('services_ResourceService', function() { | ||||
| 		expect(!!(await Resource.load(resource1.id))).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not delete a resource that has never been associated with any note, because it probably means the resource came via sync, and associated note has not arrived yet', asyncTest(async () => { | ||||
| 	it('should not delete a resource that has never been associated with any note, because it probably means the resource came via sync, and associated note has not arrived yet', (async () => { | ||||
| 		const service = new ResourceService(); | ||||
| 		await shim.createResourceFromPath(`${__dirname}/../tests/support/photo.jpg`); | ||||
|  | ||||
| @@ -83,7 +83,7 @@ describe('services_ResourceService', function() { | ||||
| 		expect((await Resource.all()).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not delete resource if it is used in an IMG tag', asyncTest(async () => { | ||||
| 	it('should not delete resource if it is used in an IMG tag', (async () => { | ||||
| 		const service = new ResourceService(); | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -102,7 +102,7 @@ describe('services_ResourceService', function() { | ||||
| 		expect(!!(await Resource.load(resource1.id))).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not process twice the same change', asyncTest(async () => { | ||||
| 	it('should not process twice the same change', (async () => { | ||||
| 		const service = new ResourceService(); | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -122,7 +122,7 @@ describe('services_ResourceService', function() { | ||||
| 		expect(before.last_seen_time).toBe(after.last_seen_time); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not delete resources that are associated with an encrypted note', asyncTest(async () => { | ||||
| 	it('should not delete resources that are associated with an encrypted note', (async () => { | ||||
| 		// https://github.com/laurent22/joplin/issues/1433 | ||||
| 		// | ||||
| 		// Client 1 and client 2 have E2EE setup. | ||||
| @@ -168,7 +168,7 @@ describe('services_ResourceService', function() { | ||||
| 		expect((await Resource.all()).length).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should double-check if the resource is still linked before deleting it', asyncTest(async () => { | ||||
| 	it('should double-check if the resource is still linked before deleting it', (async () => { | ||||
| 		SearchEngine.instance().setDb(db()); // /!\ Note that we use the global search engine here, which we shouldn't but will work for now | ||||
|  | ||||
| 		const folder1 = await Folder.save({ title: 'folder1' }); | ||||
| @@ -187,7 +187,7 @@ describe('services_ResourceService', function() { | ||||
| 		expect(!!nr.is_associated).toBe(true); // And it should have fixed the situation by re-indexing the note content | ||||
| 	})); | ||||
|  | ||||
| 	// it('should auto-delete resource even if the associated note was deleted immediately', asyncTest(async () => { | ||||
| 	// it('should auto-delete resource even if the associated note was deleted immediately', (async () => { | ||||
| 	// 	// Previoulsy, when a resource was be attached to a note, then the | ||||
| 	// 	// note was immediately deleted, the ResourceService would not have | ||||
| 	// 	// time to quick in an index the resource/note relation. It means | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| @@ -14,10 +14,6 @@ const BaseModel = require('@joplin/lib/BaseModel').default; | ||||
| const RevisionService = require('@joplin/lib/services/RevisionService.js'); | ||||
| const shim = require('@joplin/lib/shim').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at services_Revision: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('services_Revision', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| @@ -27,7 +23,7 @@ describe('services_Revision', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should create diff and rebuild notes', asyncTest(async () => { | ||||
| 	it('should create diff and rebuild notes', (async () => { | ||||
| 		const service = new RevisionService(); | ||||
|  | ||||
| 		const n1_v1 = await Note.save({ title: '', author: 'testing' }); | ||||
| @@ -58,7 +54,7 @@ describe('services_Revision', function() { | ||||
| 		expect(revisions2.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete old revisions (1 note, 2 rev)', asyncTest(async () => { | ||||
| 	it('should delete old revisions (1 note, 2 rev)', (async () => { | ||||
| 		const service = new RevisionService(); | ||||
|  | ||||
| 		const n1_v0 = await Note.save({ title: '' }); | ||||
| @@ -81,7 +77,7 @@ describe('services_Revision', function() { | ||||
| 		expect(rev1.title).toBe('hello welcome'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete old revisions (1 note, 3 rev)', asyncTest(async () => { | ||||
| 	it('should delete old revisions (1 note, 3 rev)', (async () => { | ||||
| 		const service = new RevisionService(); | ||||
|  | ||||
| 		const n1_v0 = await Note.save({ title: '' }); | ||||
| @@ -122,7 +118,7 @@ describe('services_Revision', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete old revisions (2 notes, 2 rev)', asyncTest(async () => { | ||||
| 	it('should delete old revisions (2 notes, 2 rev)', (async () => { | ||||
| 		const service = new RevisionService(); | ||||
|  | ||||
| 		const n1_v0 = await Note.save({ title: '' }); | ||||
| @@ -157,7 +153,7 @@ describe('services_Revision', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle conflicts', asyncTest(async () => { | ||||
| 	it('should handle conflicts', (async () => { | ||||
| 		const service = new RevisionService(); | ||||
|  | ||||
| 		// A conflict happens in this case: | ||||
| @@ -193,7 +189,7 @@ describe('services_Revision', function() { | ||||
| 		expect(revNote3.title).toBe('hello John'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create a revision for notes that are older than a given interval', asyncTest(async () => { | ||||
| 	it('should create a revision for notes that are older than a given interval', (async () => { | ||||
| 		const n1 = await Note.save({ title: 'hello' }); | ||||
| 		const noteId = n1.id; | ||||
|  | ||||
| @@ -229,7 +225,7 @@ describe('services_Revision', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should create a revision for notes that get deleted (recyle bin)', asyncTest(async () => { | ||||
| 	it('should create a revision for notes that get deleted (recyle bin)', (async () => { | ||||
| 		const n1 = await Note.save({ title: 'hello' }); | ||||
| 		const noteId = n1.id; | ||||
|  | ||||
| @@ -243,7 +239,7 @@ describe('services_Revision', function() { | ||||
| 		expect(rev1.title).toBe('hello'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not create a revision for notes that get deleted if there is already a revision', asyncTest(async () => { | ||||
| 	it('should not create a revision for notes that get deleted if there is already a revision', (async () => { | ||||
| 		const n1 = await Note.save({ title: 'hello' }); | ||||
| 		await revisionService().collectRevisions(); | ||||
| 		const noteId = n1.id; | ||||
| @@ -261,7 +257,7 @@ describe('services_Revision', function() { | ||||
| 		expect((await Revision.allByType(BaseModel.TYPE_NOTE, n1.id)).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not create a revision for new note the first time they are saved', asyncTest(async () => { | ||||
| 	it('should not create a revision for new note the first time they are saved', (async () => { | ||||
| 		const n1 = await Note.save({ title: 'hello' }); | ||||
|  | ||||
| 		{ | ||||
| @@ -277,7 +273,7 @@ describe('services_Revision', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should abort collecting revisions when one of them is encrypted', asyncTest(async () => { | ||||
| 	it('should abort collecting revisions when one of them is encrypted', (async () => { | ||||
| 		const n1 = await Note.save({ title: 'hello' }); // CHANGE 1 | ||||
| 		await revisionService().collectRevisions(); | ||||
| 		await Note.save({ id: n1.id, title: 'hello Ringo' }); // CHANGE 2 | ||||
| @@ -311,7 +307,7 @@ describe('services_Revision', function() { | ||||
| 		expect(Setting.value('revisionService.lastProcessedChangeId')).toBe(4); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not delete old revisions if one of them is still encrypted (1)', asyncTest(async () => { | ||||
| 	it('should not delete old revisions if one of them is still encrypted (1)', (async () => { | ||||
| 		// Test case 1: Two revisions and the first one is encrypted. | ||||
| 		// Calling deleteOldRevisions() with low TTL, which means all revisions | ||||
| 		// should be deleted, but they won't be due to the encrypted one. | ||||
| @@ -338,7 +334,7 @@ describe('services_Revision', function() { | ||||
| 		expect((await Revision.all()).length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not delete old revisions if one of them is still encrypted (2)', asyncTest(async () => { | ||||
| 	it('should not delete old revisions if one of them is still encrypted (2)', (async () => { | ||||
| 		// Test case 2: Two revisions and the first one is encrypted. | ||||
| 		// Calling deleteOldRevisions() with higher TTL, which means the oldest | ||||
| 		// revision should be deleted, but it won't be due to the encrypted one. | ||||
| @@ -362,7 +358,7 @@ describe('services_Revision', function() { | ||||
| 		expect((await Revision.all()).length).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not delete old revisions if one of them is still encrypted (3)', asyncTest(async () => { | ||||
| 	it('should not delete old revisions if one of them is still encrypted (3)', (async () => { | ||||
| 		// Test case 2: Two revisions and the second one is encrypted. | ||||
| 		// Calling deleteOldRevisions() with higher TTL, which means the oldest | ||||
| 		// revision should be deleted, but it won't be due to the encrypted one. | ||||
| @@ -392,7 +388,7 @@ describe('services_Revision', function() { | ||||
| 		expect((await Revision.all()).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not create a revision if the note has not changed', asyncTest(async () => { | ||||
| 	it('should not create a revision if the note has not changed', (async () => { | ||||
| 		const n1_v0 = await Note.save({ title: '' }); | ||||
| 		const n1_v1 = await Note.save({ id: n1_v0.id, title: 'hello' }); | ||||
| 		await revisionService().collectRevisions(); // REV 1 | ||||
| @@ -403,7 +399,7 @@ describe('services_Revision', function() { | ||||
| 		expect((await Revision.all()).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should preserve user update time', asyncTest(async () => { | ||||
| 	it('should preserve user update time', (async () => { | ||||
| 		// user_updated_time is kind of tricky and can be changed automatically in various | ||||
| 		// places so make sure it is saved correctly with the revision | ||||
|  | ||||
| @@ -423,7 +419,7 @@ describe('services_Revision', function() { | ||||
| 		expect(revNote.user_updated_time).toBe(userUpdatedTime); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not create a revision if there is already a recent one', asyncTest(async () => { | ||||
| 	it('should not create a revision if there is already a recent one', (async () => { | ||||
| 		const n1_v0 = await Note.save({ title: '' }); | ||||
| 		const n1_v1 = await Note.save({ id: n1_v0.id, title: 'hello' }); | ||||
| 		await revisionService().collectRevisions(); // REV 1 | ||||
|   | ||||
| @@ -3,16 +3,12 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, asyncTest, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, restoreDate } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, restoreDate } = require('./test-utils.js'); | ||||
| const SearchEngine = require('@joplin/lib/services/searchengine/SearchEngine'); | ||||
| const Note = require('@joplin/lib/models/Note'); | ||||
| const ItemChange = require('@joplin/lib/models/ItemChange'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| let engine = null; | ||||
|  | ||||
|  | ||||
| @@ -76,7 +72,7 @@ describe('services_SearchEngine', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should keep the content and FTS table in sync', asyncTest(async () => { | ||||
| 	it('should keep the content and FTS table in sync', (async () => { | ||||
| 		let rows, n1, n2, n3; | ||||
|  | ||||
| 		n1 = await Note.save({ title: 'a' }); | ||||
| @@ -111,7 +107,7 @@ describe('services_SearchEngine', function() { | ||||
| 		expect(rows.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should, after initial indexing, save the last change ID', asyncTest(async () => { | ||||
| 	it('should, after initial indexing, save the last change ID', (async () => { | ||||
| 		const n1 = await Note.save({ title: 'abcd efgh' }); // 3 | ||||
| 		const n2 = await Note.save({ title: 'abcd aaaaa abcd abcd' }); // 1 | ||||
|  | ||||
| @@ -127,7 +123,7 @@ describe('services_SearchEngine', function() { | ||||
| 	})); | ||||
|  | ||||
|  | ||||
| 	it('should order search results by relevance BM25', asyncTest(async () => { | ||||
| 	it('should order search results by relevance BM25', (async () => { | ||||
| 		// BM25 is based on term frequency - inverse document frequency | ||||
| 		// The tf–idf value increases proportionally to the number of times a word appears in the document | ||||
| 		// and is offset by the number of documents in the corpus that contain the word, which helps to adjust | ||||
| @@ -160,7 +156,7 @@ describe('services_SearchEngine', function() { | ||||
| 	// TODO: Need to update and replace jasmine.mockDate() calls with Jest | ||||
| 	// equivalent | ||||
|  | ||||
| 	// it('should correctly weigh notes using BM25 and user_updated_time', asyncTest(async () => { | ||||
| 	// it('should correctly weigh notes using BM25 and user_updated_time', (async () => { | ||||
| 	// 	await mockDate(2020, 9, 30, 50); | ||||
| 	// 	const noteData = [ | ||||
| 	// 		{ | ||||
| @@ -240,7 +236,7 @@ describe('services_SearchEngine', function() { | ||||
| 	// 	await restoreDate(); | ||||
| 	// })); | ||||
|  | ||||
| 	it('should tell where the results are found', asyncTest(async () => { | ||||
| 	it('should tell where the results are found', (async () => { | ||||
| 		const notes = [ | ||||
| 			await Note.save({ title: 'abcd efgh', body: 'abcd' }), | ||||
| 			await Note.save({ title: 'abcd' }), | ||||
| @@ -266,7 +262,7 @@ describe('services_SearchEngine', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should order search results by relevance (last updated first)', asyncTest(async () => { | ||||
| 	it('should order search results by relevance (last updated first)', (async () => { | ||||
| 		let rows; | ||||
|  | ||||
| 		const n1 = await Note.save({ title: 'abcd' }); | ||||
| @@ -292,7 +288,7 @@ describe('services_SearchEngine', function() { | ||||
| 		expect(rows[2].id).toBe(n2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should order search results by relevance (completed to-dos last)', asyncTest(async () => { | ||||
| 	it('should order search results by relevance (completed to-dos last)', (async () => { | ||||
| 		let rows; | ||||
|  | ||||
| 		const n1 = await Note.save({ title: 'abcd', is_todo: 1 }); | ||||
| @@ -318,7 +314,7 @@ describe('services_SearchEngine', function() { | ||||
| 		expect(rows[2].id).toBe(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should supports various query types', asyncTest(async () => { | ||||
| 	it('should supports various query types', (async () => { | ||||
| 		let rows; | ||||
|  | ||||
| 		const n1 = await Note.save({ title: 'abcd efgh ijkl', body: 'aaaa bbbb' }); | ||||
| @@ -369,7 +365,7 @@ describe('services_SearchEngine', function() { | ||||
| 		expect(rows.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support queries with or without accents', asyncTest(async () => { | ||||
| 	it('should support queries with or without accents', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'père noël' }); | ||||
|  | ||||
| @@ -381,7 +377,7 @@ describe('services_SearchEngine', function() { | ||||
| 		expect((await engine.search('noë*')).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support queries with Chinese characters', asyncTest(async () => { | ||||
| 	it('should support queries with Chinese characters', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: '我是法国人', body: '中文测试' }); | ||||
|  | ||||
| @@ -395,7 +391,7 @@ describe('services_SearchEngine', function() { | ||||
| 		expect((await engine.search('测试*'))[0].fields).toEqual(['body']); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support queries with Japanese characters', asyncTest(async () => { | ||||
| 	it('should support queries with Japanese characters', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: '私は日本語を話すことができません', body: 'テスト' }); | ||||
|  | ||||
| @@ -408,7 +404,7 @@ describe('services_SearchEngine', function() { | ||||
|  | ||||
| 	})); | ||||
|  | ||||
| 	it('should support queries with Korean characters', asyncTest(async () => { | ||||
| 	it('should support queries with Korean characters', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: '이것은 한국말이다' }); | ||||
|  | ||||
| @@ -418,7 +414,7 @@ describe('services_SearchEngine', function() { | ||||
| 		expect((await engine.search('말')).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support queries with Thai characters', asyncTest(async () => { | ||||
| 	it('should support queries with Thai characters', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'นี่คือคนไทย' }); | ||||
|  | ||||
| @@ -428,7 +424,7 @@ describe('services_SearchEngine', function() { | ||||
| 		expect((await engine.search('ไทย')).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support field restricted queries with Chinese characters', asyncTest(async () => { | ||||
| 	it('should support field restricted queries with Chinese characters', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: '你好', body: '我是法国人' }); | ||||
|  | ||||
| @@ -450,7 +446,7 @@ describe('services_SearchEngine', function() { | ||||
| 		// expect((await engine.search('title:你好 title:hello')).length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should parse normal query strings', asyncTest(async () => { | ||||
| 	it('should parse normal query strings', (async () => { | ||||
| 		let rows; | ||||
|  | ||||
| 		const testCases = [ | ||||
| @@ -478,7 +474,7 @@ describe('services_SearchEngine', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle queries with special characters', asyncTest(async () => { | ||||
| 	it('should handle queries with special characters', (async () => { | ||||
| 		let rows; | ||||
|  | ||||
| 		const testCases = [ | ||||
| @@ -505,7 +501,7 @@ describe('services_SearchEngine', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow using basic search', asyncTest(async () => { | ||||
| 	it('should allow using basic search', (async () => { | ||||
| 		const n1 = await Note.save({ title: '- [ ] abcd' }); | ||||
| 		const n2 = await Note.save({ title: '[ ] abcd' }); | ||||
|  | ||||
|   | ||||
| @@ -3,7 +3,7 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, asyncTest, db, synchronizer, fileApi, sleep, createNTestNotes, switchClient, createNTestFolders } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, createNTestNotes, switchClient, createNTestFolders } = require('./test-utils.js'); | ||||
| const SearchEngine = require('@joplin/lib/services/searchengine/SearchEngine'); | ||||
| const Note = require('@joplin/lib/models/Note'); | ||||
| const Folder = require('@joplin/lib/models/Folder'); | ||||
| @@ -15,10 +15,6 @@ const shim = require('@joplin/lib/shim').default; | ||||
| const ResourceService = require('@joplin/lib/services/ResourceService').default; | ||||
|  | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at services_SearchFilter: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| let engine = null; | ||||
|  | ||||
| const ids = (array) => array.map(a => a.id); | ||||
| @@ -35,7 +31,7 @@ describe('services_SearchFilter', function() { | ||||
| 	}); | ||||
|  | ||||
|  | ||||
| 	it('should return note matching title', asyncTest(async () => { | ||||
| 	it('should return note matching title', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abcd', body: 'body 1' }); | ||||
| 		const n2 = await Note.save({ title: 'efgh', body: 'body 2' }); | ||||
| @@ -47,7 +43,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows[0].id).toBe(n1.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return note matching negated title', asyncTest(async () => { | ||||
| 	it('should return note matching negated title', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abcd', body: 'body 1' }); | ||||
| 		const n2 = await Note.save({ title: 'efgh', body: 'body 2' }); | ||||
| @@ -59,7 +55,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows[0].id).toBe(n2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return note matching body', asyncTest(async () => { | ||||
| 	it('should return note matching body', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abcd', body: 'body1' }); | ||||
| 		const n2 = await Note.save({ title: 'efgh', body: 'body2' }); | ||||
| @@ -71,7 +67,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows[0].id).toBe(n1.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return note matching negated body', asyncTest(async () => { | ||||
| 	it('should return note matching negated body', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abcd', body: 'body1' }); | ||||
| 		const n2 = await Note.save({ title: 'efgh', body: 'body2' }); | ||||
| @@ -83,7 +79,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows[0].id).toBe(n2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return note matching title containing multiple words', asyncTest(async () => { | ||||
| 	it('should return note matching title containing multiple words', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abcd xyz', body: 'body1' }); | ||||
| 		const n2 = await Note.save({ title: 'efgh ijk', body: 'body2' }); | ||||
| @@ -95,7 +91,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows[0].id).toBe(n1.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return note matching body containing multiple words', asyncTest(async () => { | ||||
| 	it('should return note matching body containing multiple words', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abcd', body: 'ho ho ho' }); | ||||
| 		const n2 = await Note.save({ title: 'efgh', body: 'foo bar' }); | ||||
| @@ -107,7 +103,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows[0].id).toBe(n2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return note matching title AND body', asyncTest(async () => { | ||||
| 	it('should return note matching title AND body', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abcd', body: 'ho ho ho' }); | ||||
| 		const n2 = await Note.save({ title: 'efgh', body: 'foo bar' }); | ||||
| @@ -121,7 +117,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return note matching title OR body', asyncTest(async () => { | ||||
| 	it('should return note matching title OR body', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abcd', body: 'ho ho ho' }); | ||||
| 		const n2 = await Note.save({ title: 'efgh', body: 'foo bar' }); | ||||
| @@ -136,7 +132,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return notes matching text', asyncTest(async () => { | ||||
| 	it('should return notes matching text', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'foo beef', body: 'dead bar' }); | ||||
| 		const n2 = await Note.save({ title: 'bar efgh', body: 'foo dog' }); | ||||
| @@ -162,7 +158,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return notes matching any negated text', asyncTest(async () => { | ||||
| 	it('should return notes matching any negated text', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abc', body: 'def' }); | ||||
| 		const n2 = await Note.save({ title: 'def', body: 'ghi' }); | ||||
| @@ -176,7 +172,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows.map(r=>r.id)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return notes matching any negated title', asyncTest(async () => { | ||||
| 	it('should return notes matching any negated title', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abc', body: 'def' }); | ||||
| 		const n2 = await Note.save({ title: 'def', body: 'ghi' }); | ||||
| @@ -190,7 +186,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows.map(r=>r.id)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return notes matching any negated body', asyncTest(async () => { | ||||
| 	it('should return notes matching any negated body', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'abc', body: 'def' }); | ||||
| 		const n2 = await Note.save({ title: 'def', body: 'ghi' }); | ||||
| @@ -204,7 +200,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows.map(r=>r.id)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support phrase search', asyncTest(async () => { | ||||
| 	it('should support phrase search', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'foo beef', body: 'bar dog' }); | ||||
| 		const n2 = await Note.save({ title: 'bar efgh', body: 'foo dog' }); | ||||
| @@ -215,7 +211,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(rows[0].id).toBe(n1.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support prefix search', asyncTest(async () => { | ||||
| 	it('should support prefix search', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'foo beef', body: 'bar dog' }); | ||||
| 		const n2 = await Note.save({ title: 'bar efgh', body: 'foo dog' }); | ||||
| @@ -227,7 +223,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by tags', asyncTest(async () => { | ||||
| 	it('should support filtering by tags', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'But I would', body: 'walk 500 miles' }); | ||||
| 		const n2 = await Note.save({ title: 'And I would', body: 'walk 500 more' }); | ||||
| @@ -253,7 +249,7 @@ describe('services_SearchFilter', function() { | ||||
| 	})); | ||||
|  | ||||
|  | ||||
| 	it('should support filtering by tags', asyncTest(async () => { | ||||
| 	it('should support filtering by tags', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'peace talks', body: 'battle ground' }); | ||||
| 		const n2 = await Note.save({ title: 'mouse', body: 'mister' }); | ||||
| @@ -303,7 +299,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by notebook', asyncTest(async () => { | ||||
| 	it('should support filtering by notebook', (async () => { | ||||
| 		let rows; | ||||
| 		const folder0 = await Folder.save({ title: 'notebook0' }); | ||||
| 		const folder1 = await Folder.save({ title: 'notebook1' }); | ||||
| @@ -318,7 +314,7 @@ describe('services_SearchFilter', function() { | ||||
|  | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by nested notebook', asyncTest(async () => { | ||||
| 	it('should support filtering by nested notebook', (async () => { | ||||
| 		let rows; | ||||
| 		const folder0 = await Folder.save({ title: 'notebook0' }); | ||||
| 		const folder00 = await Folder.save({ title: 'notebook00', parent_id: folder0.id }); | ||||
| @@ -334,7 +330,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows).sort()).toEqual(ids(notes0.concat(notes00)).sort()); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by multiple notebooks', asyncTest(async () => { | ||||
| 	it('should support filtering by multiple notebooks', (async () => { | ||||
| 		let rows; | ||||
| 		const folder0 = await Folder.save({ title: 'notebook0' }); | ||||
| 		const folder00 = await Folder.save({ title: 'notebook00', parent_id: folder0.id }); | ||||
| @@ -352,7 +348,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows).sort()).toEqual(ids(notes0).concat(ids(notes00).concat(ids(notes1))).sort()); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by created date', asyncTest(async () => { | ||||
| 	it('should support filtering by created date', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'I made this on', body: 'May 20 2020', user_created_time: Date.parse('2020-05-20') }); | ||||
| 		const n2 = await Note.save({ title: 'I made this on', body: 'May 19 2020', user_created_time: Date.parse('2020-05-19') }); | ||||
| @@ -375,7 +371,7 @@ describe('services_SearchFilter', function() { | ||||
|  | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by between two dates', asyncTest(async () => { | ||||
| 	it('should support filtering by between two dates', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'January 01 2020', body: 'January 01 2020', user_created_time: Date.parse('2020-01-01') }); | ||||
| 		const n2 = await Note.save({ title: 'February 15 2020', body: 'February 15 2020', user_created_time: Date.parse('2020-02-15') }); | ||||
| @@ -399,7 +395,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n4.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by created with smart value: day', asyncTest(async () => { | ||||
| 	it('should support filtering by created with smart value: day', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'I made this', body: 'today', user_created_time: parseInt(time.goBackInTime(Date.now(), 0, 'day'), 10) }); | ||||
| 		const n2 = await Note.save({ title: 'I made this', body: 'yesterday', user_created_time: parseInt(time.goBackInTime(Date.now(), 1, 'day'), 10) }); | ||||
| @@ -423,7 +419,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by created with smart value: week', asyncTest(async () => { | ||||
| 	it('should support filtering by created with smart value: week', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'I made this', body: 'this week', user_created_time: parseInt(time.goBackInTime(Date.now(), 0, 'week'), 10) }); | ||||
| 		const n2 = await Note.save({ title: 'I made this', body: 'the week before', user_created_time: parseInt(time.goBackInTime(Date.now(), 1, 'week'), 10) }); | ||||
| @@ -447,7 +443,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by created with smart value: month', asyncTest(async () => { | ||||
| 	it('should support filtering by created with smart value: month', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'I made this', body: 'this month', user_created_time: parseInt(time.goBackInTime(Date.now(), 0, 'month'), 10) }); | ||||
| 		const n2 = await Note.save({ title: 'I made this', body: 'the month before', user_created_time: parseInt(time.goBackInTime(Date.now(), 1, 'month'), 10) }); | ||||
| @@ -471,7 +467,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by created with smart value: year', asyncTest(async () => { | ||||
| 	it('should support filtering by created with smart value: year', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'I made this', body: 'this year', user_created_time: parseInt(time.goBackInTime(Date.now(), 0, 'year'), 10) }); | ||||
| 		const n2 = await Note.save({ title: 'I made this', body: 'the year before', user_created_time: parseInt(time.goBackInTime(Date.now(), 1, 'year'), 10) }); | ||||
| @@ -495,7 +491,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by updated date', asyncTest(async () => { | ||||
| 	it('should support filtering by updated date', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'I updated this on', body: 'May 20 2020', updated_time: Date.parse('2020-05-20'), user_updated_time: Date.parse('2020-05-20') }, { autoTimestamp: false }); | ||||
| 		const n2 = await Note.save({ title: 'I updated this on', body: 'May 19 2020', updated_time: Date.parse('2020-05-19'), user_updated_time: Date.parse('2020-05-19') }, { autoTimestamp: false }); | ||||
| @@ -512,7 +508,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n2.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by updated with smart value: day', asyncTest(async () => { | ||||
| 	it('should support filtering by updated with smart value: day', (async () => { | ||||
| 		let rows; | ||||
| 		const today = parseInt(time.goBackInTime(Date.now(), 0, 'day'), 10); | ||||
| 		const yesterday = parseInt(time.goBackInTime(Date.now(), 1, 'day'), 10); | ||||
| @@ -544,7 +540,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by type todo', asyncTest(async () => { | ||||
| 	it('should support filtering by type todo', (async () => { | ||||
| 		let rows; | ||||
| 		const t1 = await Note.save({ title: 'This is a ', body: 'todo', is_todo: 1 }); | ||||
| 		const t2 = await Note.save({ title: 'This is another', body: 'todo but completed', is_todo: 1, todo_completed: 1590085027710 }); | ||||
| @@ -571,7 +567,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(t1.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by type note', asyncTest(async () => { | ||||
| 	it('should support filtering by type note', (async () => { | ||||
| 		let rows; | ||||
| 		const t1 = await Note.save({ title: 'This is a ', body: 'todo', is_todo: 1 }); | ||||
| 		const t2 = await Note.save({ title: 'This is another', body: 'todo but completed', is_todo: 1, todo_completed: 1590085027710 }); | ||||
| @@ -584,7 +580,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(t3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by latitude, longitude, altitude', asyncTest(async () => { | ||||
| 	it('should support filtering by latitude, longitude, altitude', (async () => { | ||||
| 		let rows; | ||||
| 		const n1 = await Note.save({ title: 'I made this', body: 'this week', latitude: 12.97, longitude: 88.88, altitude: 69.96  }); | ||||
| 		const n2 = await Note.save({ title: 'I made this', body: 'the week before', latitude: 42.11, longitude: 77.77, altitude: 42.00  }); | ||||
| @@ -631,7 +627,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by resource MIME type', asyncTest(async () => { | ||||
| 	it('should support filtering by resource MIME type', (async () => { | ||||
| 		let rows; | ||||
| 		const service = new ResourceService(); | ||||
| 		// console.log(testImagePath) | ||||
| @@ -674,7 +670,7 @@ describe('services_SearchFilter', function() { | ||||
| 		expect(ids(rows)).toContain(n4.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should ignore dashes in a word', asyncTest(async () => { | ||||
| 	it('should ignore dashes in a word', (async () => { | ||||
| 		const n0 = await Note.save({ title: 'doesnotwork' }); | ||||
| 		const n1 = await Note.save({ title: 'does not work' }); | ||||
| 		const n2 = await Note.save({ title: 'does-not-work' }); | ||||
| @@ -712,7 +708,7 @@ describe('services_SearchFilter', function() { | ||||
|  | ||||
| 	})); | ||||
|  | ||||
| 	it('should support filtering by sourceurl', asyncTest(async () => { | ||||
| 	it('should support filtering by sourceurl', (async () => { | ||||
| 		const n0 = await Note.save({ title: 'n0', source_url: 'https://discourse.joplinapp.org' }); | ||||
| 		const n1 = await Note.save({ title: 'n1', source_url: 'https://google.com' }); | ||||
| 		const n2 = await Note.save({ title: 'n2', source_url: 'https://reddit.com' }); | ||||
|   | ||||
| @@ -2,7 +2,7 @@ import KeychainService from '@joplin/lib/services/keychain/KeychainService'; | ||||
| import shim from '@joplin/lib/shim'; | ||||
| import Setting from '@joplin/lib/models/Setting'; | ||||
|  | ||||
| const { db, asyncTest, setupDatabaseAndSynchronizer, switchClient } = require('./test-utils.js'); | ||||
| const { db, setupDatabaseAndSynchronizer, switchClient } = require('./test-utils.js'); | ||||
|  | ||||
| function describeIfCompatible(name: string, fn: any, elseFn: any) { | ||||
| 	if (['win32', 'darwin'].includes(shim.platformName())) { | ||||
| @@ -26,11 +26,11 @@ describeIfCompatible('services_KeychainService', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should be enabled on macOS and Windows', asyncTest(async () => { | ||||
| 	it('should be enabled on macOS and Windows', (async () => { | ||||
| 		expect(Setting.value('keychain.supported')).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should set, get and delete passwords', asyncTest(async () => { | ||||
| 	it('should set, get and delete passwords', (async () => { | ||||
| 		const service = KeychainService.instance(); | ||||
|  | ||||
| 		const isSet = await service.setPassword('zz_testunit', 'password'); | ||||
| @@ -44,7 +44,7 @@ describeIfCompatible('services_KeychainService', function() { | ||||
| 		expect(await service.password('zz_testunit')).toBe(null); | ||||
| 	})); | ||||
|  | ||||
| 	it('should save and load secure settings', asyncTest(async () => { | ||||
| 	it('should save and load secure settings', (async () => { | ||||
| 		Setting.setObjectValue('encryption.passwordCache', 'testing', '123456'); | ||||
| 		await Setting.saveAll(); | ||||
| 		await Setting.load(); | ||||
| @@ -52,7 +52,7 @@ describeIfCompatible('services_KeychainService', function() { | ||||
| 		expect(passwords.testing).toBe('123456'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete db settings if they have been saved in keychain', asyncTest(async () => { | ||||
| 	it('should delete db settings if they have been saved in keychain', (async () => { | ||||
| 		// First save some secure settings and make sure it ends up in the databse | ||||
| 		KeychainService.instance().enabled = false; | ||||
|  | ||||
|   | ||||
| @@ -2,7 +2,7 @@ import { PaginationOrderDir } from '@joplin/lib/models/utils/types'; | ||||
| import Api, { RequestMethod } from '@joplin/lib/services/rest/Api'; | ||||
| import shim from '@joplin/lib/shim'; | ||||
|  | ||||
| const { asyncTest, setupDatabaseAndSynchronizer, switchClient, checkThrowAsync, db } = require('./test-utils.js'); | ||||
| const { setupDatabaseAndSynchronizer, switchClient, checkThrowAsync, db } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder'); | ||||
| const Resource = require('@joplin/lib/models/Resource'); | ||||
| const Note = require('@joplin/lib/models/Note'); | ||||
| @@ -50,23 +50,23 @@ describe('services_rest_Api', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should ping', asyncTest(async () => { | ||||
| 	it('should ping', (async () => { | ||||
| 		const response = await api.route(RequestMethod.GET, 'ping'); | ||||
| 		expect(response).toBe('JoplinClipperServer'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle Not Found errors', asyncTest(async () => { | ||||
| 	it('should handle Not Found errors', (async () => { | ||||
| 		const hasThrown = await checkThrowAsync(async () => await api.route(RequestMethod.GET, 'pong')); | ||||
| 		expect(hasThrown).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should get folders', asyncTest(async () => { | ||||
| 	it('should get folders', (async () => { | ||||
| 		await Folder.save({ title: 'mon carnet' }); | ||||
| 		const response = await api.route(RequestMethod.GET, 'folders'); | ||||
| 		expect(response.items.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should update folders', asyncTest(async () => { | ||||
| 	it('should update folders', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'mon carnet' }); | ||||
| 		await api.route(RequestMethod.PUT, `folders/${f1.id}`, null, JSON.stringify({ | ||||
| 			title: 'modifié', | ||||
| @@ -76,7 +76,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(f1b.title).toBe('modifié'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete folders', asyncTest(async () => { | ||||
| 	it('should delete folders', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'mon carnet' }); | ||||
| 		await api.route(RequestMethod.DELETE, `folders/${f1.id}`); | ||||
|  | ||||
| @@ -84,7 +84,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(!f1b).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create folders', asyncTest(async () => { | ||||
| 	it('should create folders', (async () => { | ||||
| 		const response = await api.route(RequestMethod.POST, 'folders', null, JSON.stringify({ | ||||
| 			title: 'from api', | ||||
| 		})); | ||||
| @@ -96,7 +96,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(f[0].title).toBe('from api'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should get one folder', asyncTest(async () => { | ||||
| 	it('should get one folder', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'mon carnet' }); | ||||
| 		const response = await api.route(RequestMethod.GET, `folders/${f1.id}`); | ||||
| 		expect(response.id).toBe(f1.id); | ||||
| @@ -105,7 +105,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(hasThrown).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should get the folder notes', asyncTest(async () => { | ||||
| 	it('should get the folder notes', (async () => { | ||||
| 		const f1 = await Folder.save({ title: 'mon carnet' }); | ||||
| 		const response2 = await api.route(RequestMethod.GET, `folders/${f1.id}/notes`); | ||||
| 		expect(response2.items.length).toBe(0); | ||||
| @@ -116,12 +116,12 @@ describe('services_rest_Api', function() { | ||||
| 		expect(response.items.length).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should fail on invalid paths', asyncTest(async () => { | ||||
| 	it('should fail on invalid paths', (async () => { | ||||
| 		const hasThrown = await checkThrowAsync(async () => await api.route(RequestMethod.GET, 'schtroumpf')); | ||||
| 		expect(hasThrown).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should get notes', asyncTest(async () => { | ||||
| 	it('should get notes', (async () => { | ||||
| 		let response = null; | ||||
| 		const f1 = await Folder.save({ title: 'mon carnet' }); | ||||
| 		const f2 = await Folder.save({ title: 'mon deuxième carnet' }); | ||||
| @@ -141,7 +141,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(response.title).toBe('trois'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create notes', asyncTest(async () => { | ||||
| 	it('should create notes', (async () => { | ||||
| 		let response = null; | ||||
| 		const f = await Folder.save({ title: 'mon carnet' }); | ||||
|  | ||||
| @@ -160,7 +160,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(!!response.id).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow setting note properties', asyncTest(async () => { | ||||
| 	it('should allow setting note properties', (async () => { | ||||
| 		let response: any = null; | ||||
| 		const f = await Folder.save({ title: 'mon carnet' }); | ||||
|  | ||||
| @@ -195,7 +195,7 @@ describe('services_rest_Api', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should preserve user timestamps when creating notes', asyncTest(async () => { | ||||
| 	it('should preserve user timestamps when creating notes', (async () => { | ||||
| 		let response = null; | ||||
| 		const f = await Folder.save({ title: 'mon carnet' }); | ||||
|  | ||||
| @@ -222,7 +222,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(newNote.user_created_time).toBeGreaterThanOrEqual(timeBefore); | ||||
| 	})); | ||||
|  | ||||
| 	it('should preserve user timestamps when updating notes', asyncTest(async () => { | ||||
| 	it('should preserve user timestamps when updating notes', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'mon carnet' }); | ||||
|  | ||||
| 		const updatedTime = Date.now() - 1000; | ||||
| @@ -265,7 +265,7 @@ describe('services_rest_Api', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should create notes with supplied ID', asyncTest(async () => { | ||||
| 	it('should create notes with supplied ID', (async () => { | ||||
| 		let response = null; | ||||
| 		const f = await Folder.save({ title: 'mon carnet' }); | ||||
|  | ||||
| @@ -277,7 +277,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(response.id).toBe('12345678123456781234567812345678'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create todos', asyncTest(async () => { | ||||
| 	it('should create todos', (async () => { | ||||
| 		let response = null; | ||||
| 		const f = await Folder.save({ title: 'stuff to do' }); | ||||
|  | ||||
| @@ -308,7 +308,7 @@ describe('services_rest_Api', function() { | ||||
| 		})); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create folders with supplied ID', asyncTest(async () => { | ||||
| 	it('should create folders with supplied ID', (async () => { | ||||
| 		const response = await api.route(RequestMethod.POST, 'folders', null, JSON.stringify({ | ||||
| 			id: '12345678123456781234567812345678', | ||||
| 			title: 'from api', | ||||
| @@ -317,7 +317,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(response.id).toBe('12345678123456781234567812345678'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create notes with images', asyncTest(async () => { | ||||
| 	it('should create notes with images', (async () => { | ||||
| 		let response = null; | ||||
| 		const f = await Folder.save({ title: 'mon carnet' }); | ||||
|  | ||||
| @@ -334,7 +334,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(response.body.indexOf(resource.id) >= 0).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should delete resources', asyncTest(async () => { | ||||
| 	it('should delete resources', (async () => { | ||||
| 		const f = await Folder.save({ title: 'mon carnet' }); | ||||
|  | ||||
| 		await api.route(RequestMethod.POST, 'notes', null, JSON.stringify({ | ||||
| @@ -353,7 +353,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(!(await Resource.load(resource.id))).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create notes from HTML', asyncTest(async () => { | ||||
| 	it('should create notes from HTML', (async () => { | ||||
| 		let response = null; | ||||
| 		const f = await Folder.save({ title: 'mon carnet' }); | ||||
|  | ||||
| @@ -366,7 +366,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(response.body).toBe('**Bold text**'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should handle tokens', asyncTest(async () => { | ||||
| 	it('should handle tokens', (async () => { | ||||
| 		api = new Api('mytoken'); | ||||
|  | ||||
| 		let hasThrown = await checkThrowAsync(async () => await api.route(RequestMethod.GET, 'notes')); | ||||
| @@ -379,7 +379,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(hasThrown).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should add tags to notes', asyncTest(async () => { | ||||
| 	it('should add tags to notes', (async () => { | ||||
| 		const tag = await Tag.save({ title: 'mon étiquette' }); | ||||
| 		const note = await Note.save({ title: 'ma note' }); | ||||
|  | ||||
| @@ -391,7 +391,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(noteIds[0]).toBe(note.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should remove tags from notes', asyncTest(async () => { | ||||
| 	it('should remove tags from notes', (async () => { | ||||
| 		const tag = await Tag.save({ title: 'mon étiquette' }); | ||||
| 		const note = await Note.save({ title: 'ma note' }); | ||||
| 		await Tag.addNote(tag.id, note.id); | ||||
| @@ -402,7 +402,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(noteIds.length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should list all tag notes', asyncTest(async () => { | ||||
| 	it('should list all tag notes', (async () => { | ||||
| 		const tag = await Tag.save({ title: 'mon étiquette' }); | ||||
| 		const tag2 = await Tag.save({ title: 'mon étiquette 2' }); | ||||
| 		const note1 = await Note.save({ title: 'ma note un' }); | ||||
| @@ -422,7 +422,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(response3.items.length).toBe(2); | ||||
| 	})); | ||||
|  | ||||
| 	it('should update tags when updating notes', asyncTest(async () => { | ||||
| 	it('should update tags when updating notes', (async () => { | ||||
| 		const tag1 = await Tag.save({ title: 'mon étiquette 1' }); | ||||
| 		const tag2 = await Tag.save({ title: 'mon étiquette 2' }); | ||||
| 		const tag3 = await Tag.save({ title: 'mon étiquette 3' }); | ||||
| @@ -443,7 +443,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(tagIds.includes(tag3.id)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should create and update tags when updating notes', asyncTest(async () => { | ||||
| 	it('should create and update tags when updating notes', (async () => { | ||||
| 		const tag1 = await Tag.save({ title: 'mon étiquette 1' }); | ||||
| 		const tag2 = await Tag.save({ title: 'mon étiquette 2' }); | ||||
| 		const newTagTitle = 'mon étiquette 3'; | ||||
| @@ -465,7 +465,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(tagIds.includes(newTag.id)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not update tags if tags is not mentioned when updating', asyncTest(async () => { | ||||
| 	it('should not update tags if tags is not mentioned when updating', (async () => { | ||||
| 		const tag1 = await Tag.save({ title: 'mon étiquette 1' }); | ||||
| 		const tag2 = await Tag.save({ title: 'mon étiquette 2' }); | ||||
|  | ||||
| @@ -485,7 +485,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(tagIds.includes(tag2.id)).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should remove tags from note if tags is set to empty string when updating', asyncTest(async () => { | ||||
| 	it('should remove tags from note if tags is set to empty string when updating', (async () => { | ||||
| 		const tag1 = await Tag.save({ title: 'mon étiquette 1' }); | ||||
| 		const tag2 = await Tag.save({ title: 'mon étiquette 2' }); | ||||
|  | ||||
| @@ -503,7 +503,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(tagIds.length === 0).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should paginate results', asyncTest(async () => { | ||||
| 	it('should paginate results', (async () => { | ||||
| 		await createFolderForPagination(1, 1001); | ||||
| 		await createFolderForPagination(2, 1002); | ||||
| 		await createFolderForPagination(3, 1003); | ||||
| @@ -564,7 +564,7 @@ describe('services_rest_Api', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should paginate results and handle duplicate field values', asyncTest(async () => { | ||||
| 	it('should paginate results and handle duplicate field values', (async () => { | ||||
| 		// If, for example, ordering by updated_time, and two of the rows | ||||
| 		// have the same updated_time, it should make sure that the sort | ||||
| 		// order is stable and all results are correctly returned. | ||||
| @@ -593,7 +593,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(r2.items[1].title).toBe('folder4'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should paginate results and return the requested fields only', asyncTest(async () => { | ||||
| 	it('should paginate results and return the requested fields only', (async () => { | ||||
| 		await createNoteForPagination(1, 1001); | ||||
| 		await createNoteForPagination(2, 1002); | ||||
| 		await createNoteForPagination(3, 1003); | ||||
| @@ -621,7 +621,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(!!r2.items[0].id).toBe(true); | ||||
| 	})); | ||||
|  | ||||
| 	it('should paginate folder notes', asyncTest(async () => { | ||||
| 	it('should paginate folder notes', (async () => { | ||||
| 		const folder = await Folder.save({}); | ||||
| 		const note1 = await Note.save({ parent_id: folder.id }); | ||||
| 		await msleep(1); | ||||
| @@ -646,7 +646,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(r2.items[0].id).toBe(note3.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should sort search paginated results', asyncTest(async () => { | ||||
| 	it('should sort search paginated results', (async () => { | ||||
| 		SearchEngine.instance().setDb(db()); | ||||
|  | ||||
| 		await createNoteForPagination('note c', 1000); | ||||
| @@ -698,7 +698,7 @@ describe('services_rest_Api', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should return default fields', asyncTest(async () => { | ||||
| 	it('should return default fields', (async () => { | ||||
| 		const folder = await Folder.save({ title: 'folder' }); | ||||
| 		const note1 = await Note.save({ title: 'note1', parent_id: folder.id }); | ||||
| 		await Note.save({ title: 'note2', parent_id: folder.id }); | ||||
| @@ -740,7 +740,7 @@ describe('services_rest_Api', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should return the notes associated with a resource', asyncTest(async () => { | ||||
| 	it('should return the notes associated with a resource', (async () => { | ||||
| 		const note = await Note.save({}); | ||||
| 		await shim.attachFileToNote(note, `${__dirname}/../tests/support/photo.jpg`); | ||||
| 		const resource = (await Resource.all())[0]; | ||||
| @@ -754,7 +754,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(r.items[0].id).toBe(note.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return the resources associated with a note', asyncTest(async () => { | ||||
| 	it('should return the resources associated with a note', (async () => { | ||||
| 		const note = await Note.save({}); | ||||
| 		await shim.attachFileToNote(note, `${__dirname}/../tests/support/photo.jpg`); | ||||
| 		const resource = (await Resource.all())[0]; | ||||
| @@ -765,7 +765,7 @@ describe('services_rest_Api', function() { | ||||
| 		expect(r.items[0].id).toBe(resource.id); | ||||
| 	})); | ||||
|  | ||||
| 	it('should return search results', asyncTest(async () => { | ||||
| 	it('should return search results', (async () => { | ||||
| 		SearchEngine.instance().setDb(db()); | ||||
|  | ||||
| 		for (let i = 0; i < 10; i++) { | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| const { syncDir, asyncTest, fileApi, synchronizer, createSyncTargetSnapshot, loadEncryptionMasterKey, decryptionWorker, encryptionService, setupDatabaseAndSynchronizer, switchClient, expectThrow, expectNotThrow } = require('../test-utils.js'); | ||||
| const { syncDir, fileApi, synchronizer, createSyncTargetSnapshot, loadEncryptionMasterKey, decryptionWorker, encryptionService, setupDatabaseAndSynchronizer, switchClient, expectThrow, expectNotThrow } = require('../test-utils.js'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| const Folder = require('@joplin/lib/models/Folder'); | ||||
| const Note = require('@joplin/lib/models/Note'); | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,11 +1,7 @@ | ||||
| import LockHandler, { LockType, LockHandlerOptions, Lock } from '@joplin/lib/services/synchronizer/LockHandler'; | ||||
|  | ||||
|  | ||||
| const { isNetworkSyncTarget, asyncTest, fileApi, setupDatabaseAndSynchronizer, synchronizer, switchClient, msleep, expectThrow, expectNotThrow } = require('./test-utils.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason: any, p: any) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
| const { isNetworkSyncTarget, fileApi, setupDatabaseAndSynchronizer, synchronizer, switchClient, msleep, expectThrow, expectNotThrow } = require('./test-utils.js'); | ||||
|  | ||||
| // For tests with memory of file system we can use low intervals to make the tests faster. | ||||
| // However if we use such low values with network sync targets, some calls might randomly fail with | ||||
| @@ -39,7 +35,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should acquire and release a sync lock', asyncTest(async () => { | ||||
| 	it('should acquire and release a sync lock', (async () => { | ||||
| 		await lockHandler().acquireLock(LockType.Sync, 'mobile', '123456'); | ||||
| 		const locks = await lockHandler().locks(LockType.Sync); | ||||
| 		expect(locks.length).toBe(1); | ||||
| @@ -51,7 +47,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		expect((await lockHandler().locks(LockType.Sync)).length).toBe(0); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not use files that are not locks', asyncTest(async () => { | ||||
| 	it('should not use files that are not locks', (async () => { | ||||
| 		await fileApi().put('locks/desktop.ini', 'a'); | ||||
| 		await fileApi().put('locks/exclusive.json', 'a'); | ||||
| 		await fileApi().put('locks/garbage.json', 'a'); | ||||
| @@ -61,7 +57,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		expect(locks.length).toBe(1); | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow multiple sync locks', asyncTest(async () => { | ||||
| 	it('should allow multiple sync locks', (async () => { | ||||
| 		await lockHandler().acquireLock(LockType.Sync, 'mobile', '111'); | ||||
|  | ||||
| 		await switchClient(2); | ||||
| @@ -78,7 +74,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should auto-refresh a lock', asyncTest(async () => { | ||||
| 	it('should auto-refresh a lock', (async () => { | ||||
| 		const handler = newLockHandler({ autoRefreshInterval: 100 * timeoutMultipler }); | ||||
| 		const lock = await handler.acquireLock(LockType.Sync, 'desktop', '111'); | ||||
| 		const lockBefore = await handler.activeLock(LockType.Sync, 'desktop', '111'); | ||||
| @@ -89,7 +85,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		handler.stopAutoLockRefresh(lock); | ||||
| 	})); | ||||
|  | ||||
| 	it('should call the error handler when lock has expired while being auto-refreshed', asyncTest(async () => { | ||||
| 	it('should call the error handler when lock has expired while being auto-refreshed', (async () => { | ||||
| 		const handler = newLockHandler({ | ||||
| 			lockTtl: 50 * timeoutMultipler, | ||||
| 			autoRefreshInterval: 200 * timeoutMultipler, | ||||
| @@ -108,7 +104,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		handler.stopAutoLockRefresh(lock); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not allow sync locks if there is an exclusive lock', asyncTest(async () => { | ||||
| 	it('should not allow sync locks if there is an exclusive lock', (async () => { | ||||
| 		await lockHandler().acquireLock(LockType.Exclusive, 'desktop', '111'); | ||||
|  | ||||
| 		await expectThrow(async () => { | ||||
| @@ -116,7 +112,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		}, 'hasExclusiveLock'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should not allow exclusive lock if there are sync locks', asyncTest(async () => { | ||||
| 	it('should not allow exclusive lock if there are sync locks', (async () => { | ||||
| 		const lockHandler = newLockHandler({ lockTtl: 1000 * 60 * 60 }); | ||||
|  | ||||
| 		await lockHandler.acquireLock(LockType.Sync, 'mobile', '111'); | ||||
| @@ -127,7 +123,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		}, 'hasSyncLock'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should allow exclusive lock if the sync locks have expired', asyncTest(async () => { | ||||
| 	it('should allow exclusive lock if the sync locks have expired', (async () => { | ||||
| 		const lockHandler = newLockHandler({ lockTtl: 500 * timeoutMultipler }); | ||||
|  | ||||
| 		await lockHandler.acquireLock(LockType.Sync, 'mobile', '111'); | ||||
| @@ -140,7 +136,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		}); | ||||
| 	})); | ||||
|  | ||||
| 	it('should decide what is the active exclusive lock', asyncTest(async () => { | ||||
| 	it('should decide what is the active exclusive lock', (async () => { | ||||
| 		const lockHandler = newLockHandler(); | ||||
|  | ||||
| 		{ | ||||
| @@ -155,7 +151,7 @@ describe('synchronizer_LockHandler', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	// it('should not have race conditions', asyncTest(async () => { | ||||
| 	// it('should not have race conditions', (async () => { | ||||
| 	// 	const lockHandler = newLockHandler(); | ||||
|  | ||||
| 	// 	const clients = []; | ||||
|   | ||||
| @@ -8,7 +8,7 @@ import { Dirnames } from '@joplin/lib/services/synchronizer/utils/types'; | ||||
| // gulp buildTests -L && node tests-build/support/createSyncTargetSnapshot.js normal && node tests-build/support/createSyncTargetSnapshot.js e2ee | ||||
|  | ||||
|  | ||||
| const { asyncTest, setSyncTargetName, fileApi, synchronizer, decryptionWorker, encryptionService, setupDatabaseAndSynchronizer, switchClient, expectThrow, expectNotThrow } = require('./test-utils.js'); | ||||
| const { setSyncTargetName, fileApi, synchronizer, decryptionWorker, encryptionService, setupDatabaseAndSynchronizer, switchClient, expectThrow, expectNotThrow } = require('./test-utils.js'); | ||||
| const { deploySyncTargetSnapshot, testData, checkTestData } = require('./support/syncTargetUtils'); | ||||
| const Setting = require('@joplin/lib/models/Setting').default; | ||||
| const MasterKey = require('@joplin/lib/models/MasterKey'); | ||||
| @@ -52,6 +52,13 @@ let previousSyncTargetName: string = ''; | ||||
| describe('synchronizer_MigrationHandler', function() { | ||||
|  | ||||
| 	beforeEach(async (done: Function) => { | ||||
| 		// Note that, for undocumented reasons, the timeout argument passed | ||||
| 		// to `test()` (or `it()`) is ignored if it is higher than the | ||||
| 		// global Jest timeout. So we need to set it globally. | ||||
| 		// | ||||
| 		// https://github.com/facebook/jest/issues/5055#issuecomment-513585906 | ||||
| 		jest.setTimeout(specTimeout); | ||||
|  | ||||
| 		// To test the migrations, we have to use the filesystem sync target | ||||
| 		// because the sync target snapshots are plain files. Eventually | ||||
| 		// it should be possible to copy a filesystem target to memory | ||||
| @@ -70,7 +77,7 @@ describe('synchronizer_MigrationHandler', function() { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should init a new sync target', asyncTest(async () => { | ||||
| 	it('should init a new sync target', (async () => { | ||||
| 		// Check that basic folders "locks" and "temp" are created for new sync targets. | ||||
| 		await migrationHandler().upgrade(1); | ||||
| 		const result = await fileApi().list(); | ||||
| @@ -78,13 +85,13 @@ describe('synchronizer_MigrationHandler', function() { | ||||
| 		expect(result.items.filter((i: any) => i.path === Dirnames.Temp).length).toBe(1); | ||||
| 	}), specTimeout); | ||||
|  | ||||
| 	it('should not allow syncing if the sync target is out-dated', asyncTest(async () => { | ||||
| 	it('should not allow syncing if the sync target is out-dated', (async () => { | ||||
| 		await synchronizer().start(); | ||||
| 		await fileApi().put('info.json', `{"version":${Setting.value('syncVersion') - 1}}`); | ||||
| 		await expectThrow(async () => await migrationHandler().checkCanSync(), 'outdatedSyncTarget'); | ||||
| 	}), specTimeout); | ||||
|  | ||||
| 	it('should not allow syncing if the client is out-dated', asyncTest(async () => { | ||||
| 	it('should not allow syncing if the client is out-dated', (async () => { | ||||
| 		await synchronizer().start(); | ||||
| 		await fileApi().put('info.json', `{"version":${Setting.value('syncVersion') + 1}}`); | ||||
| 		await expectThrow(async () => await migrationHandler().checkCanSync(), 'outdatedClient'); | ||||
| @@ -93,7 +100,7 @@ describe('synchronizer_MigrationHandler', function() { | ||||
| 	for (const migrationVersionString in migrationTests) { | ||||
| 		const migrationVersion = Number(migrationVersionString); | ||||
|  | ||||
| 		it(`should migrate (${migrationVersion})`, asyncTest(async () => { | ||||
| 		it(`should migrate (${migrationVersion})`, (async () => { | ||||
| 			await deploySyncTargetSnapshot('normal', migrationVersion - 1); | ||||
|  | ||||
| 			const info = await migrationHandler().fetchSyncTargetInfo(); | ||||
| @@ -120,7 +127,7 @@ describe('synchronizer_MigrationHandler', function() { | ||||
| 			await expectNotThrow(async () => await checkTestData(testData)); | ||||
| 		}), specTimeout); | ||||
|  | ||||
| 		it(`should migrate (E2EE) (${migrationVersion})`, asyncTest(async () => { | ||||
| 		it(`should migrate (E2EE) (${migrationVersion})`, (async () => { | ||||
| 			// First create some test data that will be used to validate | ||||
| 			// that the migration didn't alter any data. | ||||
| 			await deploySyncTargetSnapshot('e2ee', migrationVersion - 1); | ||||
|   | ||||
							
								
								
									
										86
									
								
								packages/app-cli/tests/test-utils-synchronizer.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										86
									
								
								packages/app-cli/tests/test-utils-synchronizer.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,86 @@ | ||||
| "use strict"; | ||||
| var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||||
|     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||||
|     return new (P || (P = Promise))(function (resolve, reject) { | ||||
|         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||||
|         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||||
|         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||||
|         step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||||
|     }); | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| exports.localNotesFoldersSameAsRemote = exports.remoteResources = exports.remoteNotesFoldersResources = exports.remoteNotesAndFolders = exports.allNotesFolders = void 0; | ||||
| const BaseModel_1 = require("@joplin/lib/BaseModel"); | ||||
| const { fileApi } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const BaseItem = require('@joplin/lib/models/BaseItem.js'); | ||||
| function allNotesFolders() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const folders = yield Folder.all(); | ||||
|         const notes = yield Note.all(); | ||||
|         return folders.concat(notes); | ||||
|     }); | ||||
| } | ||||
| exports.allNotesFolders = allNotesFolders; | ||||
| function remoteItemsByTypes(types) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const list = yield fileApi().list('', { includeDirs: false, syncItemsOnly: true }); | ||||
|         if (list.has_more) | ||||
|             throw new Error('Not implemented!!!'); | ||||
|         const files = list.items; | ||||
|         const output = []; | ||||
|         for (const file of files) { | ||||
|             const remoteContent = yield fileApi().get(file.path); | ||||
|             const content = yield BaseItem.unserialize(remoteContent); | ||||
|             if (types.indexOf(content.type_) < 0) | ||||
|                 continue; | ||||
|             output.push(content); | ||||
|         } | ||||
|         return output; | ||||
|     }); | ||||
| } | ||||
| function remoteNotesAndFolders() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         return remoteItemsByTypes([BaseModel_1.default.TYPE_NOTE, BaseModel_1.default.TYPE_FOLDER]); | ||||
|     }); | ||||
| } | ||||
| exports.remoteNotesAndFolders = remoteNotesAndFolders; | ||||
| function remoteNotesFoldersResources() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         return remoteItemsByTypes([BaseModel_1.default.TYPE_NOTE, BaseModel_1.default.TYPE_FOLDER, BaseModel_1.default.TYPE_RESOURCE]); | ||||
|     }); | ||||
| } | ||||
| exports.remoteNotesFoldersResources = remoteNotesFoldersResources; | ||||
// Convenience wrapper: all remote resources only.
async function remoteResources() {
    return remoteItemsByTypes([BaseModel_1.default.TYPE_RESOURCE]);
}
exports.remoteResources = remoteResources;
// Asserts (via the supplied jest `expect`) that each local note/folder in
// `locals` exists remotely with the same title, and that the item counts match.
// Any exception raised while comparing is captured and surfaced as a failed
// `expect(error).toBe(null)` so the test reports a proper failure.
async function localNotesFoldersSameAsRemote(locals, expect) {
    let caught = null;
    try {
        const remotes = await remoteNotesAndFolders();
        expect(locals.length).toBe(remotes.length);
        for (const dbItem of locals) {
            const path = BaseItem.systemPath(dbItem);
            const remote = await fileApi().stat(path);
            expect(!!remote).toBe(true);
            if (!remote) continue;
            const serialized = await fileApi().get(path);
            const remoteItem = dbItem.type_ == BaseModel_1.default.TYPE_NOTE
                ? await Note.unserialize(serialized)
                : await Folder.unserialize(serialized);
            expect(remoteItem.title).toBe(dbItem.title);
        }
    }
    catch (e) {
        caught = e;
    }
    expect(caught).toBe(null);
}
exports.localNotesFoldersSameAsRemote = localNotesFoldersSameAsRemote;
| //# sourceMappingURL=test-utils-synchronizer.js.map | ||||
							
								
								
									
										65
									
								
								packages/app-cli/tests/test-utils-synchronizer.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										65
									
								
								packages/app-cli/tests/test-utils-synchronizer.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,65 @@ | ||||
| import BaseModel from '@joplin/lib/BaseModel'; | ||||
|  | ||||
| const { fileApi } = require('./test-utils.js'); | ||||
| const Folder = require('@joplin/lib/models/Folder.js'); | ||||
| const Note = require('@joplin/lib/models/Note.js'); | ||||
| const BaseItem = require('@joplin/lib/models/BaseItem.js'); | ||||
|  | ||||
| export async function allNotesFolders() { | ||||
| 	const folders = await Folder.all(); | ||||
| 	const notes = await Note.all(); | ||||
| 	return folders.concat(notes); | ||||
| } | ||||
|  | ||||
| async function remoteItemsByTypes(types: number[]) { | ||||
| 	const list = await fileApi().list('', { includeDirs: false, syncItemsOnly: true }); | ||||
| 	if (list.has_more) throw new Error('Not implemented!!!'); | ||||
| 	const files = list.items; | ||||
|  | ||||
| 	const output = []; | ||||
| 	for (const file of files) { | ||||
| 		const remoteContent = await fileApi().get(file.path); | ||||
| 		const content = await BaseItem.unserialize(remoteContent); | ||||
| 		if (types.indexOf(content.type_) < 0) continue; | ||||
| 		output.push(content); | ||||
| 	} | ||||
| 	return output; | ||||
| } | ||||
|  | ||||
| export async function remoteNotesAndFolders() { | ||||
| 	return remoteItemsByTypes([BaseModel.TYPE_NOTE, BaseModel.TYPE_FOLDER]); | ||||
| } | ||||
|  | ||||
| export async function remoteNotesFoldersResources() { | ||||
| 	return remoteItemsByTypes([BaseModel.TYPE_NOTE, BaseModel.TYPE_FOLDER, BaseModel.TYPE_RESOURCE]); | ||||
| } | ||||
|  | ||||
| export async function remoteResources() { | ||||
| 	return remoteItemsByTypes([BaseModel.TYPE_RESOURCE]); | ||||
| } | ||||
|  | ||||
| export async function localNotesFoldersSameAsRemote(locals: any[], expect: Function) { | ||||
| 	let error = null; | ||||
| 	try { | ||||
| 		const nf = await remoteNotesAndFolders(); | ||||
| 		expect(locals.length).toBe(nf.length); | ||||
|  | ||||
| 		for (let i = 0; i < locals.length; i++) { | ||||
| 			const dbItem = locals[i]; | ||||
| 			const path = BaseItem.systemPath(dbItem); | ||||
| 			const remote = await fileApi().stat(path); | ||||
|  | ||||
| 			expect(!!remote).toBe(true); | ||||
| 			if (!remote) continue; | ||||
|  | ||||
| 			let remoteContent = await fileApi().get(path); | ||||
|  | ||||
| 			remoteContent = dbItem.type_ == BaseModel.TYPE_NOTE ? await Note.unserialize(remoteContent) : await Folder.unserialize(remoteContent); | ||||
| 			expect(remoteContent.title).toBe(dbItem.title); | ||||
| 		} | ||||
| 	} catch (e) { | ||||
| 		error = e; | ||||
| 	} | ||||
|  | ||||
| 	expect(error).toBe(null); | ||||
| } | ||||
| @@ -139,7 +139,7 @@ setSyncTargetName('memory'); | ||||
|  | ||||
| // console.info(`Testing with sync target: ${syncTargetName_}`); | ||||
|  | ||||
| const syncDir = `${__dirname}/../tests/sync`; | ||||
| const syncDir = `${__dirname}/../tests/sync/${suiteName_}`; | ||||
|  | ||||
| // TODO: Should probably update this for Jest? | ||||
|  | ||||
| @@ -559,27 +559,6 @@ function fileContentEqual(path1, path2) { | ||||
| 	return content1 === content2; | ||||
| } | ||||
|  | ||||
| // Wrap an async test in a try/catch block so that done() is always called | ||||
| // and display a proper error message instead of "unhandled promise error" | ||||
| function asyncTest(callback) { | ||||
| 	return async function(done) { | ||||
| 		try { | ||||
| 			await callback(); | ||||
| 		} catch (error) { | ||||
| 			if (error.constructor && error.constructor.name === 'ExpectationFailed') { | ||||
| 				// OK - will be reported by Jest | ||||
| 			} else { | ||||
| 				// Better to rethrow exception as stack trace is more useful in this case | ||||
| 				throw error; | ||||
| 				// console.error(error); | ||||
| 				// expect(0).toBe(1, 'Test has thrown an exception - see above error'); | ||||
| 			} | ||||
| 		} finally { | ||||
| 			done(); | ||||
| 		} | ||||
| 	}; | ||||
| } | ||||
|  | ||||
| async function allSyncTargetItemsEncrypted() { | ||||
| 	const list = await fileApi().list('', { includeDirs: false }); | ||||
| 	const files = list.items; | ||||
| @@ -792,4 +771,4 @@ class TestApp extends BaseApplication { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| module.exports = { newPluginService, newPluginScript, synchronizerStart, afterEachCleanUp, syncTargetName, setSyncTargetName, syncDir, createTempDir, isNetworkSyncTarget, kvStore, expectThrow, logger, expectNotThrow, resourceService, resourceFetcher, tempFilePath, allSyncTargetItemsEncrypted, msleep, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, checkThrow, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, asyncTest, currentClientId, id, ids, sortedIds, at, createNTestNotes, createNTestFolders, createNTestTags, TestApp }; | ||||
| module.exports = { newPluginService, newPluginScript, synchronizerStart, afterEachCleanUp, syncTargetName, setSyncTargetName, syncDir, createTempDir, isNetworkSyncTarget, kvStore, expectThrow, logger, expectNotThrow, resourceService, resourceFetcher, tempFilePath, allSyncTargetItemsEncrypted, msleep, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, checkThrow, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, currentClientId, id, ids, sortedIds, at, createNTestNotes, createNTestFolders, createNTestTags, TestApp }; | ||||
|   | ||||
| @@ -2,20 +2,16 @@ | ||||
|  | ||||
|  | ||||
| const time = require('@joplin/lib/time').default; | ||||
| const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('./test-utils.js'); | ||||
| const timeUtils = require('@joplin/lib/time'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('timeUtils', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should go back in time', asyncTest(async () => { | ||||
| 	it('should go back in time', (async () => { | ||||
| 		let startDate = new Date('3 Aug 2020'); | ||||
| 		let endDate = new Date('2 Aug 2020'); | ||||
|  | ||||
| @@ -40,7 +36,7 @@ describe('timeUtils', function() { | ||||
| 		expect(time.goBackInTime(startDate, 23, 'year')).toBe(endDate.getTime().toString()); | ||||
| 	})); | ||||
|  | ||||
| 	it('should go forward in time', asyncTest(async () => { | ||||
| 	it('should go forward in time', (async () => { | ||||
| 		let startDate = new Date('2 Aug 2020'); | ||||
| 		let endDate = new Date('3 Aug 2020'); | ||||
|  | ||||
|   | ||||
| @@ -1,18 +1,9 @@ | ||||
|  | ||||
| const { asyncTest } = require('./test-utils.js'); | ||||
| const urlUtils = require('@joplin/lib/urlUtils.js'); | ||||
|  | ||||
| process.on('unhandledRejection', (reason, p) => { | ||||
| 	console.log('Unhandled Rejection at urlUtils: Promise', p, 'reason:', reason); | ||||
| }); | ||||
|  | ||||
| describe('urlUtils', function() { | ||||
|  | ||||
| 	beforeEach(async (done) => { | ||||
| 		done(); | ||||
| 	}); | ||||
|  | ||||
| 	it('should prepend a base URL', asyncTest(async (done) => { | ||||
| 	it('should prepend a base URL', (async () => { | ||||
| 		expect(urlUtils.prependBaseUrl('testing.html', 'http://example.com')).toBe('http://example.com/testing.html'); | ||||
| 		expect(urlUtils.prependBaseUrl('testing.html', 'http://example.com/')).toBe('http://example.com/testing.html'); | ||||
| 		expect(urlUtils.prependBaseUrl('/jmp/?id=123&u=http://something.com/test', 'http://example.com/')).toBe('http://example.com/jmp/?id=123&u=http://something.com/test'); | ||||
| @@ -31,7 +22,7 @@ describe('urlUtils', function() { | ||||
| 		expect(urlUtils.prependBaseUrl('#local-anchor', 'http://example.com')).toBe('#local-anchor'); | ||||
| 	})); | ||||
|  | ||||
| 	it('should detect resource URLs', asyncTest(async (done) => { | ||||
| 	it('should detect resource URLs', (async () => { | ||||
| 		const testCases = [ | ||||
| 			[':/1234abcd1234abcd1234abcd1234abcd', { itemId: '1234abcd1234abcd1234abcd1234abcd', hash: '' }], | ||||
| 			[':/1234abcd1234abcd1234abcd1234abcd "some text"', { itemId: '1234abcd1234abcd1234abcd1234abcd', hash: '' }], | ||||
| @@ -61,7 +52,7 @@ describe('urlUtils', function() { | ||||
| 		} | ||||
| 	})); | ||||
|  | ||||
| 	it('should extract resource URLs', asyncTest(async (done) => { | ||||
| 	it('should extract resource URLs', (async () => { | ||||
| 		const testCases = [ | ||||
| 			['Bla [](:/11111111111111111111111111111111) bla [](:/22222222222222222222222222222222) bla', ['11111111111111111111111111111111', '22222222222222222222222222222222']], | ||||
| 			['Bla [](:/11111111111111111111111111111111 "Some title") bla [](:/22222222222222222222222222222222 "something else") bla', ['11111111111111111111111111111111', '22222222222222222222222222222222']], | ||||
|   | ||||
| @@ -11,10 +11,6 @@ process.env.NODE_ENV = 'production'; | ||||
| // Makes the script crash on unhandled rejections instead of silently | ||||
| // ignoring them. In the future, promise rejections that are not handled will | ||||
| // terminate the Node.js process with a non-zero exit code. | ||||
| process.on('unhandledRejection', err => { | ||||
| 	throw err; | ||||
| }); | ||||
|  | ||||
| // Ensure environment variables are read. | ||||
| require('../config/env'); | ||||
|  | ||||
|   | ||||
| @@ -7,10 +7,6 @@ process.env.NODE_ENV = 'development'; | ||||
| // Makes the script crash on unhandled rejections instead of silently | ||||
| // ignoring them. In the future, promise rejections that are not handled will | ||||
| // terminate the Node.js process with a non-zero exit code. | ||||
| process.on('unhandledRejection', err => { | ||||
| 	throw err; | ||||
| }); | ||||
|  | ||||
| // Ensure environment variables are read. | ||||
| require('../config/env'); | ||||
|  | ||||
|   | ||||
| @@ -8,10 +8,6 @@ process.env.PUBLIC_URL = ''; | ||||
| // Makes the script crash on unhandled rejections instead of silently | ||||
| // ignoring them. In the future, promise rejections that are not handled will | ||||
| // terminate the Node.js process with a non-zero exit code. | ||||
| process.on('unhandledRejection', err => { | ||||
| 	throw err; | ||||
| }); | ||||
|  | ||||
| // Ensure environment variables are read. | ||||
| require('../config/env'); | ||||
|  | ||||
|   | ||||
							
								
								
									
										9
									
								
								packages/lib/dummy.test.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										9
									
								
								packages/lib/dummy.test.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,9 @@ | ||||
| // Dummy test because the Jest setup is done but there's for now no test. | ||||
|  | ||||
| describe('dummy', () => { | ||||
|  | ||||
| 	it('should pass', () => { | ||||
| 		expect(1).toBe(1); | ||||
| 	}); | ||||
|  | ||||
| }); | ||||
| @@ -1,16 +0,0 @@ | ||||
| // Wrap an async test in a try/catch block so that done() is always called | ||||
| // and display a proper error message instead of "unhandled promise error" | ||||
| function asyncTest(callback) { | ||||
| 	return async function(done) { | ||||
| 		try { | ||||
| 			await callback(); | ||||
| 		} catch (error) { | ||||
| 			console.error(error); | ||||
| 			expect('good').toBe('not good', 'Test has thrown an exception - see above error'); | ||||
| 		} finally { | ||||
| 			done(); | ||||
| 		} | ||||
| 	}; | ||||
| } | ||||
|  | ||||
| module.exports = { asyncTest }; | ||||
|   | ||||
		Reference in New Issue
	
	Block a user