From 7bafb1a54615e38a7d6bb6926f9dbe6401bec22f Mon Sep 17 00:00:00 2001
From: Laurent Cozic
Date: Sun, 22 Oct 2023 16:05:00 +0100
Subject: [PATCH] update

---
 .eslintignore                              |   1 +
 .gitignore                                 |   1 +
 packages/app-cli/tests/support/sample.txt  |   1 +
 packages/app-cli/tests/support/sample2.txt |   1 +
 packages/lib/RotatingLogs.test.ts          |   3 +-
 packages/lib/Synchronizer.ts               |  11 +-
 packages/lib/models/BaseItem.test.js       | 125 ------------------
 packages/lib/models/BaseItem.test.ts       |  38 ++++++
 packages/lib/models/BaseItem.ts            |  10 +-
 .../Synchronizer.resources.test.ts         |  91 ++++++++++---
 10 files changed, 130 insertions(+), 152 deletions(-)
 create mode 100644 packages/app-cli/tests/support/sample.txt
 create mode 100644 packages/app-cli/tests/support/sample2.txt
 delete mode 100644 packages/lib/models/BaseItem.test.js
 create mode 100644 packages/lib/models/BaseItem.test.ts

diff --git a/.eslintignore b/.eslintignore
index a0ef5e900..096a50768 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -630,6 +630,7 @@ packages/lib/markdownUtils2.test.js
 packages/lib/markupLanguageUtils.js
 packages/lib/migrations/42.js
 packages/lib/models/Alarm.js
+packages/lib/models/BaseItem.test.js
 packages/lib/models/BaseItem.js
 packages/lib/models/Folder.sharing.test.js
 packages/lib/models/Folder.test.js
diff --git a/.gitignore b/.gitignore
index b297ed443..eccbf1165 100644
--- a/.gitignore
+++ b/.gitignore
@@ -612,6 +612,7 @@ packages/lib/markdownUtils2.test.js
 packages/lib/markupLanguageUtils.js
 packages/lib/migrations/42.js
 packages/lib/models/Alarm.js
+packages/lib/models/BaseItem.test.js
 packages/lib/models/BaseItem.js
 packages/lib/models/Folder.sharing.test.js
 packages/lib/models/Folder.test.js
diff --git a/packages/app-cli/tests/support/sample.txt b/packages/app-cli/tests/support/sample.txt
new file mode 100644
index 000000000..af230d9ed
--- /dev/null
+++ b/packages/app-cli/tests/support/sample.txt
@@ -0,0 +1 @@
+just testing
\ No newline at end of file
diff --git a/packages/app-cli/tests/support/sample2.txt b/packages/app-cli/tests/support/sample2.txt
new file mode 100644
index 000000000..55ecc2ed9
--- /dev/null
+++ b/packages/app-cli/tests/support/sample2.txt
@@ -0,0 +1 @@
+just testing 2
\ No newline at end of file
diff --git a/packages/lib/RotatingLogs.test.ts b/packages/lib/RotatingLogs.test.ts
index a8990a537..5181ca8a0 100644
--- a/packages/lib/RotatingLogs.test.ts
+++ b/packages/lib/RotatingLogs.test.ts
@@ -48,8 +48,7 @@ describe('RotatingLogs', () => {
 		try {
 			dir = await createTempDir();
 			await createTestLogFile(dir);
-			await msleep(100);
-			const rotatingLogs: RotatingLogs = new RotatingLogs(dir, 1, 100);
+			const rotatingLogs: RotatingLogs = new RotatingLogs(dir, 1, 5000);
 			await rotatingLogs.cleanActiveLogFile();
 			await rotatingLogs.deleteNonActiveLogFiles();
 			const files = await readdir(dir);
diff --git a/packages/lib/Synchronizer.ts b/packages/lib/Synchronizer.ts
index bf18e9562..7ec0cbcc2 100644
--- a/packages/lib/Synchronizer.ts
+++ b/packages/lib/Synchronizer.ts
@@ -701,8 +701,15 @@ export default class Synchronizer {
 							logger.warn(`Uploading a large resource (resourceId: ${local.id}, size:${resource.size} bytes) which may tie up the sync process.`);
 						}
 
-						// TODO: Compare blob_updated_time to stored sync_item.updated_time??????????????
-						await this.apiCall('put', remoteContentPath, null, { path: localResourceContentPath, source: 'file', shareId: resource.share_id });
+						// We skip updating the blob if it hasn't
+						// been modified since the last sync. In
+						// that case, it means the resource metadata
+						// (title, filename, etc.) has been changed,
+						// but not the data blob.
+						const syncItem = await BaseItem.syncItem(syncTargetId, resource.id, { fields: ['sync_time', 'force_sync'] });
+						if (!syncItem || syncItem.sync_time < resource.blob_updated_time || syncItem.force_sync) {
+							await this.apiCall('put', remoteContentPath, null, { path: localResourceContentPath, source: 'file', shareId: resource.share_id });
+						}
 					} catch (error) {
 						if (isCannotSyncError(error)) {
 							await handleCannotSyncItem(ItemClass, syncTargetId, local, error.message);
diff --git a/packages/lib/models/BaseItem.test.js b/packages/lib/models/BaseItem.test.js
deleted file mode 100644
index e1f0633de..000000000
--- a/packages/lib/models/BaseItem.test.js
+++ /dev/null
@@ -1,125 +0,0 @@
-const { setupDatabaseAndSynchronizer, switchClient } = require('../testing/test-utils.js');
-const Folder = require('../models/Folder').default;
-const Note = require('../models/Note').default;
-
-describe('models/BaseItem', () => {
-
-	beforeEach(async () => {
-		await setupDatabaseAndSynchronizer(1);
-		await switchClient(1);
-	});
-
-	// This is to handle the case where a property is removed from a BaseItem table - in that case files in
-	// the sync target will still have the old property but we don't need it locally.
-	it('should ignore properties that are present in sync file but not in database when serialising', (async () => {
-		const folder = await Folder.save({ title: 'folder1' });
-
-		let serialized = await Folder.serialize(folder);
-		serialized += '\nignore_me: true';
-
-		const unserialized = await Folder.unserialize(serialized);
-
-		expect('ignore_me' in unserialized).toBe(false);
-	}));
-
-	it('should not modify title when unserializing', (async () => {
-		const folder1 = await Folder.save({ title: '' });
-		const folder2 = await Folder.save({ title: 'folder1' });
-
-		const serialized1 = await Folder.serialize(folder1);
-		const unserialized1 = await Folder.unserialize(serialized1);
-
-		expect(unserialized1.title).toBe(folder1.title);
-
-		const serialized2 = await Folder.serialize(folder2);
-		const unserialized2 = await Folder.unserialize(serialized2);
-
-		expect(unserialized2.title).toBe(folder2.title);
-	}));
-
-	it('should correctly unserialize note timestamps', (async () => {
-		const folder = await Folder.save({ title: 'folder' });
-		const note = await Note.save({ title: 'note', parent_id: folder.id });
-
-		const serialized = await Note.serialize(note);
-		const unserialized = await Note.unserialize(serialized);
-
-		expect(unserialized.created_time).toEqual(note.created_time);
-		expect(unserialized.updated_time).toEqual(note.updated_time);
-		expect(unserialized.user_created_time).toEqual(note.user_created_time);
-		expect(unserialized.user_updated_time).toEqual(note.user_updated_time);
-	}));
-
-	it('should serialize geolocation fields', (async () => {
-		const folder = await Folder.save({ title: 'folder' });
-		let note = await Note.save({ title: 'note', parent_id: folder.id });
-		note = await Note.load(note.id);
-
-		let serialized = await Note.serialize(note);
-		let unserialized = await Note.unserialize(serialized);
-
-		expect(unserialized.latitude).toEqual('0.00000000');
-		expect(unserialized.longitude).toEqual('0.00000000');
-		expect(unserialized.altitude).toEqual('0.0000');
-
-		await Note.save({
-			id: note.id,
-			longitude: -3.459,
-			altitude: 0,
-			latitude: 48.732,
-		});
-		note = await Note.load(note.id);
-
-		serialized = await Note.serialize(note);
-		unserialized = await Note.unserialize(serialized);
-
-		expect(unserialized.latitude).toEqual(note.latitude);
-		expect(unserialized.longitude).toEqual(note.longitude);
-		expect(unserialized.altitude).toEqual(note.altitude);
-	}));
-
-	it('should serialize and unserialize notes', (async () => {
-		const folder = await Folder.save({ title: 'folder' });
-		const note = await Note.save({ title: 'note', parent_id: folder.id });
-		await Note.save({
-			id: note.id,
-			longitude: -3.459,
-			altitude: 0,
-			latitude: 48.732,
-		});
-
-		const noteBefore = await Note.load(note.id);
-		const serialized = await Note.serialize(noteBefore);
-		const noteAfter = await Note.unserialize(serialized);
-
-		expect(noteAfter).toEqual(noteBefore);
-	}));
-
-	it('should serialize and unserialize properties that contain new lines', (async () => {
-		const sourceUrl = `
-https://joplinapp.org/ \\n
-`;
-
-		const note = await Note.save({ title: 'note', source_url: sourceUrl });
-
-		const noteBefore = await Note.load(note.id);
-		const serialized = await Note.serialize(noteBefore);
-		const noteAfter = await Note.unserialize(serialized);
-
-		expect(noteAfter).toEqual(noteBefore);
-	}));
-
-	it('should not serialize the note title and body', (async () => {
-		const note = await Note.save({ title: 'my note', body: `one line
-two line
-three line \\n no escape` });
-
-		const noteBefore = await Note.load(note.id);
-		const serialized = await Note.serialize(noteBefore);
-		expect(serialized.indexOf(`my note
-
-one line
-two line
-three line \\n no escape`)).toBe(0);
-	}));
-});
diff --git a/packages/lib/models/BaseItem.test.ts b/packages/lib/models/BaseItem.test.ts
new file mode 100644
index 000000000..d55456d40
--- /dev/null
+++ b/packages/lib/models/BaseItem.test.ts
@@ -0,0 +1,38 @@
+import { afterAllCleanUp, setupDatabaseAndSynchronizer, switchClient, syncTargetId, synchronizerStart, msleep } from '../testing/test-utils';
+import BaseItem from './BaseItem';
+import Note from './Note';
+
+describe('BaseItem', () => {
+
+	beforeEach(async () => {
+		await setupDatabaseAndSynchronizer(1);
+		await switchClient(1);
+	});
+
+	afterAll(async () => {
+		await afterAllCleanUp();
+	});
+
+
+	it('should update item sync item', async () => {
+		const note1 = await Note.save({ });
+
+		const syncTime = async (itemId: string) => {
+			const syncItem = await BaseItem.syncItem(syncTargetId(), itemId, { fields: ['sync_time'] });
+			return syncItem ? syncItem.sync_time : 0;
+		};
+
+		expect(await syncTime(note1.id)).toBe(0);
+
+		await synchronizerStart();
+
+		const newTime = await syncTime(note1.id);
+		expect(newTime).toBeLessThanOrEqual(Date.now());
+
+		// Check that it doesn't change if we sync again
+		await msleep(1);
+		await synchronizerStart();
+		expect(await syncTime(note1.id)).toBe(newTime);
+	});
+
+});
diff --git a/packages/lib/models/BaseItem.ts b/packages/lib/models/BaseItem.ts
index 56a6a4b15..bfe214aaf 100644
--- a/packages/lib/models/BaseItem.ts
+++ b/packages/lib/models/BaseItem.ts
@@ -1,5 +1,5 @@
 import { ModelType, DeleteOptions } from '../BaseModel';
-import { BaseItemEntity, DeletedItemEntity, NoteEntity } from '../services/database/types';
+import { BaseItemEntity, DeletedItemEntity, NoteEntity, SyncItemEntity } from '../services/database/types';
 import Setting from './Setting';
 import BaseModel from '../BaseModel';
 import time from '../time';
@@ -194,6 +194,14 @@ export default class BaseItem extends BaseModel {
 		return output;
 	}
 
+	public static async syncItem(syncTarget: number, itemId: string, options: LoadOptions = null): Promise<SyncItemEntity> {
+		options = {
+			fields: '*',
+			...options,
+		};
+		return await this.db().selectOne(`SELECT ${this.db().escapeFieldsToString(options.fields)} FROM sync_items WHERE sync_target = ? AND item_id = ?`, [syncTarget, itemId]);
+	}
+
 	public static async allSyncItems(syncTarget: number) {
 		const output = await this.db().selectAll('SELECT * FROM sync_items WHERE sync_target = ?', [syncTarget]);
 		return output;
diff --git a/packages/lib/services/synchronizer/Synchronizer.resources.test.ts b/packages/lib/services/synchronizer/Synchronizer.resources.test.ts
index 4eff62bc6..5a1a85587 100644
--- a/packages/lib/services/synchronizer/Synchronizer.resources.test.ts
+++ b/packages/lib/services/synchronizer/Synchronizer.resources.test.ts
@@ -1,9 +1,9 @@
 import time from '../../time';
 import shim from '../../shim';
 import Setting from '../../models/Setting';
-import { NoteEntity } from '../../services/database/types';
+import { NoteEntity, ResourceEntity } from '../../services/database/types';
 import { remoteNotesFoldersResources, remoteResources } from '../../testing/test-utils-synchronizer';
-import { synchronizerStart, tempFilePath, resourceFetcher, supportDir, setupDatabaseAndSynchronizer, synchronizer, fileApi, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, checkThrowAsync } from '../../testing/test-utils';
+import { synchronizerStart, tempFilePath, resourceFetcher, supportDir, setupDatabaseAndSynchronizer, synchronizer, fileApi, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, checkThrowAsync, msleep } from '../../testing/test-utils';
 import Folder from '../../models/Folder';
 import Note from '../../models/Note';
 import Resource from '../../models/Resource';
@@ -27,7 +27,7 @@ describe('Synchronizer.resources', () => {
 		insideBeforeEach = false;
 	});
 
-	it('should sync resources', (async () => {
+	it('should sync resources', async () => {
 		while (insideBeforeEach) await time.msleep(500);
 
 		const folder1 = await Folder.save({ title: 'folder1' });
@@ -58,9 +58,9 @@ describe('Synchronizer.resources', () => {
 		const resourcePath1_2 = Resource.fullPath(resource1_2);
 
 		expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
-	}));
+	});
 
-	it('should handle resource download errors', (async () => {
+	it('should handle resource download errors', async () => {
 		while (insideBeforeEach) await time.msleep(500);
 
 		const folder1 = await Folder.save({ title: 'folder1' });
@@ -87,9 +87,9 @@ describe('Synchronizer.resources', () => {
 		const ls = await Resource.localState(resource1);
 		expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_ERROR);
 		expect(ls.fetch_error).toBe('did not work');
-	}));
+	});
 
-	it('should set the resource file size if it is missing', (async () => {
+	it('should set the resource file size if it is missing', async () => {
 		while (insideBeforeEach) await time.msleep(500);
 
 		const folder1 = await Folder.save({ title: 'folder1' });
@@ -110,9 +110,9 @@ describe('Synchronizer.resources', () => {
 		await fetcher.waitForAllFinished();
 		r1 = await Resource.load(r1.id);
 		expect(r1.size).toBe(2720);
-	}));
+	});
 
-	it('should delete resources', (async () => {
+	it('should delete resources', async () => {
 		while (insideBeforeEach) await time.msleep(500);
 
 		const folder1 = await Folder.save({ title: 'folder1' });
@@ -142,9 +142,9 @@ describe('Synchronizer.resources', () => {
 		allResources = await Resource.all();
 		expect(allResources.length).toBe(0);
 		expect(await shim.fsDriver().exists(resourcePath1)).toBe(false);
-	}));
+	});
 
-	it('should encrypt resources', (async () => {
+	it('should encrypt resources', async () => {
 		setEncryptionEnabled(true);
 		const masterKey = await loadEncryptionMasterKey();
 
@@ -170,9 +170,9 @@ describe('Synchronizer.resources', () => {
 		const resourcePath1_2 = Resource.fullPath(resource1_2);
 
 		expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
-	}));
+	});
 
-	it('should sync resource blob changes', (async () => {
+	it('should sync resource blob changes', async () => {
 		const tempFile = tempFilePath('txt');
 		await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
 		const folder1 = await Folder.save({ title: 'folder1' });
@@ -204,9 +204,9 @@ describe('Synchronizer.resources', () => {
 		const resource1_1 = (await Resource.all())[0];
 		expect(resource1_1.size).toBe(newSize);
 		expect(await Resource.resourceBlobContent(resource1_1.id, 'utf8')).toBe('1234 MOD');
-	}));
+	});
 
-	it('should handle resource conflicts', (async () => {
+	it('should handle resource conflicts', async () => {
 		{
 			const tempFile = tempFilePath('txt');
 			await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
@@ -271,9 +271,9 @@ describe('Synchronizer.resources', () => {
 			expect(resourceConflictFolder).toBeTruthy();
 			expect(resourceConflictFolder.parent_id).toBeFalsy();
 		}
-	}));
+	});
 
-	it('should handle resource conflicts if a resource is changed locally but deleted remotely', (async () => {
+	it('should handle resource conflicts if a resource is changed locally but deleted remotely', async () => {
 		{
 			const tempFile = tempFilePath('txt');
 			await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
@@ -316,9 +316,9 @@ describe('Synchronizer.resources', () => {
 			expect(originalResource.id).not.toBe(conflictResource.id);
 			expect(conflictResource.title).toBe('modified resource');
 		}
-	}));
+	});
 
-	it('should not upload a resource if it has not been fetched yet', (async () => {
+	it('should not upload a resource if it has not been fetched yet', async () => {
 		// In some rare cases, the synchronizer might try to upload a resource even though it
 		// doesn't have the resource file. It can happen in this situation:
 		// - C1 create resource
@@ -350,9 +350,9 @@ describe('Synchronizer.resources', () => {
 		await BaseItem.saveSyncEnabled(ModelType.Resource, resource.id);
 		await synchronizerStart();
 		expect((await remoteResources()).length).toBe(1);
-	}));
+	});
 
-	it('should not download resources over the limit', (async () => {
+	it('should not download resources over the limit', async () => {
 		const note1 = await Note.save({ title: 'note' });
 		await shim.attachFileToNote(note1, `${supportDir}/photo.jpg`);
 		await synchronizer().start();
@@ -368,6 +368,53 @@ describe('Synchronizer.resources', () => {
 		expect(syncItems.length).toBe(2);
 		expect(syncItems[1].item_location).toBe(BaseItem.SYNC_ITEM_LOCATION_REMOTE);
 		expect(syncItems[1].sync_disabled).toBe(1);
-	}));
+	});
+
+	it('should not upload blob if it has not changed', async () => {
+		const note = await Note.save({});
+		await shim.attachFileToNote(note, `${supportDir}/sample.txt`);
+		const resource: ResourceEntity = (await Resource.all())[0];
+		const resourcePath = `.resource/${resource.id}`;
+
+		await synchronizer().api().put(resourcePath, 'before upload');
+		expect(await synchronizer().api().get(resourcePath)).toBe('before upload');
+		await synchronizerStart();
+		expect(await synchronizer().api().get(resourcePath)).toBe('just testing');
+
+		// ----------------------------------------------------------------------
+		// Change metadata only and check that blob is not uploaded. To do this,
+		// we manually overwrite the data on the sync target, then sync. If the
+		// synchronizer doesn't upload the blob, this manually changed data
+		// should remain.
+		// ----------------------------------------------------------------------
+
+		await Resource.save({ id: resource.id, title: 'my new title' });
+		await synchronizer().api().put(resourcePath, 'check if changed');
+		await synchronizerStart();
+		expect(await synchronizer().api().get(resourcePath)).toBe('check if changed');
+
+		// ----------------------------------------------------------------------
+		// Now change the blob, and check that the remote item has been
+		// overwritten.
+		// ----------------------------------------------------------------------
+
+		await Resource.updateResourceBlobContent(resource.id, `${supportDir}/sample.txt`);
+		await synchronizerStart();
+		expect(await synchronizer().api().get(resourcePath)).toBe('just testing');
+
+		// ----------------------------------------------------------------------
+		// Change the blob, then change the metadata, and sync. Even though
+		// blob_updated_time is earlier than updated_time, it should still
+		// update everything on the sync target, because both times are after
+		// the item sync_time.
+		// ----------------------------------------------------------------------
+
+		await Resource.updateResourceBlobContent(resource.id, `${supportDir}/sample2.txt`);
+		await msleep(1);
+		await Resource.save({ id: resource.id, title: 'my new title 2' });
+		await synchronizerStart();
+		expect(await synchronizer().api().get(resourcePath)).toBe('just testing 2');
+		expect(await synchronizer().api().get(`${resource.id}.md`)).toContain('my new title 2');
+	});
 
 });
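
Note: the sketch below is not part of the patch. It only restates, in isolation, the upload decision that the Synchronizer.ts hunk above introduces, assuming sync_time, force_sync and blob_updated_time are millisecond timestamps/flags as used elsewhere in the codebase. The standalone shouldUploadBlob helper and the *Like interfaces are illustrative names, not existing Joplin APIs.

// Illustrative TypeScript sketch of the blob-upload decision (hypothetical helper).
interface SyncItemLike {
	sync_time: number;
	force_sync: number;
}

interface ResourceLike {
	blob_updated_time: number;
}

// Upload the blob only if the resource has never been synced, the blob was
// modified after the last successful sync, or a re-sync is forced. A
// metadata-only change leaves blob_updated_time untouched, so the blob is skipped.
const shouldUploadBlob = (syncItem: SyncItemLike | null, resource: ResourceLike): boolean => {
	return !syncItem || syncItem.sync_time < resource.blob_updated_time || !!syncItem.force_sync;
};

// Example: item last synced at t=2000, blob last changed at t=1000 => skip upload.
console.info(shouldUploadBlob({ sync_time: 2000, force_sync: 0 }, { blob_updated_time: 1000 })); // false
// Blob changed at t=3000, after the last sync => upload.
console.info(shouldUploadBlob({ sync_time: 2000, force_sync: 0 }, { blob_updated_time: 3000 })); // true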