All: Allow modifying a resource metadata only when synchronising (#9114)
Commit 9aed3e04f4, parent 0069069254
Mirror of https://github.com/laurent22/joplin.git, synced 2024-12-21 09:38:01 +02:00
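
In short: this commit gives resources a blob_updated_time column next to updated_time. Metadata-only edits (title, filename, and so on) advance only updated_time, while blob changes advance both, so the synchronizer can compare blob_updated_time against the item's last sync_time and skip re-uploading the data blob when only the metadata changed.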
@@ -633,6 +633,7 @@ packages/lib/markdownUtils2.test.js
 packages/lib/markupLanguageUtils.js
 packages/lib/migrations/42.js
 packages/lib/models/Alarm.js
+packages/lib/models/BaseItem.test.js
 packages/lib/models/BaseItem.js
 packages/lib/models/Folder.sharing.test.js
 packages/lib/models/Folder.test.js
.gitignore (vendored, 1 addition)
@@ -615,6 +615,7 @@ packages/lib/markdownUtils2.test.js
 packages/lib/markupLanguageUtils.js
 packages/lib/migrations/42.js
 packages/lib/models/Alarm.js
+packages/lib/models/BaseItem.test.js
 packages/lib/models/BaseItem.js
 packages/lib/models/Folder.sharing.test.js
 packages/lib/models/Folder.test.js
packages/app-cli/tests/support/sample.txt (new file, 1 addition)
@@ -0,0 +1 @@
+just testing

packages/app-cli/tests/support/sample2.txt (new file, 1 addition)
@@ -0,0 +1 @@
+just testing 2
@@ -3,7 +3,7 @@ import shim from './shim';
 import Database from './database';
 import migration42 from './services/database/migrations/42';
 import migration43 from './services/database/migrations/43';
-// import migration44 from './services/database/migrations/44';
+import migration44 from './services/database/migrations/44';
 import { SqlQuery, Migration } from './services/database/types';
 import addMigrationFile from './services/database/addMigrationFile';

@@ -127,7 +127,7 @@ INSERT INTO version (version) VALUES (1);
 const migrations: Migration[] = [
 	migration42,
 	migration43,
-	// migration44,
+	migration44,
 ];

 export interface TableField {
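
Migration 44, enabled above, is not itself part of this capture. Given the blob_updated_time column declared in the resources schema further down, it presumably amounts to something like the following hypothetical SQL (the real file lives under services/database/migrations/44; its contents are unverified here):

-- Hypothetical; the actual migration is not shown in this diff.
ALTER TABLE resources ADD COLUMN blob_updated_time INT NOT NULL DEFAULT 0;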
@@ -48,8 +48,7 @@ describe('RotatingLogs', () => {
 		try {
 			dir = await createTempDir();
 			await createTestLogFile(dir);
-			await msleep(100);
-			const rotatingLogs: RotatingLogs = new RotatingLogs(dir, 1, 5000);
+			const rotatingLogs: RotatingLogs = new RotatingLogs(dir, 1, 100);
 			await rotatingLogs.cleanActiveLogFile();
 			await rotatingLogs.deleteNonActiveLogFiles();
 			const files = await readdir(dir);
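
The explicit 100 ms sleep is dropped and the constructor's third argument goes from 5000 to 100; presumably that argument is a time threshold in milliseconds, so lowering it makes the sleep unnecessary and removes a timing dependency from the test.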
@@ -604,7 +604,7 @@ export default class Synchronizer {
 				} else {
 					// Note: in order to know the real updated_time value, we need to load the content. In theory we could
 					// rely on the file timestamp (in remote.updated_time) but in practice it's not accurate enough and
-					// can lead to conflicts (for example when the file timestamp is slightly ahead of it's real
+					// can lead to conflicts (for example when the file timestamp is slightly ahead of its real
 					// updated_time). updated_time is set and managed by clients so it's always accurate.
 					// Same situation below for updateLocal.
 					//
@@ -701,7 +701,15 @@ export default class Synchronizer {
 					logger.warn(`Uploading a large resource (resourceId: ${local.id}, size:${resource.size} bytes) which may tie up the sync process.`);
 				}

-				await this.apiCall('put', remoteContentPath, null, { path: localResourceContentPath, source: 'file', shareId: resource.share_id });
+				// We skip updating the blob if it hasn't
+				// been modified since the last sync. In
+				// that case, it means the resource metadata
+				// (title, filename, etc.) has been changed,
+				// but not the data blob.
+				const syncItem = await BaseItem.syncItem(syncTargetId, resource.id, { fields: ['sync_time', 'force_sync'] });
+				if (!syncItem || syncItem.sync_time < resource.blob_updated_time || syncItem.force_sync) {
+					await this.apiCall('put', remoteContentPath, null, { path: localResourceContentPath, source: 'file', shareId: resource.share_id });
+				}
 			} catch (error) {
 				if (isCannotSyncError(error)) {
 					await handleCannotSyncItem(ItemClass, syncTargetId, local, error.message);
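
This guard is the heart of the change: the blob is re-uploaded only when it was modified after the item's last sync. As a standalone sketch (shouldUploadBlob is a hypothetical helper, not code from the commit; the field names follow the diff):

// Minimal sketch of the new upload decision.
// sync_time and force_sync come from the item's sync_items row for this target;
// blobUpdatedTime is the resource's new blob_updated_time column.
interface SyncItemLike {
	sync_time: number;
	force_sync: number;
}

const shouldUploadBlob = (syncItem: SyncItemLike | null, blobUpdatedTime: number): boolean => {
	if (!syncItem) return true; // never synced to this target: upload everything
	if (syncItem.force_sync) return true; // an explicit re-sync was requested
	return syncItem.sync_time < blobUpdatedTime; // blob changed since the last sync
};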
@@ -1,17 +1,22 @@
-const { setupDatabaseAndSynchronizer, switchClient } = require('../testing/test-utils.js');
-const Folder = require('../models/Folder').default;
-const Note = require('../models/Note').default;
+import { afterAllCleanUp, setupDatabaseAndSynchronizer, switchClient, syncTargetId, synchronizerStart, msleep } from '../testing/test-utils';
+import BaseItem from './BaseItem';
+import Folder from './Folder';
+import Note from './Note';

-describe('models/BaseItem', () => {
+describe('BaseItem', () => {

 	beforeEach(async () => {
 		await setupDatabaseAndSynchronizer(1);
 		await switchClient(1);
 	});

+	afterAll(async () => {
+		await afterAllCleanUp();
+	});
+
 	// This is to handle the case where a property is removed from a BaseItem table - in that case files in
 	// the sync target will still have the old property but we don't need it locally.
-	it('should ignore properties that are present in sync file but not in database when serialising', (async () => {
+	it('should ignore properties that are present in sync file but not in database when serialising', async () => {
 		const folder = await Folder.save({ title: 'folder1' });

 		let serialized = await Folder.serialize(folder);
@@ -20,9 +25,9 @@ describe('models/BaseItem', () => {
 		const unserialized = await Folder.unserialize(serialized);

 		expect('ignore_me' in unserialized).toBe(false);
-	}));
+	});

-	it('should not modify title when unserializing', (async () => {
+	it('should not modify title when unserializing', async () => {
 		const folder1 = await Folder.save({ title: '' });
 		const folder2 = await Folder.save({ title: 'folder1' });

@@ -35,9 +40,9 @@ describe('models/BaseItem', () => {
 		const unserialized2 = await Folder.unserialize(serialized2);

 		expect(unserialized2.title).toBe(folder2.title);
-	}));
+	});

-	it('should correctly unserialize note timestamps', (async () => {
+	it('should correctly unserialize note timestamps', async () => {
 		const folder = await Folder.save({ title: 'folder' });
 		const note = await Note.save({ title: 'note', parent_id: folder.id });

@@ -48,9 +53,9 @@ describe('models/BaseItem', () => {
 		expect(unserialized.updated_time).toEqual(note.updated_time);
 		expect(unserialized.user_created_time).toEqual(note.user_created_time);
 		expect(unserialized.user_updated_time).toEqual(note.user_updated_time);
-	}));
+	});

-	it('should serialize geolocation fields', (async () => {
+	it('should serialize geolocation fields', async () => {
 		const folder = await Folder.save({ title: 'folder' });
 		let note = await Note.save({ title: 'note', parent_id: folder.id });
 		note = await Note.load(note.id);

@@ -76,9 +81,9 @@ describe('models/BaseItem', () => {
 		expect(unserialized.latitude).toEqual(note.latitude);
 		expect(unserialized.longitude).toEqual(note.longitude);
 		expect(unserialized.altitude).toEqual(note.altitude);
-	}));
+	});

-	it('should serialize and unserialize notes', (async () => {
+	it('should serialize and unserialize notes', async () => {
 		const folder = await Folder.save({ title: 'folder' });
 		const note = await Note.save({ title: 'note', parent_id: folder.id });
 		await Note.save({

@@ -93,9 +98,9 @@ describe('models/BaseItem', () => {
 		const noteAfter = await Note.unserialize(serialized);

 		expect(noteAfter).toEqual(noteBefore);
-	}));
+	});

-	it('should serialize and unserialize properties that contain new lines', (async () => {
+	it('should serialize and unserialize properties that contain new lines', async () => {
 		const sourceUrl = `
 https://joplinapp.org/ \\n
 `;

@@ -107,9 +112,9 @@ https://joplinapp.org/ \\n
 		const noteAfter = await Note.unserialize(serialized);

 		expect(noteAfter).toEqual(noteBefore);
-	}));
+	});

-	it('should not serialize the note title and body', (async () => {
+	it('should not serialize the note title and body', async () => {
 		const note = await Note.save({ title: 'my note', body: `one line
 two line
 three line \\n no escape` });

@@ -121,5 +126,27 @@ three line \\n no escape` });
 one line
 two line
 three line \\n no escape`)).toBe(0);
-	}));
+	});
+
+	it('should update item sync item', async () => {
+		const note1 = await Note.save({ });
+
+		const syncTime = async (itemId: string) => {
+			const syncItem = await BaseItem.syncItem(syncTargetId(), itemId, { fields: ['sync_time'] });
+			return syncItem ? syncItem.sync_time : 0;
+		};
+
+		expect(await syncTime(note1.id)).toBe(0);
+
+		await synchronizerStart();
+
+		const newTime = await syncTime(note1.id);
+		expect(newTime).toBeLessThanOrEqual(Date.now());
+
+		// Check that it doesn't change if we sync again
+		await msleep(1);
+		await synchronizerStart();
+		expect(await syncTime(note1.id)).toBe(newTime);
+	});
+
 });
@@ -1,5 +1,5 @@
 import { ModelType, DeleteOptions } from '../BaseModel';
-import { BaseItemEntity, DeletedItemEntity, NoteEntity } from '../services/database/types';
+import { BaseItemEntity, DeletedItemEntity, NoteEntity, SyncItemEntity } from '../services/database/types';
 import Setting from './Setting';
 import BaseModel from '../BaseModel';
 import time from '../time';

@@ -194,6 +194,14 @@ export default class BaseItem extends BaseModel {
 		return output;
 	}

+	public static async syncItem(syncTarget: number, itemId: string, options: LoadOptions = null): Promise<SyncItemEntity> {
+		options = {
+			fields: '*',
+			...options,
+		};
+		return await this.db().selectOne(`SELECT ${this.db().escapeFieldsToString(options.fields)} FROM sync_items WHERE sync_target = ? AND item_id = ?`, [syncTarget, itemId]);
+	}
+
 	public static async allSyncItems(syncTarget: number) {
 		const output = await this.db().selectAll('SELECT * FROM sync_items WHERE sync_target = ?', [syncTarget]);
 		return output;
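
For reference, the new helper is consumed in two places in this commit: the synchronizer's upload guard and the BaseItem test above. A fragment of the pattern (names as in the diff; a missing row means the item has never been synced to that target):

const syncItem = await BaseItem.syncItem(syncTargetId, resource.id, { fields: ['sync_time', 'force_sync'] });
const lastSyncTime = syncItem ? syncItem.sync_time : 0;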
@@ -1,10 +1,11 @@
-import { supportDir, setupDatabaseAndSynchronizer, switchClient, simulateReadOnlyShareEnv, expectThrow, createTempFile } from '../testing/test-utils';
+import { supportDir, setupDatabaseAndSynchronizer, switchClient, simulateReadOnlyShareEnv, expectThrow, createTempFile, msleep } from '../testing/test-utils';
 import Folder from '../models/Folder';
 import Note from '../models/Note';
 import Resource from '../models/Resource';
 import shim from '../shim';
 import { ErrorCode } from '../errors';
 import { remove, pathExists } from 'fs-extra';
+import { ResourceEntity } from '../services/database/types';

 const testImagePath = `${supportDir}/photo.jpg`;

@@ -95,6 +96,39 @@ describe('models/Resource', () => {
 		expect(originalStat.size).toBe(newStat.size);
 	}));

+	it('should set the blob_updated_time property if the blob is updated', (async () => {
+		const note = await Note.save({});
+		await shim.attachFileToNote(note, testImagePath);
+
+		const resourceA: ResourceEntity = (await Resource.all())[0];
+		expect(resourceA.updated_time).toBe(resourceA.blob_updated_time);
+
+		await msleep(1);
+
+		await Resource.updateResourceBlobContent(resourceA.id, testImagePath);
+
+		const resourceB: ResourceEntity = (await Resource.all())[0];
+		expect(resourceB.updated_time).toBeGreaterThan(resourceA.updated_time);
+		expect(resourceB.blob_updated_time).toBeGreaterThan(resourceA.blob_updated_time);
+	}));
+
+	it('should NOT set the blob_updated_time property if the blob is NOT updated', (async () => {
+		const note = await Note.save({});
+		await shim.attachFileToNote(note, testImagePath);
+
+		const resourceA: ResourceEntity = (await Resource.all())[0];
+
+		await msleep(1);
+
+		// We only update the resource metadata - so the blob timestamp should
+		// not change
+		await Resource.save({ id: resourceA.id, title: 'new title' });
+
+		const resourceB: ResourceEntity = (await Resource.all())[0];
+		expect(resourceB.updated_time).toBeGreaterThan(resourceA.updated_time);
+		expect(resourceB.blob_updated_time).toBe(resourceA.blob_updated_time);
+	}));
+
 	it('should not allow modifying a read-only resource', async () => {
 		const { cleanup, resource } = await setupFolderNoteResourceReadOnly('123456789');
 		await expectThrow(async () => Resource.save({ id: resource.id, share_id: '123456789', title: 'cannot do this!' }), ErrorCode.IsReadOnly);
@@ -15,6 +15,7 @@ import JoplinError from '../JoplinError';
 import itemCanBeEncrypted from './utils/itemCanBeEncrypted';
 import { getEncryptionEnabled } from '../services/synchronizer/syncInfoUtils';
 import ShareService from '../services/share/ShareService';
+import { SaveOptions } from './utils/types';

 export default class Resource extends BaseItem {

@@ -372,9 +373,15 @@ export default class Resource extends BaseItem {
 		// We first save the resource metadata because this can throw, for
 		// example if modifying a resource that is read-only

+		const now = Date.now();
+
 		const result = await Resource.save({
 			id: resource.id,
 			size: fileStat.size,
+			updated_time: now,
+			blob_updated_time: now,
+		}, {
+			autoTimestamp: false,
 		});

 		// If the above call has succeeded, we save the data blob

@@ -442,10 +449,18 @@ export default class Resource extends BaseItem {
 		}, { changeSource: ItemChange.SOURCE_SYNC });
 	}

-	// public static async save(o: ResourceEntity, options: SaveOptions = null): Promise<ResourceEntity> {
-	// 	const resource:ResourceEntity = await super.save(o, options);
-	// 	if (resource.updated_time) resource.bl
-	// 	return resource;
-	// }
+	public static async save(o: ResourceEntity, options: SaveOptions = null): Promise<ResourceEntity> {
+		const resource = { ...o };
+
+		if (this.isNew(o, options)) {
+			const now = Date.now();
+			options = { ...options, autoTimestamp: false };
+			if (!resource.created_time) resource.created_time = now;
+			if (!resource.updated_time) resource.updated_time = now;
+			if (!resource.blob_updated_time) resource.blob_updated_time = now;
+		}
+
+		return await super.save(resource, options);
+	}

 }
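
Taken together, the save() override and updateResourceBlobContent() give the two timestamps the semantics that the Resource model tests above assert. An illustrative fragment (the file path is a placeholder):

// New resource: created_time, updated_time and blob_updated_time all start equal.
// Metadata-only edit: updated_time advances, blob_updated_time does not.
await Resource.save({ id: resource.id, title: 'new title' });

// Blob edit: updateResourceBlobContent() sets updated_time and blob_updated_time
// to the same Date.now() value (autoTimestamp is disabled to keep them consistent).
await Resource.updateResourceBlobContent(resource.id, '/path/to/new-content.png');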
@@ -256,6 +256,7 @@ export interface ResourceLocalStateEntity {
 	'type_'?: number;
 }
 export interface ResourceEntity {
+	'blob_updated_time'?: number;
 	'created_time'?: number;
 	'encryption_applied'?: number;
 	'encryption_blob_encrypted'?: number;

@@ -467,6 +468,7 @@ export const databaseSchema: DatabaseTables = {
 		type_: { type: 'number' },
 	},
 	resources: {
+		blob_updated_time: { type: 'number' },
 		created_time: { type: 'number' },
 		encryption_applied: { type: 'number' },
 		encryption_blob_encrypted: { type: 'number' },
|
@ -1,9 +1,9 @@
|
|||||||
import time from '../../time';
|
import time from '../../time';
|
||||||
import shim from '../../shim';
|
import shim from '../../shim';
|
||||||
import Setting from '../../models/Setting';
|
import Setting from '../../models/Setting';
|
||||||
import { NoteEntity } from '../../services/database/types';
|
import { NoteEntity, ResourceEntity } from '../../services/database/types';
|
||||||
import { remoteNotesFoldersResources, remoteResources } from '../../testing/test-utils-synchronizer';
|
import { remoteNotesFoldersResources, remoteResources } from '../../testing/test-utils-synchronizer';
|
||||||
import { synchronizerStart, tempFilePath, resourceFetcher, supportDir, setupDatabaseAndSynchronizer, synchronizer, fileApi, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, checkThrowAsync } from '../../testing/test-utils';
|
import { synchronizerStart, tempFilePath, resourceFetcher, supportDir, setupDatabaseAndSynchronizer, synchronizer, fileApi, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, checkThrowAsync, msleep } from '../../testing/test-utils';
|
||||||
import Folder from '../../models/Folder';
|
import Folder from '../../models/Folder';
|
||||||
import Note from '../../models/Note';
|
import Note from '../../models/Note';
|
||||||
import Resource from '../../models/Resource';
|
import Resource from '../../models/Resource';
|
||||||
@ -27,7 +27,7 @@ describe('Synchronizer.resources', () => {
|
|||||||
insideBeforeEach = false;
|
insideBeforeEach = false;
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should sync resources', (async () => {
|
it('should sync resources', async () => {
|
||||||
while (insideBeforeEach) await time.msleep(500);
|
while (insideBeforeEach) await time.msleep(500);
|
||||||
|
|
||||||
const folder1 = await Folder.save({ title: 'folder1' });
|
const folder1 = await Folder.save({ title: 'folder1' });
|
||||||
@ -58,9 +58,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
|
|
||||||
const resourcePath1_2 = Resource.fullPath(resource1_2);
|
const resourcePath1_2 = Resource.fullPath(resource1_2);
|
||||||
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
|
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should handle resource download errors', (async () => {
|
it('should handle resource download errors', async () => {
|
||||||
while (insideBeforeEach) await time.msleep(500);
|
while (insideBeforeEach) await time.msleep(500);
|
||||||
|
|
||||||
const folder1 = await Folder.save({ title: 'folder1' });
|
const folder1 = await Folder.save({ title: 'folder1' });
|
||||||
@ -87,9 +87,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
const ls = await Resource.localState(resource1);
|
const ls = await Resource.localState(resource1);
|
||||||
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_ERROR);
|
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_ERROR);
|
||||||
expect(ls.fetch_error).toBe('did not work');
|
expect(ls.fetch_error).toBe('did not work');
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should set the resource file size if it is missing', (async () => {
|
it('should set the resource file size if it is missing', async () => {
|
||||||
while (insideBeforeEach) await time.msleep(500);
|
while (insideBeforeEach) await time.msleep(500);
|
||||||
|
|
||||||
const folder1 = await Folder.save({ title: 'folder1' });
|
const folder1 = await Folder.save({ title: 'folder1' });
|
||||||
@ -110,9 +110,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
await fetcher.waitForAllFinished();
|
await fetcher.waitForAllFinished();
|
||||||
r1 = await Resource.load(r1.id);
|
r1 = await Resource.load(r1.id);
|
||||||
expect(r1.size).toBe(2720);
|
expect(r1.size).toBe(2720);
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should delete resources', (async () => {
|
it('should delete resources', async () => {
|
||||||
while (insideBeforeEach) await time.msleep(500);
|
while (insideBeforeEach) await time.msleep(500);
|
||||||
|
|
||||||
const folder1 = await Folder.save({ title: 'folder1' });
|
const folder1 = await Folder.save({ title: 'folder1' });
|
||||||
@ -142,9 +142,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
allResources = await Resource.all();
|
allResources = await Resource.all();
|
||||||
expect(allResources.length).toBe(0);
|
expect(allResources.length).toBe(0);
|
||||||
expect(await shim.fsDriver().exists(resourcePath1)).toBe(false);
|
expect(await shim.fsDriver().exists(resourcePath1)).toBe(false);
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should encrypt resources', (async () => {
|
it('should encrypt resources', async () => {
|
||||||
setEncryptionEnabled(true);
|
setEncryptionEnabled(true);
|
||||||
const masterKey = await loadEncryptionMasterKey();
|
const masterKey = await loadEncryptionMasterKey();
|
||||||
|
|
||||||
@ -170,9 +170,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
const resourcePath1_2 = Resource.fullPath(resource1_2);
|
const resourcePath1_2 = Resource.fullPath(resource1_2);
|
||||||
|
|
||||||
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
|
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should sync resource blob changes', (async () => {
|
it('should sync resource blob changes', async () => {
|
||||||
const tempFile = tempFilePath('txt');
|
const tempFile = tempFilePath('txt');
|
||||||
await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
|
await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
|
||||||
const folder1 = await Folder.save({ title: 'folder1' });
|
const folder1 = await Folder.save({ title: 'folder1' });
|
||||||
@ -204,9 +204,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
const resource1_1 = (await Resource.all())[0];
|
const resource1_1 = (await Resource.all())[0];
|
||||||
expect(resource1_1.size).toBe(newSize);
|
expect(resource1_1.size).toBe(newSize);
|
||||||
expect(await Resource.resourceBlobContent(resource1_1.id, 'utf8')).toBe('1234 MOD');
|
expect(await Resource.resourceBlobContent(resource1_1.id, 'utf8')).toBe('1234 MOD');
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should handle resource conflicts', (async () => {
|
it('should handle resource conflicts', async () => {
|
||||||
{
|
{
|
||||||
const tempFile = tempFilePath('txt');
|
const tempFile = tempFilePath('txt');
|
||||||
await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
|
await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
|
||||||
@ -271,9 +271,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
expect(resourceConflictFolder).toBeTruthy();
|
expect(resourceConflictFolder).toBeTruthy();
|
||||||
expect(resourceConflictFolder.parent_id).toBeFalsy();
|
expect(resourceConflictFolder.parent_id).toBeFalsy();
|
||||||
}
|
}
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should handle resource conflicts if a resource is changed locally but deleted remotely', (async () => {
|
it('should handle resource conflicts if a resource is changed locally but deleted remotely', async () => {
|
||||||
{
|
{
|
||||||
const tempFile = tempFilePath('txt');
|
const tempFile = tempFilePath('txt');
|
||||||
await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
|
await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
|
||||||
@ -316,9 +316,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
expect(originalResource.id).not.toBe(conflictResource.id);
|
expect(originalResource.id).not.toBe(conflictResource.id);
|
||||||
expect(conflictResource.title).toBe('modified resource');
|
expect(conflictResource.title).toBe('modified resource');
|
||||||
}
|
}
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should not upload a resource if it has not been fetched yet', (async () => {
|
it('should not upload a resource if it has not been fetched yet', async () => {
|
||||||
// In some rare cases, the synchronizer might try to upload a resource even though it
|
// In some rare cases, the synchronizer might try to upload a resource even though it
|
||||||
// doesn't have the resource file. It can happen in this situation:
|
// doesn't have the resource file. It can happen in this situation:
|
||||||
// - C1 create resource
|
// - C1 create resource
|
||||||
@ -350,9 +350,9 @@ describe('Synchronizer.resources', () => {
|
|||||||
await BaseItem.saveSyncEnabled(ModelType.Resource, resource.id);
|
await BaseItem.saveSyncEnabled(ModelType.Resource, resource.id);
|
||||||
await synchronizerStart();
|
await synchronizerStart();
|
||||||
expect((await remoteResources()).length).toBe(1);
|
expect((await remoteResources()).length).toBe(1);
|
||||||
}));
|
});
|
||||||
|
|
||||||
it('should not download resources over the limit', (async () => {
|
it('should not download resources over the limit', async () => {
|
||||||
const note1 = await Note.save({ title: 'note' });
|
const note1 = await Note.save({ title: 'note' });
|
||||||
await shim.attachFileToNote(note1, `${supportDir}/photo.jpg`);
|
await shim.attachFileToNote(note1, `${supportDir}/photo.jpg`);
|
||||||
await synchronizer().start();
|
await synchronizer().start();
|
||||||
@ -368,6 +368,53 @@ describe('Synchronizer.resources', () => {
|
|||||||
expect(syncItems.length).toBe(2);
|
expect(syncItems.length).toBe(2);
|
||||||
expect(syncItems[1].item_location).toBe(BaseItem.SYNC_ITEM_LOCATION_REMOTE);
|
expect(syncItems[1].item_location).toBe(BaseItem.SYNC_ITEM_LOCATION_REMOTE);
|
||||||
expect(syncItems[1].sync_disabled).toBe(1);
|
expect(syncItems[1].sync_disabled).toBe(1);
|
||||||
}));
|
});
|
||||||
|
|
||||||
|
it('should not upload blob if it has not changed', async () => {
|
||||||
|
const note = await Note.save({});
|
||||||
|
await shim.attachFileToNote(note, `${supportDir}/sample.txt`);
|
||||||
|
const resource: ResourceEntity = (await Resource.all())[0];
|
||||||
|
const resourcePath = `.resource/${resource.id}`;
|
||||||
|
|
||||||
|
await synchronizer().api().put(resourcePath, 'before upload');
|
||||||
|
expect(await synchronizer().api().get(resourcePath)).toBe('before upload');
|
||||||
|
await synchronizerStart();
|
||||||
|
expect(await synchronizer().api().get(resourcePath)).toBe('just testing');
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------
|
||||||
|
// Change metadata only and check that blob is not uploaded. To do this,
|
||||||
|
// we manually overwrite the data on the sync target, then sync. If the
|
||||||
|
// synchronizer doesn't upload the blob, this manually changed data
|
||||||
|
// should remain.
|
||||||
|
// ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
await Resource.save({ id: resource.id, title: 'my new title' });
|
||||||
|
await synchronizer().api().put(resourcePath, 'check if changed');
|
||||||
|
await synchronizerStart();
|
||||||
|
expect(await synchronizer().api().get(resourcePath)).toBe('check if changed');
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------
|
||||||
|
// Now change the blob, and check that the remote item has been
|
||||||
|
// overwritten.
|
||||||
|
// ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
await Resource.updateResourceBlobContent(resource.id, `${supportDir}/sample.txt`);
|
||||||
|
await synchronizerStart();
|
||||||
|
expect(await synchronizer().api().get(resourcePath)).toBe('just testing');
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------
|
||||||
|
// Change the blob, then change the metadata, and sync. Even though
|
||||||
|
// blob_updated_time is earlier than updated_time, it should still
|
||||||
|
// update everything on the sync target, because both times are after
|
||||||
|
// the item sync_time.
|
||||||
|
// ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
await Resource.updateResourceBlobContent(resource.id, `${supportDir}/sample2.txt`);
|
||||||
|
await msleep(1);
|
||||||
|
await Resource.save({ id: resource.id, title: 'my new title 2' });
|
||||||
|
await synchronizerStart();
|
||||||
|
expect(await synchronizer().api().get(resourcePath)).toBe('just testing 2');
|
||||||
|
expect(await synchronizer().api().get(`${resource.id}.md`)).toContain('my new title 2');
|
||||||
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||