// Mirror of https://github.com/laurent22/joplin.git (synced 2024-12-12 08:54:00 +02:00)
import time from '@joplin/lib/time';
|
|
import shim from '@joplin/lib/shim';
|
|
import Setting from '@joplin/lib/models/Setting';
|
|
import { NoteEntity } from '@joplin/lib/services/database/types';
|
|
import { remoteNotesFoldersResources, remoteResources } from './test-utils-synchronizer';
|
|
|
|
const { synchronizerStart, tempFilePath, resourceFetcher, setupDatabaseAndSynchronizer, synchronizer, fileApi, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, checkThrowAsync } = require('./test-utils.js');
|
|
import Folder from '@joplin/lib/models/Folder';
|
|
import Note from '@joplin/lib/models/Note';
|
|
import Resource from '@joplin/lib/models/Resource';
|
|
import ResourceFetcher from '@joplin/lib/services/ResourceFetcher';
|
|
import BaseItem from '@joplin/lib/models/BaseItem';
|
|
|
|
// True while the beforeEach hook is still setting up the test databases.
// Tests poll this flag (via `while (insideBeforeEach) await time.msleep(500)`)
// so they do not start before setup has completed.
let insideBeforeEach = false;
|
|
|
|
describe('Synchronizer.resources', function() {
|
|
|
|
beforeEach(async (done) => {
|
|
insideBeforeEach = true;
|
|
|
|
await setupDatabaseAndSynchronizer(1);
|
|
await setupDatabaseAndSynchronizer(2);
|
|
await switchClient(1);
|
|
done();
|
|
|
|
insideBeforeEach = false;
|
|
});
|
|
|
|
it('should sync resources', (async () => {
|
|
while (insideBeforeEach) await time.msleep(500);
|
|
|
|
const folder1 = await Folder.save({ title: 'folder1' });
|
|
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
|
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
|
const resource1 = (await Resource.all())[0];
|
|
const resourcePath1 = Resource.fullPath(resource1);
|
|
await synchronizerStart();
|
|
expect((await remoteNotesFoldersResources()).length).toBe(3);
|
|
|
|
await switchClient(2);
|
|
|
|
await synchronizerStart();
|
|
const allResources = await Resource.all();
|
|
expect(allResources.length).toBe(1);
|
|
let resource1_2 = allResources[0];
|
|
let ls = await Resource.localState(resource1_2);
|
|
expect(resource1_2.id).toBe(resource1.id);
|
|
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_IDLE);
|
|
|
|
const fetcher = new ResourceFetcher(() => { return synchronizer().api(); });
|
|
fetcher.queueDownload_(resource1_2.id);
|
|
await fetcher.waitForAllFinished();
|
|
|
|
resource1_2 = await Resource.load(resource1.id);
|
|
ls = await Resource.localState(resource1_2);
|
|
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE);
|
|
|
|
const resourcePath1_2 = Resource.fullPath(resource1_2);
|
|
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
|
|
}));
|
|
|
|
it('should handle resource download errors', (async () => {
|
|
while (insideBeforeEach) await time.msleep(500);
|
|
|
|
const folder1 = await Folder.save({ title: 'folder1' });
|
|
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
|
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
|
let resource1 = (await Resource.all())[0];
|
|
await synchronizerStart();
|
|
|
|
await switchClient(2);
|
|
|
|
await synchronizerStart();
|
|
|
|
const fetcher = new ResourceFetcher(() => {
|
|
return {
|
|
// Simulate a failed download
|
|
get: () => { return new Promise((_resolve: Function, reject: Function) => { reject(new Error('did not work')); }); },
|
|
};
|
|
});
|
|
fetcher.queueDownload_(resource1.id);
|
|
await fetcher.waitForAllFinished();
|
|
|
|
resource1 = await Resource.load(resource1.id);
|
|
const ls = await Resource.localState(resource1);
|
|
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_ERROR);
|
|
expect(ls.fetch_error).toBe('did not work');
|
|
}));
|
|
|
|
it('should set the resource file size if it is missing', (async () => {
|
|
while (insideBeforeEach) await time.msleep(500);
|
|
|
|
const folder1 = await Folder.save({ title: 'folder1' });
|
|
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
|
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
|
await synchronizerStart();
|
|
|
|
await switchClient(2);
|
|
|
|
await synchronizerStart();
|
|
let r1 = (await Resource.all())[0];
|
|
await Resource.setFileSizeOnly(r1.id, -1);
|
|
r1 = await Resource.load(r1.id);
|
|
expect(r1.size).toBe(-1);
|
|
|
|
const fetcher = new ResourceFetcher(() => { return synchronizer().api(); });
|
|
fetcher.queueDownload_(r1.id);
|
|
await fetcher.waitForAllFinished();
|
|
r1 = await Resource.load(r1.id);
|
|
expect(r1.size).toBe(2720);
|
|
}));
|
|
|
|
it('should delete resources', (async () => {
|
|
while (insideBeforeEach) await time.msleep(500);
|
|
|
|
const folder1 = await Folder.save({ title: 'folder1' });
|
|
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
|
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
|
const resource1 = (await Resource.all())[0];
|
|
const resourcePath1 = Resource.fullPath(resource1);
|
|
await synchronizerStart();
|
|
|
|
await switchClient(2);
|
|
|
|
await synchronizerStart();
|
|
let allResources = await Resource.all();
|
|
expect(allResources.length).toBe(1);
|
|
expect((await remoteNotesFoldersResources()).length).toBe(3);
|
|
await Resource.delete(resource1.id);
|
|
await synchronizerStart();
|
|
expect((await remoteNotesFoldersResources()).length).toBe(2);
|
|
|
|
const remoteBlob = await fileApi().stat(`.resource/${resource1.id}`);
|
|
expect(!remoteBlob).toBe(true);
|
|
|
|
await switchClient(1);
|
|
|
|
expect(await shim.fsDriver().exists(resourcePath1)).toBe(true);
|
|
await synchronizerStart();
|
|
allResources = await Resource.all();
|
|
expect(allResources.length).toBe(0);
|
|
expect(await shim.fsDriver().exists(resourcePath1)).toBe(false);
|
|
}));
|
|
|
|
// Checks that a resource uploaded with E2EE enabled on client 1 can be
// downloaded and decrypted on client 2 once the master key password is known.
it('should encrypt resources', (async () => {
	Setting.setValue('encryption.enabled', true);
	const masterKey = await loadEncryptionMasterKey();

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
	const resource1 = (await Resource.all())[0];
	const resourcePath1 = Resource.fullPath(resource1);
	await synchronizerStart();

	await switchClient(2);

	await synchronizerStart();
	// Provide the master key password so downloaded items can be decrypted.
	Setting.setObjectValue('encryption.passwordCache', masterKey.id, '123456');
	await encryptionService().loadMasterKeysFromSettings();

	// Download the (still encrypted) blob.
	const fetcher = new ResourceFetcher(() => { return synchronizer().api(); });
	fetcher.queueDownload_(resource1.id);
	await fetcher.waitForAllFinished();

	let resource1_2 = (await Resource.all())[0];
	resource1_2 = await Resource.decrypt(resource1_2);
	const resourcePath1_2 = Resource.fullPath(resource1_2);

	// The decrypted blob must match the original attachment byte for byte.
	expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
}));
|
|
|
|
it('should sync resource blob changes', (async () => {
|
|
const tempFile = tempFilePath('txt');
|
|
await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
|
|
const folder1 = await Folder.save({ title: 'folder1' });
|
|
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
|
await shim.attachFileToNote(note1, tempFile);
|
|
await synchronizerStart();
|
|
|
|
await switchClient(2);
|
|
|
|
await synchronizerStart();
|
|
await resourceFetcher().start();
|
|
await resourceFetcher().waitForAllFinished();
|
|
let resource1_2 = (await Resource.all())[0];
|
|
const modFile = tempFilePath('txt');
|
|
await shim.fsDriver().writeFile(modFile, '1234 MOD', 'utf8');
|
|
await Resource.updateResourceBlobContent(resource1_2.id, modFile);
|
|
const originalSize = resource1_2.size;
|
|
resource1_2 = (await Resource.all())[0];
|
|
const newSize = resource1_2.size;
|
|
expect(originalSize).toBe(4);
|
|
expect(newSize).toBe(8);
|
|
await synchronizerStart();
|
|
|
|
await switchClient(1);
|
|
|
|
await synchronizerStart();
|
|
await resourceFetcher().start();
|
|
await resourceFetcher().waitForAllFinished();
|
|
const resource1_1 = (await Resource.all())[0];
|
|
expect(resource1_1.size).toBe(newSize);
|
|
expect(await Resource.resourceBlobContent(resource1_1.id, 'utf8')).toBe('1234 MOD');
|
|
}));
|
|
|
|
// Simulates concurrent blob edits on two clients: client 2 modifies and syncs
// first, then client 1 modifies without syncing, so client 1's next sync hits
// a resource conflict. The remote (client 2) version wins locally, and the
// losing local version is preserved on a generated Conflict note.
it('should handle resource conflicts', (async () => {
	{
		// Client 1: create a note with a small text attachment and upload it.
		const tempFile = tempFilePath('txt');
		await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, tempFile);
		await synchronizerStart();
	}

	await switchClient(2);

	{
		// Client 2: download the resource, modify its blob, and sync the change.
		await synchronizerStart();
		await resourceFetcher().start();
		await resourceFetcher().waitForAllFinished();
		const resource = (await Resource.all())[0];
		const modFile2 = tempFilePath('txt');
		await shim.fsDriver().writeFile(modFile2, '1234 MOD 2', 'utf8');
		await Resource.updateResourceBlobContent(resource.id, modFile2);
		await synchronizerStart();
	}

	await switchClient(1);

	{
		// Going to modify a resource without syncing first, which will cause a conflict
		const resource = (await Resource.all())[0];
		const modFile1 = tempFilePath('txt');
		await shim.fsDriver().writeFile(modFile1, '1234 MOD 1', 'utf8');
		await Resource.updateResourceBlobContent(resource.id, modFile1);
		await synchronizerStart(); // CONFLICT

		// If we try to read the resource content now, it should throw because the local
		// content has been moved to the conflict notebook, and the new local content
		// has not been downloaded yet.
		await checkThrowAsync(async () => await Resource.resourceBlobContent(resource.id));

		// Now download resources, and our local content would have been overwritten by
		// the content from client 2
		await resourceFetcher().start();
		await resourceFetcher().waitForAllFinished();
		const localContent = await Resource.resourceBlobContent(resource.id, 'utf8');
		expect(localContent).toBe('1234 MOD 2');

		// Check that the Conflict note has been generated, with the conflict resource
		// attached to it, and check that it has the original content.
		const allNotes = await Note.all();
		expect(allNotes.length).toBe(2);
		const conflictNote = allNotes.find((v: NoteEntity) => {
			return !!v.is_conflict;
		});
		expect(!!conflictNote).toBe(true);
		const resourceIds = await Note.linkedResourceIds(conflictNote.body);
		expect(resourceIds.length).toBe(1);
		const conflictContent = await Resource.resourceBlobContent(resourceIds[0], 'utf8');
		expect(conflictContent).toBe('1234 MOD 1');
	}
}));
|
|
|
|
// A resource modified on one client but deleted on another: the deletion wins
// for the original ID, while the local modification is preserved as a new
// (conflict) resource with a different ID.
it('should handle resource conflicts if a resource is changed locally but deleted remotely', (async () => {
	{
		// Client 1: create a note with a text attachment and upload it.
		const tempFile = tempFilePath('txt');
		await shim.fsDriver().writeFile(tempFile, '1234', 'utf8');
		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
		await shim.attachFileToNote(note1, tempFile);
		await synchronizerStart();
	}

	await switchClient(2);

	{
		// Client 2: download the resource metadata and blob.
		await synchronizerStart();
		await resourceFetcher().start();
		await resourceFetcher().waitForAllFinished();
	}

	await switchClient(1);

	{
		// Client 1: delete the resource and push the deletion to the sync target.
		const resource = (await Resource.all())[0];
		await Resource.delete(resource.id);
		await synchronizerStart();

	}

	await switchClient(2);

	{
		// Client 2: modify the (now remotely deleted) resource, then sync.
		const originalResource = (await Resource.all())[0];
		await Resource.save({ id: originalResource.id, title: 'modified resource' });
		await synchronizerStart(); // CONFLICT

		// The original ID is gone — the remote deletion has been applied locally.
		const deletedResource = await Resource.load(originalResource.id);
		expect(!deletedResource).toBe(true);

		// The local modification survives as a new resource with a different ID.
		const allResources = await Resource.all();
		expect(allResources.length).toBe(1);
		const conflictResource = allResources[0];
		expect(originalResource.id).not.toBe(conflictResource.id);
		expect(conflictResource.title).toBe('modified resource');
	}
}));
|
|
|
|
it('should not upload a resource if it has not been fetched yet', (async () => {
|
|
// In some rare cases, the synchronizer might try to upload a resource even though it
|
|
// doesn't have the resource file. It can happen in this situation:
|
|
// - C1 create resource
|
|
// - C1 sync
|
|
// - C2 sync
|
|
// - C2 resource metadata is received but ResourceFetcher hasn't downloaded the file yet
|
|
// - C2 enables E2EE - all the items are marked for forced sync
|
|
// - C2 sync
|
|
// The synchronizer will try to upload the resource, even though it doesn't have the file,
|
|
// so we need to make sure it doesn't. But also that once it gets the file, the resource
|
|
// does get uploaded.
|
|
|
|
const note1 = await Note.save({ title: 'note' });
|
|
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
|
const resource = (await Resource.all())[0];
|
|
await Resource.setLocalState(resource.id, { fetch_status: Resource.FETCH_STATUS_IDLE });
|
|
await synchronizerStart();
|
|
|
|
expect((await remoteResources()).length).toBe(0);
|
|
|
|
await Resource.setLocalState(resource.id, { fetch_status: Resource.FETCH_STATUS_DONE });
|
|
await synchronizerStart();
|
|
|
|
expect((await remoteResources()).length).toBe(1);
|
|
}));
|
|
|
|
it('should not download resources over the limit', (async () => {
|
|
const note1 = await Note.save({ title: 'note' });
|
|
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
|
await synchronizer().start();
|
|
|
|
await switchClient(2);
|
|
|
|
const previousMax = synchronizer().maxResourceSize_;
|
|
synchronizer().maxResourceSize_ = 1;
|
|
await synchronizerStart();
|
|
synchronizer().maxResourceSize_ = previousMax;
|
|
|
|
const syncItems = await BaseItem.allSyncItems(syncTargetId());
|
|
expect(syncItems.length).toBe(2);
|
|
expect(syncItems[1].item_location).toBe(BaseItem.SYNC_ITEM_LOCATION_REMOTE);
|
|
expect(syncItems[1].sync_disabled).toBe(1);
|
|
}));
|
|
|
|
});
|