2018-03-09 22:59:12 +02:00
|
|
|
require('app-module-path').addPath(__dirname);
|
|
|
|
|
|
|
|
const { time } = require('lib/time-utils.js');
|
2019-05-06 22:35:29 +02:00
|
|
|
const { setupDatabase, allSyncTargetItemsEncrypted, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, checkThrowAsync, asyncTest } = require('test-utils.js');
|
2018-03-09 22:59:12 +02:00
|
|
|
const { shim } = require('lib/shim.js');
|
|
|
|
const fs = require('fs-extra');
|
|
|
|
const Folder = require('lib/models/Folder.js');
|
|
|
|
const Note = require('lib/models/Note.js');
|
|
|
|
const Resource = require('lib/models/Resource.js');
|
2018-10-08 08:36:45 +02:00
|
|
|
const ResourceFetcher = require('lib/services/ResourceFetcher');
|
2018-03-09 22:59:12 +02:00
|
|
|
const Tag = require('lib/models/Tag.js');
|
|
|
|
const { Database } = require('lib/database.js');
|
|
|
|
const Setting = require('lib/models/Setting.js');
|
|
|
|
const MasterKey = require('lib/models/MasterKey');
|
|
|
|
const BaseItem = require('lib/models/BaseItem.js');
|
2019-05-06 22:35:29 +02:00
|
|
|
const Revision = require('lib/models/Revision.js');
|
2018-03-09 22:59:12 +02:00
|
|
|
const BaseModel = require('lib/BaseModel.js');
|
|
|
|
const SyncTargetRegistry = require('lib/SyncTargetRegistry.js');
|
2019-02-05 19:39:10 +02:00
|
|
|
const WelcomeUtils = require('lib/WelcomeUtils');
|
2018-03-09 22:59:12 +02:00
|
|
|
|
|
|
|
// Log unhandled promise rejections so that failures inside async test code are
// visible in the test output instead of being silently dropped by Node.
process.on('unhandledRejection', (reason, p) => {
  console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
|
|
|
|
|
2018-02-15 20:33:08 +02:00
|
|
|
jasmine.DEFAULT_TIMEOUT_INTERVAL = 60000 + 30000; // The first test is slow because the database needs to be built
|
2017-06-29 22:52:52 +02:00
|
|
|
|
2019-05-06 22:35:29 +02:00
|
|
|
// Returns every folder and note in the local database as one flat array.
// The two queries are independent, so they run in parallel.
async function allNotesFolders() {
  const [folders, notes] = await Promise.all([Folder.all(), Note.all()]);
  return folders.concat(notes);
}
|
|
|
|
|
2019-05-06 22:35:29 +02:00
|
|
|
// Downloads every file from the sync target, unserializes it, and returns the
// items whose `type_` is one of `types` (e.g. BaseModel.TYPE_NOTE).
// Throws if the listing is paginated, since paging is not handled here.
async function remoteItemsByTypes(types) {
  const list = await fileApi().list();
  if (list.has_more) throw new Error('Not implemented!!!');

  const output = [];
  for (const file of list.items) {
    const remoteContent = await fileApi().get(file.path);
    const content = await BaseItem.unserialize(remoteContent);
    if (!types.includes(content.type_)) continue;
    output.push(content);
  }
  return output;
}
|
|
|
|
|
|
|
|
// Convenience wrapper: all remote items that are notes or folders.
async function remoteNotesAndFolders() {
  const wantedTypes = [BaseModel.TYPE_NOTE, BaseModel.TYPE_FOLDER];
  return remoteItemsByTypes(wantedTypes);
}
|
|
|
|
|
|
|
|
// Convenience wrapper: all remote items that are notes, folders or resources.
async function remoteNotesFoldersResources() {
  const wantedTypes = [BaseModel.TYPE_NOTE, BaseModel.TYPE_FOLDER, BaseModel.TYPE_RESOURCE];
  return remoteItemsByTypes(wantedTypes);
}
|
|
|
|
|
|
|
|
// Asserts that the given local items (notes and folders) match the sync
// target: same item count, and for each local item a remote file exists whose
// unserialized title equals the local title.
// Any exception raised inside the loop is captured and surfaced through the
// final expect(error).toBe(null) so a failure does not become an unhandled
// rejection that aborts the run.
async function localNotesFoldersSameAsRemote(locals, expect) {
  let error = null;
  try {
    const nf = await remoteNotesAndFolders();
    expect(locals.length).toBe(nf.length);

    for (const dbItem of locals) {
      const path = BaseItem.systemPath(dbItem);
      const remote = await fileApi().stat(path);

      expect(!!remote).toBe(true);
      if (!remote) continue;

      // Unserialize with the model matching the item's type.
      let remoteContent = await fileApi().get(path);
      remoteContent = dbItem.type_ == BaseModel.TYPE_NOTE ? await Note.unserialize(remoteContent) : await Folder.unserialize(remoteContent);
      expect(remoteContent.title).toBe(dbItem.title);
    }
  } catch (e) {
    error = e;
  }

  expect(error).toBe(null);
}
|
2017-06-15 01:14:15 +02:00
|
|
|
|
2017-12-20 21:45:25 +02:00
|
|
|
let insideBeforeEach = false;
|
|
|
|
|
2018-03-09 22:59:12 +02:00
|
|
|
describe('Synchronizer', function() {
|
|
|
|
|
|
|
|
// Runs before every test: rebuilds the databases/synchronizers for both
// simulated clients, then makes client 1 the active one.
// NOTE(review): an async function is combined with jasmine's `done` callback,
// and `insideBeforeEach` is reset *after* done() is invoked — confirm this
// ordering is intentional before restructuring.
beforeEach(async (done) => {
  insideBeforeEach = true;

  await setupDatabaseAndSynchronizer(1);
  await setupDatabaseAndSynchronizer(2);
  await switchClient(1);
  done();

  insideBeforeEach = false;
});
|
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// Creating a folder and a note locally, then syncing, should upload both.
it('should create remote items', asyncTest(async () => {
  const newFolder = await Folder.save({ title: "folder1" });
  await Note.save({ title: "un", parent_id: newFolder.id });
  const localItems = await allNotesFolders();

  await synchronizer().start();

  await localNotesFoldersSameAsRemote(localItems, expect);
}));
|
2017-06-18 22:19:13 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// A local edit made after an initial sync should be pushed to the remote on
// the next sync.
it('should update remote items', asyncTest(async () => {
  const parentFolder = await Folder.save({ title: "folder1" });
  const originalNote = await Note.save({ title: "un", parent_id: parentFolder.id });
  await synchronizer().start();

  await Note.save({ title: "un UPDATE", id: originalNote.id });
  const localItems = await allNotesFolders();
  await synchronizer().start();

  await localNotesFoldersSameAsRemote(localItems, expect);
}));
|
2017-06-18 22:19:13 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// Items uploaded by client 1 should be downloaded and created locally when
// client 2 syncs.
it('should create local items', asyncTest(async () => {
  const sourceFolder = await Folder.save({ title: "folder1" });
  await Note.save({ title: "un", parent_id: sourceFolder.id });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();
  const downloadedItems = await allNotesFolders();

  await localNotesFoldersSameAsRemote(downloadedItems, expect);
}));
|
2017-06-19 21:18:22 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// A note edited on client 2 should propagate back to client 1 through the
// sync target.
it('should update local items', asyncTest(async () => {
  let folder1 = await Folder.save({ title: "folder1" });
  let note1 = await Note.save({ title: "un", parent_id: folder1.id });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();

  // Ensure the modification below gets a later timestamp than the sync.
  await sleep(0.1);

  // Edit the note on client 2, then reload to pick up generated fields.
  let note2 = await Note.load(note1.id);
  note2.title = "Updated on client 2";
  await Note.save(note2);
  note2 = await Note.load(note2.id);

  await synchronizer().start();

  await switchClient(1);

  // Client 1 pulls the change made on client 2.
  await synchronizer().start();

  let all = await allNotesFolders();

  await localNotesFoldersSameAsRemote(all, expect);
}));
|
2017-06-19 21:18:22 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// When the same note is edited on both clients between syncs, the remote
// version wins and the local version is preserved as a conflict note.
it('should resolve note conflicts', asyncTest(async () => {
  let folder1 = await Folder.save({ title: "folder1" });
  let note1 = await Note.save({ title: "un", parent_id: folder1.id });
  await synchronizer().start();

  await switchClient(2);

  // Client 2 edits the note and pushes it.
  await synchronizer().start();
  let note2 = await Note.load(note1.id);
  note2.title = "Updated on client 2";
  await Note.save(note2);
  note2 = await Note.load(note2.id);
  await synchronizer().start();

  await switchClient(1);

  // Client 1 edits the same note (without having synced first), creating a conflict.
  let note2conf = await Note.load(note1.id);
  note2conf.title = "Updated on client 1";
  await Note.save(note2conf);
  note2conf = await Note.load(note1.id);
  await synchronizer().start();
  let conflictedNotes = await Note.conflictedNotes();
  expect(conflictedNotes.length).toBe(1);

  // Other than the id (since the conflicted note is a duplicate), and the is_conflict property
  // the conflicted and original note must be the same in every way, to make sure no data has been lost.
  let conflictedNote = conflictedNotes[0];
  expect(conflictedNote.id == note2conf.id).toBe(false);
  for (let n in conflictedNote) {
    if (!conflictedNote.hasOwnProperty(n)) continue;
    if (n == 'id' || n == 'is_conflict') continue;
    expect(conflictedNote[n]).toBe(note2conf[n], 'Property: ' + n);
  }

  // The original note id should now carry the remote (client 2) content.
  let noteUpdatedFromRemote = await Note.load(note1.id);
  for (let n in noteUpdatedFromRemote) {
    if (!noteUpdatedFromRemote.hasOwnProperty(n)) continue;
    expect(noteUpdatedFromRemote[n]).toBe(note2[n], 'Property: ' + n);
  }
}));
|
2017-06-18 22:19:13 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// When the same folder is renamed on both clients, the remote rename wins
// (folders do not get duplicate conflict copies the way notes do).
it('should resolve folders conflicts', asyncTest(async () => {
  let folder1 = await Folder.save({ title: "folder1" });
  let note1 = await Note.save({ title: "un", parent_id: folder1.id });
  await synchronizer().start();

  await switchClient(2); // ----------------------------------

  await synchronizer().start();

  // Make sure the client 2 rename gets a later timestamp.
  await sleep(0.1);

  let folder1_modRemote = await Folder.load(folder1.id);
  folder1_modRemote.title = "folder1 UPDATE CLIENT 2";
  await Folder.save(folder1_modRemote);
  folder1_modRemote = await Folder.load(folder1_modRemote.id);

  await synchronizer().start();

  await switchClient(1); // ----------------------------------

  await sleep(0.1);

  // Client 1 renames the same folder without syncing first.
  let folder1_modLocal = await Folder.load(folder1.id);
  folder1_modLocal.title = "folder1 UPDATE CLIENT 1";
  await Folder.save(folder1_modLocal);
  folder1_modLocal = await Folder.load(folder1.id);

  await synchronizer().start();

  // The remote (client 2) title should have won.
  let folder1_final = await Folder.load(folder1.id);
  expect(folder1_final.title).toBe(folder1_modRemote.title);
}));
|
2017-06-20 00:18:24 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// Deleting a note on client 2 should remove it from the sync target, leaving
// only the folder, and should not leave stale deleted_items records behind.
it('should delete remote notes', asyncTest(async () => {
  const sharedFolder = await Folder.save({ title: "folder1" });
  const sharedNote = await Note.save({ title: "un", parent_id: sharedFolder.id });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();

  await sleep(0.1);

  await Note.delete(sharedNote.id);

  await synchronizer().start();

  const remoteItems = await remoteNotesAndFolders();
  expect(remoteItems.length).toBe(1);
  expect(remoteItems[0].id).toBe(sharedFolder.id);

  const pendingDeletions = await BaseItem.deletedItems(syncTargetId());
  expect(pendingDeletions.length).toBe(0);
}));
|
2018-01-15 20:10:14 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// A deletion that arrives *via* sync must not be queued for re-deletion on
// the remote (that would be an echo of the remote's own delete).
it('should not created deleted_items entries for items deleted via sync', asyncTest(async () => {
  const sharedFolder = await Folder.save({ title: "folder1" });
  await Note.save({ title: "un", parent_id: sharedFolder.id });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();
  await Folder.delete(sharedFolder.id);
  await synchronizer().start();

  await switchClient(1);

  await synchronizer().start();
  const pendingDeletions = await BaseItem.deletedItems(syncTargetId());
  expect(pendingDeletions.length).toBe(0);
}));
|
2017-06-20 00:18:24 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should delete local notes', asyncTest(async () => {
  // For these tests we pass the context around for each user. This is to make sure that the "deletedItemsProcessed"
  // property of the basicDelta() function is cleared properly at the end of a sync operation. If it is not cleared
  // it means items will no longer be deleted locally via sync.

  let folder1 = await Folder.save({ title: "folder1" });
  let note1 = await Note.save({ title: "un", parent_id: folder1.id });
  let note2 = await Note.save({ title: "deux", parent_id: folder1.id });
  let context1 = await synchronizer().start();

  await switchClient(2);

  // Client 2 deletes note1 and pushes the deletion.
  let context2 = await synchronizer().start();
  await Note.delete(note1.id);
  context2 = await synchronizer().start({ context: context2 });

  await switchClient(1);

  // Client 1 pulls: note1 should now be gone locally (folder1 + note2 remain).
  context1 = await synchronizer().start({ context: context1 });
  let items = await allNotesFolders();
  expect(items.length).toBe(2);
  let deletedItems = await BaseItem.deletedItems(syncTargetId());
  expect(deletedItems.length).toBe(0);
  // Final sync with the reused context must still work after a local delete.
  await Note.delete(note2.id);
  context1 = await synchronizer().start({ context: context1 });
}));
|
|
|
|
|
|
|
|
// Deleting a folder on client 2 should remove it from the sync target; local
// and remote item sets must then agree.
it('should delete remote folder', asyncTest(async () => {
  await Folder.save({ title: "folder1" });
  const folderToDelete = await Folder.save({ title: "folder2" });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();

  await sleep(0.1);

  await Folder.delete(folderToDelete.id);

  await synchronizer().start();

  const remaining = await allNotesFolders();
  await localNotesFoldersSameAsRemote(remaining, expect);
}));
|
2017-07-01 00:53:22 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// A folder deleted on client 2 should also be deleted on client 1 after it
// syncs (contexts are reused to exercise delta bookkeeping).
it('should delete local folder', asyncTest(async () => {
  await Folder.save({ title: "folder1" });
  const folderToDelete = await Folder.save({ title: "folder2" });
  let context1 = await synchronizer().start();

  await switchClient(2);

  const context2 = await synchronizer().start();
  await Folder.delete(folderToDelete.id);
  await synchronizer().start({ context: context2 });

  await switchClient(1);

  await synchronizer().start({ context: context1 });
  const remaining = await allNotesFolders();
  await localNotesFoldersSameAsRemote(remaining, expect);
}));
|
2017-06-20 00:18:24 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// If the remote deletes a folder while the local client adds a note to that
// folder, the note survives but is marked as a conflict.
it('should resolve conflict if remote folder has been deleted, but note has been added to folder locally', asyncTest(async () => {
  const folder1 = await Folder.save({ title: "folder1" });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();
  await Folder.delete(folder1.id);
  await synchronizer().start();

  await switchClient(1);

  // The saved note is referenced only by the assertions below, so no local
  // binding is kept (the original held an unused `note` variable).
  await Note.save({ title: "note1", parent_id: folder1.id });
  await synchronizer().start();
  const items = await allNotesFolders();
  expect(items.length).toBe(1);
  expect(items[0].title).toBe('note1');
  expect(items[0].is_conflict).toBe(1);
}));
|
|
|
|
|
|
|
|
// If both clients delete the same note, the end state is just the folder and
// no conflict.
it('should resolve conflict if note has been deleted remotely and locally', asyncTest(async () => {
  const folder = await Folder.save({ title: "folder" });
  // Fix: parent_id must reference the folder's id; the original passed
  // `folder.title`, which silently left the note parented to a bogus id.
  const note = await Note.save({ title: "note", parent_id: folder.id });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();
  await Note.delete(note.id);
  await synchronizer().start();

  await switchClient(1);

  await Note.delete(note.id);
  await synchronizer().start();

  const items = await allNotesFolders();
  expect(items.length).toBe(1);
  expect(items[0].title).toBe('folder');

  await localNotesFoldersSameAsRemote(items, expect);
}));
|
2017-07-13 20:47:31 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should cross delete all folders', asyncTest(async () => {
  // If client1 and 2 have two folders, client 1 deletes item 1 and client
  // 2 deletes item 2, they should both end up with no items after sync.

  let folder1 = await Folder.save({ title: "folder1" });
  let folder2 = await Folder.save({ title: "folder2" });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();

  await sleep(0.1);

  // Client 2 deletes folder1 (not yet synced).
  await Folder.delete(folder1.id);

  await switchClient(1);

  // Client 1 deletes folder2, then pushes its delete.
  await Folder.delete(folder2.id);

  await synchronizer().start();

  await switchClient(2);

  // Client 2 pushes its own delete and pulls client 1's.
  await synchronizer().start();

  let items2 = await allNotesFolders();

  await switchClient(1);

  await synchronizer().start();

  let items1 = await allNotesFolders();

  // Both clients should now be empty and identical.
  expect(items1.length).toBe(0);
  expect(items1.length).toBe(items2.length);
}));
|
2017-07-01 17:17:49 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// A note deleted remotely but modified locally should survive as a conflict
// note carrying the local modification.
it('should handle conflict when remote note is deleted then local note is modified', asyncTest(async () => {
  let folder1 = await Folder.save({ title: "folder1" });
  let note1 = await Note.save({ title: "un", parent_id: folder1.id });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();

  await sleep(0.1);

  // Client 2 deletes the note and pushes the deletion.
  await Note.delete(note1.id);

  await synchronizer().start();

  await switchClient(1);

  // Client 1 modifies the same note before learning of the remote delete.
  let newTitle = 'Modified after having been deleted';
  await Note.save({ id: note1.id, title: newTitle });

  await synchronizer().start();

  let conflictedNotes = await Note.conflictedNotes();

  expect(conflictedNotes.length).toBe(1);
  expect(conflictedNotes[0].title).toBe(newTitle);

  // The original note itself is gone; only the conflict copy remains.
  let unconflictedNotes = await Note.unconflictedNotes();

  expect(unconflictedNotes.length).toBe(0);
}));
|
2017-07-01 00:53:22 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// A folder deleted remotely but renamed locally: the remote delete wins for
// the folder, so only one item is left at the end.
it('should handle conflict when remote folder is deleted then local folder is renamed', asyncTest(async () => {
  const folder1 = await Folder.save({ title: "folder1" });
  // The second folder and the note only matter for their side effects; the
  // original kept unused `folder2`/`note1` bindings, removed here.
  await Folder.save({ title: "folder2" });
  await Note.save({ title: "un", parent_id: folder1.id });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();

  await sleep(0.1);

  // Client 2 deletes folder1 and pushes the deletion.
  await Folder.delete(folder1.id);

  await synchronizer().start();

  await switchClient(1);

  await sleep(0.1);

  // Client 1 renames the already-remotely-deleted folder.
  const newTitle = 'Modified after having been deleted';
  await Folder.save({ id: folder1.id, title: newTitle });

  await synchronizer().start();

  const items = await allNotesFolders();

  expect(items.length).toBe(1);
}));
|
2017-06-30 20:19:30 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// Two folders with the same title, created independently on each client,
// should both survive the sync (titles are not unique keys).
it('should allow duplicate folder titles', asyncTest(async () => {
  let localF1 = await Folder.save({ title: "folder" });

  await switchClient(2);

  let remoteF2 = await Folder.save({ title: "folder" });
  await synchronizer().start();

  await switchClient(1);

  await sleep(0.1);

  await synchronizer().start();

  // Client 1 should now also have client 2's folder, title unchanged.
  let localF2 = await Folder.load(remoteF2.id);

  expect(localF2.title == remoteF2.title).toBe(true);

  // Then that folder that has been renamed locally should be set in such a way
  // that synchronizing it applies the title change remotely, and that new title
  // should be retrieved by client 2.
  // NOTE(review): no rename is visible in this test body — the comment above
  // may be stale; confirm against the original intent.

  await synchronizer().start();

  await switchClient(2);
  await sleep(0.1);

  await synchronizer().start();

  remoteF2 = await Folder.load(remoteF2.id);

  expect(remoteF2.title == localF2.title).toBe(true);
}));
|
2017-06-30 20:19:30 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// Shared body for the tag sync tests: creates a tag and notes on client 1,
// verifies the tag arrives on client 2 (decrypting it first when
// withEncryption is true), adds/removes tag-note links on alternating clients
// and checks the links propagate both ways.
// (Name typo "shoud" kept — both tag tests below call it by this name.)
async function shoudSyncTagTest(withEncryption) {
  let masterKey = null;
  if (withEncryption) {
    Setting.setValue('encryption.enabled', true);
    masterKey = await loadEncryptionMasterKey();
  }

  let f1 = await Folder.save({ title: "folder" });
  let n1 = await Note.save({ title: "mynote" });
  let n2 = await Note.save({ title: "mynote2" });
  let tag = await Tag.save({ title: 'mytag' });
  let context1 = await synchronizer().start();

  await switchClient(2);

  let context2 = await synchronizer().start();
  if (withEncryption) {
    // Client 2 must load the master key (password '123456') and decrypt the
    // tag before it can be used.
    const masterKey_2 = await MasterKey.load(masterKey.id);
    await encryptionService().loadMasterKey(masterKey_2, '123456', true);
    let t = await Tag.load(tag.id);
    await Tag.decrypt(t);
  }
  let remoteTag = await Tag.loadByTitle(tag.title);
  expect(!!remoteTag).toBe(true);
  expect(remoteTag.id).toBe(tag.id);
  // Client 2 tags both notes and pushes the links.
  await Tag.addNote(remoteTag.id, n1.id);
  await Tag.addNote(remoteTag.id, n2.id);
  let noteIds = await Tag.noteIds(tag.id);
  expect(noteIds.length).toBe(2);
  context2 = await synchronizer().start({ context: context2 });

  await switchClient(1);

  // Client 1 pulls the links, removes one, and pushes the removal.
  context1 = await synchronizer().start({ context: context1 });
  let remoteNoteIds = await Tag.noteIds(tag.id);
  expect(remoteNoteIds.length).toBe(2);
  await Tag.removeNote(tag.id, n1.id);
  remoteNoteIds = await Tag.noteIds(tag.id);
  expect(remoteNoteIds.length).toBe(1);
  context1 = await synchronizer().start({ context: context1 });

  await switchClient(2);

  // Client 2 should see the removal: one link left, and the same note id.
  context2 = await synchronizer().start({ context: context2 });
  noteIds = await Tag.noteIds(tag.id);
  expect(noteIds.length).toBe(1);
  expect(remoteNoteIds[0]).toBe(noteIds[0]);
}
|
2017-07-03 20:29:19 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// Plain-text variant of the shared tag-sync scenario.
it('should sync tags', asyncTest(async () => {
  const withEncryption = false;
  await shoudSyncTagTest(withEncryption);
}));
|
2017-07-03 20:29:19 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// Encrypted variant of the shared tag-sync scenario.
it('should sync encrypted tags', asyncTest(async () => {
  const withEncryption = true;
  await shoudSyncTagTest(withEncryption);
}));
|
2017-07-18 00:22:22 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
// Notes flagged is_conflict must stay local: after syncing, client 2 receives
// the folder but not the conflicted note.
it('should not sync notes with conflicts', asyncTest(async () => {
  const f1 = await Folder.save({ title: "folder" });
  // The saved note is never referenced again (the original kept an unused
  // `n1` binding).
  await Note.save({ title: "mynote", parent_id: f1.id, is_conflict: 1 });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();
  const notes = await Note.all();
  const folders = await Folder.all(); // original relied on ASI here — semicolon added
  expect(notes.length).toBe(0);
  expect(folders.length).toBe(1);
}));
|
|
|
|
|
|
|
|
// Deleting a note that has been turned into a conflict must not queue a
// remote deletion — conflict notes only ever existed locally.
it('should not try to delete on remote conflicted notes that have been deleted', asyncTest(async () => {
  const parentFolder = await Folder.save({ title: "folder" });
  const syncedNote = await Note.save({ title: "mynote", parent_id: parentFolder.id });
  await synchronizer().start();

  await switchClient(2);

  await synchronizer().start();
  await Note.save({ id: syncedNote.id, is_conflict: 1 });
  await Note.delete(syncedNote.id);
  const pendingDeletions = await BaseItem.deletedItems(syncTargetId());

  expect(pendingDeletions.length).toBe(0);
}));
|
|
|
|
|
|
|
|
// Shared scenario: two clients complete the same todo with slightly different
// todo_completed timestamps. In plain text this difference is ignorable and is
// resolved silently; with E2EE the content cannot be inspected, so the very
// same scenario must instead produce a regular conflict.
async function ignorableNoteConflictTest(withEncryption) {
	if (withEncryption) {
		Setting.setValue('encryption.enabled', true);
		await loadEncryptionMasterKey();
	}

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	if (withEncryption) {
		await loadEncryptionMasterKey(null, true);
		await decryptionWorker().start();
	}
	let note2 = await Note.load(note1.id);
	note2.todo_completed = time.unixMs() - 1;
	await Note.save(note2);
	note2 = await Note.load(note2.id);
	await synchronizer().start();

	await switchClient(1);

	let note2conf = await Note.load(note1.id);
	note2conf.todo_completed = time.unixMs();
	await Note.save(note2conf);
	note2conf = await Note.load(note1.id);
	await synchronizer().start();

	if (!withEncryption) {
		// That was previously a common conflict:
		// - Client 1 marks the todo as "done" and syncs
		// - Client 2, without syncing first, also marks it "done", then syncs
		// In theory it is a conflict because the todo_completed dates differ,
		// but in practice it doesn't matter: we can just keep the date of the
		// first time the todo was marked as "done".
		const conflictedNotes = await Note.conflictedNotes();
		expect(conflictedNotes.length).toBe(0);

		const notes = await Note.all();
		expect(notes.length).toBe(1);
		expect(notes[0].id).toBe(note1.id);
		expect(notes[0].todo_completed).toBe(note2.todo_completed);
	} else {
		// When the notes are encrypted this kind of smart conflict resolution
		// is not possible since the content is opaque, so it is handled as a
		// regular conflict.
		const conflictedNotes = await Note.conflictedNotes();
		expect(conflictedNotes.length).toBe(1);

		const notes = await Note.all();
		expect(notes.length).toBe(2);
	}
}
|
|
|
|
|
|
|
|
it('should not consider it is a conflict if neither the title nor body of the note have changed', asyncTest(async () => {
	// Plain-text variant of the shared scenario — no conflict expected.
	await ignorableNoteConflictTest(false);
}));
|
|
|
|
|
|
|
|
it('should always handle conflict if local or remote are encrypted', asyncTest(async () => {
	// E2EE variant of the shared scenario — a regular conflict is expected.
	await ignorableNoteConflictTest(true);
}));
|
|
|
|
|
|
|
|
it('items should be downloaded again when user cancels in the middle of delta operation', asyncTest(async () => {
	const folder1 = await Folder.save({ title: 'folder1' });
	await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
	await synchronizer().start();

	await switchClient(2);

	// Abort the delta loop mid-way: nothing should have been downloaded yet.
	synchronizer().testingHooks_ = ['cancelDeltaLoop2'];
	const context = await synchronizer().start();
	let notes = await Note.all();
	expect(notes.length).toBe(0);

	// Resuming with the saved context must pick the skipped items back up.
	synchronizer().testingHooks_ = [];
	await synchronizer().start({ context: context });
	notes = await Note.all();
	expect(notes.length).toBe(1);
}));
|
|
|
|
|
|
|
|
it('should skip items that cannot be synced', asyncTest(async () => {
	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
	const noteId = note1.id;
	await synchronizer().start();
	let disabledItems = await BaseItem.syncDisabledItems(syncTargetId());
	expect(disabledItems.length).toBe(0);

	// Make the target reject the modified note during upload.
	await Note.save({ id: noteId, title: 'un mod' });
	synchronizer().testingHooks_ = ['notesRejectedByTarget'];
	await synchronizer().start();
	synchronizer().testingHooks_ = [];
	await synchronizer().start(); // Another sync to check that this item is now excluded from sync

	await switchClient(2);

	await synchronizer().start();
	const notes = await Note.all();
	expect(notes.length).toBe(1);
	expect(notes[0].title).toBe('un'); // still the pre-rejection title

	await switchClient(1);

	disabledItems = await BaseItem.syncDisabledItems(syncTargetId());
	expect(disabledItems.length).toBe(1);
}));
|
|
|
|
|
|
|
|
it('notes and folders should get encrypted when encryption is enabled', asyncTest(async () => {
	Setting.setValue('encryption.enabled', true);
	const masterKey = await loadEncryptionMasterKey();
	const folder1 = await Folder.save({ title: 'folder1' });
	let note1 = await Note.save({ title: 'un', body: 'to be encrypted', parent_id: folder1.id });
	await synchronizer().start();
	// After synchronisation, remote items should be encrypted but local ones remain plain text
	note1 = await Note.load(note1.id);
	expect(note1.title).toBe('un');

	await switchClient(2);

	await synchronizer().start();
	let folder1_2 = await Folder.load(folder1.id);
	let note1_2 = await Note.load(note1.id);
	const masterKey_2 = await MasterKey.load(masterKey.id);
	// On this side however it should be received encrypted
	expect(!note1_2.title).toBe(true);
	expect(!folder1_2.title).toBe(true);
	expect(!!note1_2.encryption_cipher_text).toBe(true);
	expect(!!folder1_2.encryption_cipher_text).toBe(true);
	// Master key is already encrypted so it does not get re-encrypted during sync
	expect(masterKey_2.content).toBe(masterKey.content);
	expect(masterKey_2.checksum).toBe(masterKey.checksum);
	// Now load the master key we got from client 1 and try to decrypt
	await encryptionService().loadMasterKey(masterKey_2, '123456', true);
	// Get the decrypted items back
	await Folder.decrypt(folder1_2);
	await Note.decrypt(note1_2);
	folder1_2 = await Folder.load(folder1.id);
	note1_2 = await Note.load(note1.id);
	// Check that properties match the original items. Also check
	// the encryption did not affect the updated_time timestamp.
	expect(note1_2.title).toBe(note1.title);
	expect(note1_2.body).toBe(note1.body);
	expect(note1_2.updated_time).toBe(note1.updated_time);
	expect(!note1_2.encryption_cipher_text).toBe(true);
	expect(folder1_2.title).toBe(folder1.title);
	expect(folder1_2.updated_time).toBe(folder1.updated_time);
	expect(!folder1_2.encryption_cipher_text).toBe(true);
}));
|
|
|
|
|
|
|
|
it('should enable encryption automatically when downloading new master key (and none was previously available)', asyncTest(async () => {
	// Enable encryption on client 1 and sync an item
	Setting.setValue('encryption.enabled', true);
	await loadEncryptionMasterKey();
	let folder1 = await Folder.save({ title: 'folder1' });
	await synchronizer().start();

	await switchClient(2);

	// Synchronising should enable encryption since we're going to get a master key
	expect(Setting.value('encryption.enabled')).toBe(false);
	await synchronizer().start();
	expect(Setting.value('encryption.enabled')).toBe(true);

	// Check that we got the master key from client 1
	const masterKey = (await MasterKey.all())[0];
	expect(!!masterKey).toBe(true);

	// Since client 2 hasn't supplied a password yet, no master key is currently loaded
	expect(encryptionService().loadedMasterKeyIds().length).toBe(0);

	// If we sync now, nothing should be sent to target since we don't have a password.
	// Technically it's incorrect to set the property of an encrypted variable but it allows confirming
	// that encryption doesn't work if user hasn't supplied a password.
	await BaseItem.forceSync(folder1.id);
	await synchronizer().start();

	await switchClient(1);

	await synchronizer().start();
	folder1 = await Folder.load(folder1.id);
	expect(folder1.title).toBe('folder1'); // Still at old value

	await switchClient(2);

	// Now client 2 set the master key password
	Setting.setObjectKey('encryption.passwordCache', masterKey.id, '123456');
	await encryptionService().loadMasterKeysFromSettings();

	// Now that master key should be loaded
	expect(encryptionService().loadedMasterKeyIds()[0]).toBe(masterKey.id);

	// Decrypt all the data. Now change the title and sync again - this time the changes should be transmitted
	await decryptionWorker().start();
	// BUG FIX: the original assigned an undeclared `folder1_2` (implicit
	// global) that was never read; the result is now discarded.
	await Folder.save({ id: folder1.id, title: 'change test' });

	// If we sync now, this time client 1 should get the changes we did earlier
	await synchronizer().start();

	await switchClient(1);

	await synchronizer().start();
	// Decrypt the data we just got
	await decryptionWorker().start();
	folder1 = await Folder.load(folder1.id);
	expect(folder1.title).toBe('change test'); // Got title from client 2
}));
|
|
|
|
|
|
|
|
it('should encrypt existing notes too when enabling E2EE', asyncTest(async () => {
	// First create a folder, without encryption enabled, and sync it
	await Folder.save({ title: 'folder1' });
	await synchronizer().start();
	let files = await fileApi().list();
	let content = await fileApi().get(files.items[0].path);
	expect(content.indexOf('folder1') >= 0).toBe(true);

	// Then enable encryption and sync again
	let masterKey = await encryptionService().generateMasterKey('123456');
	masterKey = await MasterKey.save(masterKey);
	await encryptionService().enableEncryption(masterKey, '123456');
	await encryptionService().loadMasterKeysFromSettings();
	await synchronizer().start();

	// Even though the folder has not been changed it should have been synced again so that
	// an encrypted version of it replaces the decrypted version.
	files = await fileApi().list();
	expect(files.items.length).toBe(2);
	// By checking that the folder title is not present, we can confirm that the item has indeed been encrypted
	// One of the two items is the master key
	content = await fileApi().get(files.items[0].path);
	expect(content.indexOf('folder1') < 0).toBe(true);
	content = await fileApi().get(files.items[1].path);
	expect(content.indexOf('folder1') < 0).toBe(true);
}));
|
|
|
|
|
|
|
|
it('should sync resources', asyncTest(async () => {
	while (insideBeforeEach) await time.msleep(500);

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
	const resource1 = (await Resource.all())[0];
	const resourcePath1 = Resource.fullPath(resource1);
	await synchronizer().start();
	expect((await remoteNotesFoldersResources()).length).toBe(3);

	await switchClient(2);

	await synchronizer().start();
	const allResources = await Resource.all();
	expect(allResources.length).toBe(1);
	let resource1_2 = allResources[0];
	let ls = await Resource.localState(resource1_2);
	expect(resource1_2.id).toBe(resource1.id);
	// Metadata arrives via sync, but the blob is fetched separately.
	expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_IDLE);

	const fetcher = new ResourceFetcher(() => { return synchronizer().api() });
	fetcher.queueDownload(resource1_2.id);
	await fetcher.waitForAllFinished();

	resource1_2 = await Resource.load(resource1.id);
	ls = await Resource.localState(resource1_2);
	expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE);

	const resourcePath1_2 = Resource.fullPath(resource1_2);
	expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
}));
|
|
|
|
|
|
|
|
it('should handle resource download errors', asyncTest(async () => {
	while (insideBeforeEach) await time.msleep(500);

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
	// NOTE: the original also computed Resource.fullPath() here but never
	// used it — the dead local has been removed.
	let resource1 = (await Resource.all())[0];
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();

	const fetcher = new ResourceFetcher(() => { return {
		// Simulate a failed download
		get: () => { return new Promise((resolve, reject) => { reject(new Error('did not work')) }); }
	} });
	fetcher.queueDownload(resource1.id);
	await fetcher.waitForAllFinished();

	resource1 = await Resource.load(resource1.id);
	const ls = await Resource.localState(resource1);
	expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_ERROR);
	expect(ls.fetch_error).toBe('did not work');
}));
|
|
|
|
|
2019-05-12 12:41:07 +02:00
|
|
|
it('should set the resource file size if it is missing', asyncTest(async () => {
	while (insideBeforeEach) await time.msleep(500);

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	let r1 = (await Resource.all())[0];
	// Simulate a resource whose size was never recorded.
	await Resource.setFileSizeOnly(r1.id, -1);
	r1 = await Resource.load(r1.id);
	expect(r1.size).toBe(-1);

	// Downloading the blob should backfill the real size.
	const fetcher = new ResourceFetcher(() => { return synchronizer().api() });
	fetcher.queueDownload(r1.id);
	await fetcher.waitForAllFinished();
	r1 = await Resource.load(r1.id);
	expect(r1.size).toBe(2720);
}));
|
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should delete resources', asyncTest(async () => {
	while (insideBeforeEach) await time.msleep(500);

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
	const resource1 = (await Resource.all())[0];
	const resourcePath1 = Resource.fullPath(resource1);
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	let allResources = await Resource.all();
	expect(allResources.length).toBe(1);
	// NOTE: a dead `let all = await fileApi().list();` (result never used)
	// has been removed from the original.
	expect((await remoteNotesFoldersResources()).length).toBe(3);
	await Resource.delete(resource1.id);
	await synchronizer().start();
	expect((await remoteNotesFoldersResources()).length).toBe(2);

	await switchClient(1);

	// The blob still exists locally until the deletion is synced down.
	expect(await shim.fsDriver().exists(resourcePath1)).toBe(true);
	await synchronizer().start();
	allResources = await Resource.all();
	expect(allResources.length).toBe(0);
	expect(await shim.fsDriver().exists(resourcePath1)).toBe(false);
}));
|
|
|
|
|
|
|
|
// FIX: test description typo "encryt" -> "encrypt".
it('should encrypt resources', asyncTest(async () => {
	Setting.setValue('encryption.enabled', true);
	const masterKey = await loadEncryptionMasterKey();

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
	const resource1 = (await Resource.all())[0];
	const resourcePath1 = Resource.fullPath(resource1);
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	Setting.setObjectKey('encryption.passwordCache', masterKey.id, '123456');
	await encryptionService().loadMasterKeysFromSettings();

	const fetcher = new ResourceFetcher(() => { return synchronizer().api() });
	fetcher.queueDownload(resource1.id);
	await fetcher.waitForAllFinished();

	// The downloaded blob is encrypted; decrypting it must reproduce the
	// original file byte for byte.
	let resource1_2 = (await Resource.all())[0];
	resource1_2 = await Resource.decrypt(resource1_2);
	const resourcePath1_2 = Resource.fullPath(resource1_2);

	expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
}));
|
2018-03-09 22:59:12 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should upload decrypted items to sync target after encryption disabled', asyncTest(async () => {
	Setting.setValue('encryption.enabled', true);
	// The key itself is not referenced later, but loading it is required for
	// the encrypted sync below (the original kept an unused binding).
	await loadEncryptionMasterKey();

	await Folder.save({ title: 'folder1' });
	await synchronizer().start();

	let allEncrypted = await allSyncTargetItemsEncrypted();
	expect(allEncrypted).toBe(true);

	await encryptionService().disableEncryption();

	await synchronizer().start();
	allEncrypted = await allSyncTargetItemsEncrypted();
	expect(allEncrypted).toBe(false);
}));
|
2018-03-09 22:59:12 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should not upload any item if encryption was enabled, and items have not been decrypted, and then encryption disabled', asyncTest(async () => {
	// For some reason I can't explain, this test is sometimes executed before beforeEach is finished
	// which means it's going to fail in unexpected way. So the loop below wait for beforeEach to be done.
	while (insideBeforeEach) await time.msleep(100);

	Setting.setValue('encryption.enabled', true);
	const masterKey = await loadEncryptionMasterKey();

	await Folder.save({ title: 'folder1' });
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	expect(Setting.value('encryption.enabled')).toBe(true);

	// If we try to disable encryption now, it should throw an error because some items are
	// currently encrypted. They must be decrypted first so that they can be sent as
	// plain text to the sync target.
	//let hasThrown = await checkThrowAsync(async () => await encryptionService().disableEncryption());
	//expect(hasThrown).toBe(true);

	// Now supply the password, and decrypt the items
	Setting.setObjectKey('encryption.passwordCache', masterKey.id, '123456');
	await encryptionService().loadMasterKeysFromSettings();
	await decryptionWorker().start();

	// Try to disable encryption again
	// BUG FIX: `hasThrown` was assigned without declaration (implicit global).
	const hasThrown = await checkThrowAsync(async () => await encryptionService().disableEncryption());
	expect(hasThrown).toBe(false);

	// If we sync now the target should receive the decrypted items
	await synchronizer().start();
	// BUG FIX: `allEncrypted` was assigned without declaration (implicit global).
	const allEncrypted = await allSyncTargetItemsEncrypted();
	expect(allEncrypted).toBe(false);
}));
|
2018-03-09 22:59:12 +02:00
|
|
|
|
2019-05-12 16:53:42 +02:00
|
|
|
it('should set the resource file size after decryption', asyncTest(async () => {
	Setting.setValue('encryption.enabled', true);
	const masterKey = await loadEncryptionMasterKey();

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
	const resource1 = (await Resource.all())[0];
	// Wipe the recorded size so decryption has to restore it. (An unused
	// `resourcePath1` local from the original has been removed.)
	await Resource.setFileSizeOnly(resource1.id, -1);
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	Setting.setObjectKey('encryption.passwordCache', masterKey.id, '123456');
	await encryptionService().loadMasterKeysFromSettings();

	const fetcher = new ResourceFetcher(() => { return synchronizer().api() });
	fetcher.queueDownload(resource1.id);
	await fetcher.waitForAllFinished();
	await decryptionWorker().start();

	const resource1_2 = await Resource.load(resource1.id);
	expect(resource1_2.size).toBe(2720);
}));
|
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should encrypt remote resources after encryption has been enabled', asyncTest(async () => {
	while (insideBeforeEach) await time.msleep(100);

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
	// NOTE: an unused `resource1` local from the original has been removed.
	await synchronizer().start();

	// First sync happened without E2EE, so everything is plain text remotely.
	expect(await allSyncTargetItemsEncrypted()).toBe(false);

	const masterKey = await loadEncryptionMasterKey();
	await encryptionService().enableEncryption(masterKey, '123456');
	await encryptionService().loadMasterKeysFromSettings();

	// Re-syncing must replace the plain-text remote items (including the
	// resource blob) with encrypted versions.
	await synchronizer().start();

	expect(await allSyncTargetItemsEncrypted()).toBe(true);
}));
|
2018-03-09 22:59:12 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should upload encrypted resource, but it should not mark the blob as encrypted locally', asyncTest(async () => {
	while (insideBeforeEach) await time.msleep(100);

	const folder1 = await Folder.save({ title: 'folder1' });
	const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
	await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
	const masterKey = await loadEncryptionMasterKey();
	await encryptionService().enableEncryption(masterKey, '123456');
	await encryptionService().loadMasterKeysFromSettings();
	await synchronizer().start();

	// Encryption happens on upload only; the local blob stays plain.
	const resource1 = (await Resource.all())[0];
	expect(resource1.encryption_blob_encrypted).toBe(0);
}));
|
2018-03-09 22:59:12 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should create remote items with UTF-8 content', asyncTest(async () => {
	// Non-ASCII titles/bodies must round-trip through the sync target intact.
	const folder = await Folder.save({ title: 'Fahrräder' });
	await Note.save({ title: 'Fahrräder', body: 'Fahrräder', parent_id: folder.id });
	const all = await allNotesFolders();

	await synchronizer().start();

	await localNotesFoldersSameAsRemote(all, expect);
}));
|
2018-03-09 22:59:12 +02:00
|
|
|
|
2019-02-23 17:53:14 +02:00
|
|
|
it('should update remote items but not pull remote changes', asyncTest(async () => {
	const folder = await Folder.save({ title: 'folder1' });
	const note = await Note.save({ title: 'un', parent_id: folder.id });
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	await Note.save({ title: 'deux', parent_id: folder.id });
	await synchronizer().start();

	await switchClient(1);

	await Note.save({ title: 'un UPDATE', id: note.id });
	// Only push; the "deux" note created by client 2 must not be pulled.
	await synchronizer().start({ syncSteps: ['update_remote'] });
	const all = await allNotesFolders();
	expect(all.length).toBe(2);

	await switchClient(2);

	await synchronizer().start();
	const note2 = await Note.load(note.id);
	expect(note2.title).toBe('un UPDATE');
}));
|
2019-02-05 19:39:10 +02:00
|
|
|
|
2019-02-23 17:47:29 +02:00
|
|
|
it('should create a new Welcome notebook on each client', asyncTest(async () => {
	// Create the Welcome items on two separate clients

	await WelcomeUtils.createWelcomeItems();
	await synchronizer().start();

	await switchClient(2);

	await WelcomeUtils.createWelcomeItems();
	const beforeFolderCount = (await Folder.all()).length;
	const beforeNoteCount = (await Note.all()).length;
	expect(beforeFolderCount === 1).toBe(true);
	expect(beforeNoteCount > 1).toBe(true);

	await synchronizer().start();

	const afterFolderCount = (await Folder.all()).length;
	const afterNoteCount = (await Note.all()).length;

	// Each client created its own copy, so after sync both copies exist.
	expect(afterFolderCount).toBe(beforeFolderCount * 2);
	expect(afterNoteCount).toBe(beforeNoteCount * 2);

	// Changes to the Welcome items should be synced to all clients

	const f1 = (await Folder.all())[0];
	await Folder.save({ id: f1.id, title: 'Welcome MOD' });

	await synchronizer().start();

	await switchClient(1);

	await synchronizer().start();

	const f1_1 = await Folder.load(f1.id);
	expect(f1_1.title).toBe('Welcome MOD');

	// Now check that it created the duplicate tag

	const tags = await Tag.modelSelectAll('SELECT * FROM tags WHERE title = "organising"');
	expect(tags.length).toBe(2);
}));
|
|
|
|
|
2019-05-06 22:35:29 +02:00
|
|
|
it('should not save revisions when updating a note via sync', asyncTest(async () => {
	// When a note is updated, a revision of the original is created.
	// Here, on client 1, the note is updated for the first time, however since it is
	// via sync, we don't create a revision - that revision has already been created on client
	// 2 and is going to be synced.

	const n1 = await Note.save({ title: 'testing' });
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	await Note.save({ id: n1.id, title: 'mod from client 2' });
	await revisionService().collectRevisions();
	const allRevs1 = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
	expect(allRevs1.length).toBe(1);
	await synchronizer().start();

	await switchClient(1);

	await synchronizer().start();
	const allRevs2 = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
	expect(allRevs2.length).toBe(1);
	expect(allRevs2[0].id).toBe(allRevs1[0].id);
}));
|
|
|
|
|
|
|
|
it('should not save revisions when deleting a note via sync', asyncTest(async () => {
	const n1 = await Note.save({ title: 'testing' });
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	await Note.delete(n1.id);
	await revisionService().collectRevisions(); // REV 1
	{
		const allRevs = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
		expect(allRevs.length).toBe(1);
	}
	await synchronizer().start();

	await switchClient(1);

	await synchronizer().start(); // The local note gets deleted here, however a new rev is *not* created
	{
		const allRevs = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
		expect(allRevs.length).toBe(1);
	}

	const notes = await Note.all();
	expect(notes.length).toBe(0);
}));
|
|
|
|
|
|
|
|
it('should not save revisions when an item_change has been generated as a result of a sync', asyncTest(async () => {
	// When a note is modified an item_change object is going to be created. This
	// is used for example to tell the search engine, when note should be indexed. It is
	// also used by the revision service to tell what note should get a new revision.
	// When a note is modified via sync, this item_change object is also created. The issue
	// is that we don't want to create revisions for these particular item_changes, because
	// such revision has already been created on another client (whatever client initially
	// modified the note), and that rev is going to be synced.
	//
	// So in the end we need to make sure that we don't create these unecessary additional revisions.

	const n1 = await Note.save({ title: 'testing' });
	await synchronizer().start();

	await switchClient(2);

	await synchronizer().start();
	await Note.save({ id: n1.id, title: 'mod from client 2' });
	await revisionService().collectRevisions();
	await synchronizer().start();

	await switchClient(1);

	await synchronizer().start();

	{
		const allRevs = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
		expect(allRevs.length).toBe(1);
	}

	await revisionService().collectRevisions();

	{
		const allRevs = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
		expect(allRevs.length).toBe(1);
	}
}));
|
|
|
|
|
|
|
|
it('should handle case when new rev is created on client, then older rev arrives later via sync', asyncTest(async () => {
	// - C1 creates note 1
	// - C1 modifies note 1 - REV1 created
	// - C1 sync
	// - C2 sync
	// - C2 receives note 1
	// - C2 modifies note 1 - REV2 created (but not based on REV1)
	// - C2 receives REV1
	//
	// In that case, we need to make sure that REV1 and REV2 are both valid and can be retrieved.
	// Even though REV1 was created before REV2, REV2 is *not* based on REV1. This is not ideal
	// due to unecessary data being saved, but a possible edge case and we simply need to check
	// all the data is valid.

	const n1 = await Note.save({ title: 'note' });
	await Note.save({ id: n1.id, title: 'note REV1' });
	await revisionService().collectRevisions(); // REV1
	expect((await Revision.allByType(BaseModel.TYPE_NOTE, n1.id)).length).toBe(1);
	await synchronizer().start();

	await switchClient(2);

	// Receive the note but deliberately skip its revisions for now.
	synchronizer().testingHooks_ = ['skipRevisions'];
	await synchronizer().start();
	synchronizer().testingHooks_ = [];

	await Note.save({ id: n1.id, title: 'note REV2' });
	await revisionService().collectRevisions(); // REV2
	expect((await Revision.allByType(BaseModel.TYPE_NOTE, n1.id)).length).toBe(1);
	await synchronizer().start(); // Sync the rev that had been skipped above with skipRevisions

	const revisions = await Revision.allByType(BaseModel.TYPE_NOTE, n1.id);
	expect(revisions.length).toBe(2);

	expect((await revisionService().revisionNote(revisions, 0)).title).toBe('note REV1');
	expect((await revisionService().revisionNote(revisions, 1)).title).toBe('note REV2');
}));
|
|
|
|
|
2019-05-12 02:15:52 +02:00
|
|
|
it("should not download resources over the limit", asyncTest(async () => {
	// Create a note with an attached resource on client 1 and push it up.
	const note = await Note.save({ title: 'note' });
	await shim.attachFileToNote(note, __dirname + '/../tests/support/photo.jpg');
	await synchronizer().start();

	await switchClient(2);

	// Sync down with a 1-byte resource size cap so the photo exceeds the limit.
	const originalMax = synchronizer().maxResourceSize_;
	synchronizer().maxResourceSize_ = 1;
	await synchronizer().start();
	synchronizer().maxResourceSize_ = originalMax;

	// The oversized resource must be marked sync-disabled and left on the remote.
	const syncItems = await BaseItem.allSyncItems(syncTargetId());
	expect(syncItems.length).toBe(2);
	expect(syncItems[1].item_location).toBe(BaseItem.SYNC_ITEM_LOCATION_REMOTE);
	expect(syncItems[1].sync_disabled).toBe(1);
}));
|
|
|
|
|
2018-03-09 19:49:35 +02:00
|
|
|
});
|