Mirror of https://github.com/laurent22/joplin.git

All: Simplified synchronisation of resources to simplify encryption, and implemented resource encryption

Commit cc02c1d585 (parent: 26bf7c4d46)
@@ -11,6 +11,7 @@ mkdir -p "$BUILD_DIR/data"
 if [[ $TEST_FILE == "" ]]; then
 	(cd "$ROOT_DIR" && npm test tests-build/synchronizer.js)
 	(cd "$ROOT_DIR" && npm test tests-build/encryption.js)
+	(cd "$ROOT_DIR" && npm test tests-build/ArrayUtils.js)
 else
 	(cd "$ROOT_DIR" && npm test tests-build/$TEST_FILE.js)
 fi
CliClient/tests/ArrayUtils.js (new file, 32 lines)
@@ -0,0 +1,32 @@
+require('app-module-path').addPath(__dirname);
+
+const { time } = require('lib/time-utils.js');
+const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
+const ArrayUtils = require('lib/ArrayUtils.js');
+
+process.on('unhandledRejection', (reason, p) => {
+	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
+});
+
+describe('ArrayUtils', function() {
+
+	beforeEach(async (done) => {
+		done();
+	});
+
+	it('should remove array elements', async (done) => {
+		let a = ['un', 'deux', 'trois'];
+		a = ArrayUtils.removeElement(a, 'deux');
+
+		expect(a[0]).toBe('un');
+		expect(a[1]).toBe('trois');
+		expect(a.length).toBe(2);
+
+		a = ['un', 'deux', 'trois'];
+		a = ArrayUtils.removeElement(a, 'not in there');
+		expect(a.length).toBe(3);
+
+		done();
+	});
+
+});
@@ -1,9 +1,11 @@
 require('app-module-path').addPath(__dirname);
 
 const { time } = require('lib/time-utils.js');
-const { setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey } = require('test-utils.js');
+const { setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker } = require('test-utils.js');
+const { shim } = require('lib/shim.js');
 const Folder = require('lib/models/Folder.js');
 const Note = require('lib/models/Note.js');
+const Resource = require('lib/models/Resource.js');
 const Tag = require('lib/models/Tag.js');
 const { Database } = require('lib/database.js');
 const Setting = require('lib/models/Setting.js');
@@ -589,7 +591,7 @@ describe('Synchronizer', function() {
 
 		done();
 	});
 
 	async function ignorableNoteConflictTest(withEncryption) {
 		if (withEncryption) {
 			Setting.setValue('encryption.enabled', true);
@@ -771,10 +773,14 @@ describe('Synchronizer', function() {
 		// Since client 2 hasn't supplied a password yet, no master key is currently loaded
 		expect(encryptionService().loadedMasterKeyIds().length).toBe(0);
 
-		// If we sync now, nothing should be sent to target since we don't have a password
+		// If we sync now, nothing should be sent to target since we don't have a password.
+		// Technically it's incorrect to set the property of an encrypted variable but it allows confirming
+		// that encryption doesn't work if user hasn't supplied a password.
 		let folder1_2 = await Folder.save({ id: folder1.id, title: "change test" });
 		await synchronizer().start();
 
 		await switchClient(1);
 
 		await synchronizer().start();
 		folder1 = await Folder.load(folder1.id);
 		expect(folder1.title).toBe('folder1'); // Still at old value
@@ -788,18 +794,18 @@ describe('Synchronizer', function() {
 		// Now that master key should be loaded
 		expect(encryptionService().loadedMasterKeyIds()[0]).toBe(masterKey.id);
 
+		// Decrypt all the data. Now change the title and sync again - this time the changes should be transmitted
+		await decryptionWorker().start();
+		folder1_2 = await Folder.save({ id: folder1.id, title: "change test" });
 
 		// If we sync now, this time client 1 should get the changes we did earlier
 		await synchronizer().start();
 
 		await switchClient(1);
 
-		// NOTE: there might be a race condition here but can't figure it out. Up to this point all the tests
-		// will pass, which means the master key is loaded. However, the below test find that the title is still
-		// the previous value. Possible reasons are:
-		// - Client 2 didn't send the updated item
-		// - Client 1 didn't receive it
-		// Maybe due to sync_time/updated_time having the same value on one or both of the clients when tests run fast?
 		await synchronizer().start();
+		// Decrypt the data we just got
+		await decryptionWorker().start();
 		folder1 = await Folder.load(folder1.id);
 		expect(folder1.title).toBe('change test'); // Got title from client 2
 
@@ -812,7 +818,8 @@ describe('Synchronizer', function() {
 		let folder1 = await Folder.save({ title: "folder1" });
 		await synchronizer().start();
 		let files = await fileApi().list()
-		expect(files.items[0].content.indexOf('folder1') >= 0).toBe(true)
+		let content = await fileApi().get(files.items[0].path);
+		expect(content.indexOf('folder1') >= 0).toBe(true)
 
 		// Then enable encryption and sync again
 		let masterKey = await service.generateMasterKey('123456');
@@ -827,11 +834,58 @@ describe('Synchronizer', function() {
 		expect(files.items.length).toBe(2);
 		// By checking that the folder title is not present, we can confirm that the item has indeed been encrypted
 		// One of the two items is the master key
-		expect(files.items[0].content.indexOf('folder1') < 0).toBe(true);
-		expect(files.items[1].content.indexOf('folder1') < 0).toBe(true);
+		content = await fileApi().get(files.items[0].path);
+		expect(content.indexOf('folder1') < 0).toBe(true);
+		content = await fileApi().get(files.items[1].path);
+		expect(content.indexOf('folder1') < 0).toBe(true);
 
 		done();
 	});
 
+	it('should sync resources', async (done) => {
+		let folder1 = await Folder.save({ title: "folder1" });
+		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
+		await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
+		let resource1 = (await Resource.all())[0];
+		let resourcePath1 = Resource.fullPath(resource1);
+		await synchronizer().start();
+
+		await switchClient(2);
+
+		await synchronizer().start();
+		let resource1_2 = (await Resource.all())[0];
+		let resourcePath1_2 = Resource.fullPath(resource1_2);
+
+		expect(resource1_2.id).toBe(resource1.id);
+		expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
+
+		done();
+	});
+
+	it('should encrypt resources', async (done) => {
+		Setting.setValue('encryption.enabled', true);
+		const masterKey = await loadEncryptionMasterKey();
+
+		let folder1 = await Folder.save({ title: "folder1" });
+		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
+		await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
+		let resource1 = (await Resource.all())[0];
+		let resourcePath1 = Resource.fullPath(resource1);
+		await synchronizer().start();
+
+		await switchClient(2);
+
+		await synchronizer().start();
+		Setting.setObjectKey('encryption.passwordCache', masterKey.id, '123456');
+		await encryptionService().loadMasterKeysFromSettings();
+
+		let resource1_2 = (await Resource.all())[0];
+		resource1_2 = await Resource.decrypt(resource1_2);
+		let resourcePath1_2 = Resource.fullPath(resource1_2);
+
+		expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
+
+		done();
+	});
+
 });
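The two new tests above exercise the full resource round trip. For readers skimming the diff, here is a condensed, illustrative sketch of the "client 2" flow they rely on; it only re-uses helpers visible in this commit (test-utils' switchClient/synchronizer/encryptionService/decryptionWorker and the Resource model), and the function name is a placeholder, not code from the commit.

// Illustrative sketch only - not part of the commit. Assumes the requires at the
// top of this test file (test-utils.js helpers, Setting, Resource).
async function decryptEverythingOnClient2(masterKey) {
	await switchClient(2);
	await synchronizer().start(); // pulls down the master key and the encrypted items

	// Supply the password for the master key, then load it into the encryption service
	Setting.setObjectKey('encryption.passwordCache', masterKey.id, '123456');
	await encryptionService().loadMasterKeysFromSettings();

	// Decrypt queued items (notes, folders, resource metadata)...
	await decryptionWorker().start();

	// ...and decrypt a resource explicitly (metadata + binary blob)
	let resource = (await Resource.all())[0];
	resource = await Resource.decrypt(resource);
	return resource;
}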
@@ -23,10 +23,12 @@ const SyncTargetMemory = require('lib/SyncTargetMemory.js');
 const SyncTargetFilesystem = require('lib/SyncTargetFilesystem.js');
 const SyncTargetOneDrive = require('lib/SyncTargetOneDrive.js');
 const EncryptionService = require('lib/services/EncryptionService.js');
+const DecryptionWorker = require('lib/services/DecryptionWorker.js');
 
 let databases_ = [];
 let synchronizers_ = [];
 let encryptionServices_ = [];
+let decryptionWorkers_ = [];
 let fileApi_ = null;
 let currentClient_ = 1;
 
@@ -44,7 +46,8 @@ SyncTargetRegistry.addClass(SyncTargetMemory);
 SyncTargetRegistry.addClass(SyncTargetFilesystem);
 SyncTargetRegistry.addClass(SyncTargetOneDrive);
 
-const syncTargetId_ = SyncTargetRegistry.nameToId('memory');
+//const syncTargetId_ = SyncTargetRegistry.nameToId('memory');
+const syncTargetId_ = SyncTargetRegistry.nameToId('filesystem');
 const syncDir = __dirname + '/../tests/sync';
 
 const sleepTime = syncTargetId_ == SyncTargetRegistry.nameToId('filesystem') ? 1001 : 400;
@@ -157,11 +160,12 @@ async function setupDatabaseAndSynchronizer(id = null) {
 		syncTarget.setFileApi(fileApi());
 		syncTarget.setLogger(logger);
 		synchronizers_[id] = await syncTarget.synchronizer();
+		synchronizers_[id].autoStartDecryptionWorker_ = false; // For testing we disable this since it would make the tests non-deterministic
 	}
 
-	//if (!encryptionServices_[id]) {
-		encryptionServices_[id] = new EncryptionService();
-	//}
+	encryptionServices_[id] = new EncryptionService();
+	decryptionWorkers_[id] = new DecryptionWorker();
+	decryptionWorkers_[id].setEncryptionService(encryptionServices_[id]);
 
 	if (syncTargetId_ == SyncTargetRegistry.nameToId('filesystem')) {
 		fs.removeSync(syncDir)
@@ -186,6 +190,11 @@ function encryptionService(id = null) {
 	return encryptionServices_[id];
 }
 
+function decryptionWorker(id = null) {
+	if (id === null) id = currentClient_;
+	return decryptionWorkers_[id];
+}
+
 async function loadEncryptionMasterKey(id = null, useExisting = false) {
 	const service = encryptionService(id);
 
@@ -263,4 +272,4 @@ function fileContentEqual(path1, path2) {
 	return content1 === content2;
 }
 
-module.exports = { setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, encryptionService, loadEncryptionMasterKey, fileContentEqual };
+module.exports = { setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker };
@@ -6,4 +6,11 @@ ArrayUtils.unique = function(array) {
 	});
 }
 
+ArrayUtils.removeElement = function(array, element) {
+	const index = array.indexOf(element);
+	if (index < 0) return array;
+	array.splice(index, 1);
+	return array;
+}
+
 module.exports = ArrayUtils;
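A quick usage sketch of the new helper, matching the behaviour covered by the test added earlier in this commit: the element is removed in place via splice and the array is returned, and asking for a missing element is a no-op.

// Illustrative usage only. 'lib/ArrayUtils.js' resolves via app-module-path, as in the tests.
const ArrayUtils = require('lib/ArrayUtils.js');

let words = ['un', 'deux', 'trois'];
words = ArrayUtils.removeElement(words, 'deux');         // ['un', 'trois']
words = ArrayUtils.removeElement(words, 'not in there'); // unchanged: ['un', 'trois']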
@@ -15,6 +15,14 @@ class FsDriverNode {
 		return fs.writeFile(path, buffer);
 	}
 
+	move(source, dest) {
+		return fs.move(source, dest, { overwrite: true });
+	}
+
+	exists(path) {
+		return fs.pathExists(path);
+	}
+
 	open(path, mode) {
 		return fs.open(path, mode);
 	}
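Both new driver methods are thin wrappers over fs-extra, and they are what Resource.decrypt (later in this diff) uses to rename the downloaded blob before decrypting it. A minimal standalone illustration, assuming only fs-extra; the helper name is a placeholder.

// Illustrative sketch only: rename a file if it exists, overwriting any destination,
// using the same fs-extra calls the driver wraps.
const fs = require('fs-extra');

async function renameIfPresent(source, dest) {
	if (await fs.pathExists(source)) {
		await fs.move(source, dest, { overwrite: true });
	}
}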
@@ -14,6 +14,10 @@ class FsDriverRN {
 		throw new Error('Not implemented');
 	}
 
+	move(source, dest) {
+		throw new Error('Not implemented');
+	}
+
 	async open(path, mode) {
 		// Note: RNFS.read() doesn't provide any way to know if the end of file has been reached.
 		// So instead we stat the file here and use stat.size to manually check for end of file.
@@ -292,6 +292,7 @@ class JoplinDatabase extends Database {
 			}
 
 			queries.push('ALTER TABLE sync_items ADD COLUMN force_sync INT NOT NULL DEFAULT 0');
+			queries.push('ALTER TABLE resources ADD COLUMN encryption_blob_encrypted INT NOT NULL DEFAULT 0');
 		}
 
 		queries.push({ sql: 'UPDATE version SET version = ?', params: [targetVersion] });
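The new resources.encryption_blob_encrypted column is an integer used as a boolean flag on resource rows: it records whether the binary blob on disk (as opposed to the metadata) is still encrypted. A hedged sketch of how the flag is read, using only model calls that appear in this diff; the function name is illustrative.

// Illustrative only: Resource.all() is also used by the tests added in this commit.
const Resource = require('lib/models/Resource.js');

async function firstResourceBlobIsEncrypted() {
	const resource = (await Resource.all())[0];
	return !!resource.encryption_blob_encrypted; // 0 = plain blob on disk, 1 = still encrypted
}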
@@ -1,6 +1,8 @@
 const BaseModel = require('lib/BaseModel.js');
 const BaseItem = require('lib/models/BaseItem.js');
 const Setting = require('lib/models/Setting.js');
+const ArrayUtils = require('lib/ArrayUtils.js');
+const pathUtils = require('lib/path-utils.js');
 const { mime } = require('lib/mime-utils.js');
 const { filename } = require('lib/path-utils.js');
 const { FsDriverDummy } = require('lib/fs-driver-dummy.js');
@@ -16,6 +18,11 @@ class Resource extends BaseItem {
 		return BaseModel.TYPE_RESOURCE;
 	}
 
+	static encryptionService() {
+		if (!this.encryptionService_) throw new Error('Resource.encryptionService_ is not set!!');
+		return this.encryptionService_;
+	}
+
 	static isSupportedImageMimeType(type) {
 		const imageMimeTypes = ["image/jpg", "image/jpeg", "image/png", "image/gif"];
 		return imageMimeTypes.indexOf(type.toLowerCase()) >= 0;
@@ -28,19 +35,67 @@ class Resource extends BaseItem {
 
 	static async serialize(item, type = null, shownKeys = null) {
 		let fieldNames = this.fieldNames();
 		fieldNames.push('type_');
+		//fieldNames = ArrayUtils.removeElement(fieldNames, 'encryption_blob_encrypted');
 		return super.serialize(item, 'resource', fieldNames);
 	}
 
-	static filename(resource) {
-		let extension = resource.file_extension;
+	static filename(resource, encryptedBlob = false) {
+		let extension = encryptedBlob ? 'crypted' : resource.file_extension;
 		if (!extension) extension = resource.mime ? mime.toFileExtension(resource.mime) : '';
-		extension = extension ? '.' + extension : '';
+		extension = extension ? ('.' + extension) : '';
 		return resource.id + extension;
 	}
 
-	static fullPath(resource) {
-		return Setting.value('resourceDir') + '/' + this.filename(resource);
+	static fullPath(resource, encryptedBlob = false) {
+		return Setting.value('resourceDir') + '/' + this.filename(resource, encryptedBlob);
+	}
+
+	// For resources, we need to decrypt the item (metadata) and the resource binary blob.
+	static async decrypt(item) {
+		const decryptedItem = await super.decrypt(item);
+		if (!decryptedItem.encryption_blob_encrypted) return decryptedItem;
+
+		const plainTextPath = this.fullPath(decryptedItem);
+		const encryptedPath = this.fullPath(decryptedItem, true);
+		const noExtPath = pathUtils.dirname(encryptedPath) + '/' + pathUtils.filename(encryptedPath);
+
+		// When the resource blob is downloaded by the synchroniser, it's initially a file with no
+		// extension (since it's encrypted, so we don't know its extension). So here rename it
+		// to a file with a ".crypted" extension so that it's better identified, and then decrypt it.
+		// Potentially plainTextPath is also a path with no extension if it's an unknown mime type.
+		if (await this.fsDriver().exists(noExtPath)) {
+			await this.fsDriver().move(noExtPath, encryptedPath);
+		}
+
+		await this.encryptionService().decryptFile(encryptedPath, plainTextPath);
+		item.encryption_blob_encrypted = 0;
+		return Resource.save(decryptedItem, { autoTimestamp: false });
+	}
+
+	// Prepare the resource by encrypting it if needed.
+	// The call returns the path to the physical file AND the resource object
+	// which may have been modified. So the caller should update their copy with this.
+	static async fullPathForSyncUpload(resource) {
+		const plainTextPath = this.fullPath(resource);
+
+		if (!Setting.value('encryption.enabled')) {
+			if (resource.encryption_blob_encrypted) {
+				resource.encryption_blob_encrypted = 0;
+				await Resource.save(resource, { autoTimestamp: false });
+			}
+			return { path: plainTextPath, resource: resource };
+		}
+
+		const encryptedPath = this.fullPath(resource, true);
+		if (resource.encryption_blob_encrypted) return { path: encryptedPath, resource: resource };
+		await this.encryptionService().encryptFile(plainTextPath, encryptedPath);
+
+		resource.encryption_blob_encrypted = 1;
+		await Resource.save(resource, { autoTimestamp: false });
+
+		return { path: encryptedPath, resource: resource };
 	}
 
 	static markdownTag(resource) {
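To make the intended call pattern explicit, here is a compressed, illustrative sketch of the upload and download sides. It mirrors the Synchronizer hunk later in this diff and the tests added above; the helper names and the api/resourceDirName parameters are placeholders, not code from the commit.

// Upload side (cf. the Synchronizer change below): encrypt on demand, then push the blob file.
async function uploadResourceBlob(api, resourceDirName, resource) {
	const result = await Resource.fullPathForSyncUpload(resource); // may write <id>.crypted next to the plain file
	resource = result.resource;                                    // encryption_blob_encrypted may have changed
	await api.put(resourceDirName + '/' + resource.id, null, { path: result.path, source: 'file' });
	return resource;
}

// Download side (cf. the tests above): decrypt metadata and the binary blob together.
async function decryptDownloadedResource(resource) {
	return Resource.decrypt(resource); // renames the extension-less download to .crypted, then decrypts it
}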
@@ -145,6 +145,8 @@ class EncryptionService {
 		throw new Error('NOT TESTED');
 
 		// Just putting this here in case it becomes needed
+		// Normally seeding random bytes is not needed for our use since
+		// we use shim.randomBytes directly to generate master keys.
 
 		const sjcl = shim.sjclModule;
 		const randomBytes = await shim.randomBytes(1024/8);
@@ -23,6 +23,7 @@ class Synchronizer {
 		this.logger_ = new Logger();
 		this.appType_ = appType;
 		this.cancelling_ = false;
+		this.autoStartDecryptionWorker_ = true;
 
 		// Debug flags are used to test certain hard-to-test conditions
 		// such as cancelling in the middle of a loop.
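The new autoStartDecryptionWorker_ flag defaults to true, so a normal sync still schedules the DecryptionWorker automatically (see the last hunk of this file); the test harness turns it off to keep runs deterministic. An illustrative snippet of that override, matching the test-utils hunk earlier in this diff:

// From the test setup (see test-utils.js above): disable the automatic worker so that
// tests decide themselves when decryption runs.
const synchronizer = await syncTarget.synchronizer();
synchronizer.autoStartDecryptionWorker_ = false;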
@@ -216,7 +217,7 @@ class Synchronizer {
 					if (donePaths.indexOf(path) > 0) throw new Error(sprintf('Processing a path that has already been done: %s. sync_time was not updated?', path));
 
 					let remote = await this.api().stat(path);
-					let content = await ItemClass.serializeForSync(local);
+					//let content = await ItemClass.serializeForSync(local);
 					let action = null;
 					let updateSyncTimeOnly = true;
 					let reason = '';
@@ -271,23 +272,12 @@ class Synchronizer {
 					}
 
 					if (local.type_ == BaseModel.TYPE_RESOURCE && (action == 'createRemote' || (action == 'itemConflict' && remote))) {
-						let remoteContentPath = this.resourceDirName_ + '/' + local.id;
 						try {
-							// TODO: handle node and mobile in the same way
-							if (shim.isNode()) {
-								let resourceContent = '';
-								try {
-									resourceContent = await Resource.content(local);
-								} catch (error) {
-									error.message = 'Cannot read resource content: ' + local.id + ': ' + error.message;
-									this.logger().error(error);
-									this.progressReport_.errors.push(error);
-								}
-								await this.api().put(remoteContentPath, resourceContent);
-							} else {
-								const localResourceContentPath = Resource.fullPath(local);
-								await this.api().put(remoteContentPath, null, { path: localResourceContentPath, source: 'file' });
-							}
+							const remoteContentPath = this.resourceDirName_ + '/' + local.id;
+							const result = await Resource.fullPathForSyncUpload(local);
+							local = result.resource;
+							const localResourceContentPath = result.path;
+							await this.api().put(remoteContentPath, null, { path: localResourceContentPath, source: 'file' });
 						} catch (error) {
 							if (error && error.code === 'cannotSync') {
 								await handleCannotSyncItem(syncTargetId, local, error.message);
@@ -318,6 +308,7 @@ class Synchronizer {
 							error.code = 'cannotSync';
 							throw error;
 						}
+						const content = await ItemClass.serializeForSync(local);
 						await this.api().put(path, content);
 					} catch (error) {
 						if (error && error.code === 'cannotSync') {
@@ -598,7 +589,7 @@ class Synchronizer {
 				}
 			}
 
-			if (masterKeysAfter) {
+			if (masterKeysAfter && this.autoStartDecryptionWorker_) {
 				DecryptionWorker.instance().scheduleStart();
 			}
 