
All: Simplified synchronisation of resources to simplify encryption, and implemented resource encryption

Laurent Cozic 2017-12-19 19:01:29 +00:00
parent 26bf7c4d46
commit cc02c1d585
11 changed files with 205 additions and 41 deletions

View File

@ -11,6 +11,7 @@ mkdir -p "$BUILD_DIR/data"
if [[ $TEST_FILE == "" ]]; then
(cd "$ROOT_DIR" && npm test tests-build/synchronizer.js)
(cd "$ROOT_DIR" && npm test tests-build/encryption.js)
(cd "$ROOT_DIR" && npm test tests-build/ArrayUtils.js)
else
(cd "$ROOT_DIR" && npm test tests-build/$TEST_FILE.js)
fi

View File

@ -0,0 +1,32 @@
require('app-module-path').addPath(__dirname);
const { time } = require('lib/time-utils.js');
const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
const ArrayUtils = require('lib/ArrayUtils.js');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
describe('ArrayUtils', function() {
beforeEach(async (done) => {
done();
});
it('should remove array elements', async (done) => {
let a = ['un', 'deux', 'trois'];
a = ArrayUtils.removeElement(a, 'deux');
expect(a[0]).toBe('un');
expect(a[1]).toBe('trois');
expect(a.length).toBe(2);
a = ['un', 'deux', 'trois'];
a = ArrayUtils.removeElement(a, 'not in there');
expect(a.length).toBe(3);
done();
});
});

View File

@ -1,9 +1,11 @@
require('app-module-path').addPath(__dirname);
const { time } = require('lib/time-utils.js');
const { setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey } = require('test-utils.js');
const { setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker } = require('test-utils.js');
const { shim } = require('lib/shim.js');
const Folder = require('lib/models/Folder.js');
const Note = require('lib/models/Note.js');
const Resource = require('lib/models/Resource.js');
const Tag = require('lib/models/Tag.js');
const { Database } = require('lib/database.js');
const Setting = require('lib/models/Setting.js');
@ -589,7 +591,7 @@ describe('Synchronizer', function() {
done();
});
async function ignorableNoteConflictTest(withEncryption) {
if (withEncryption) {
Setting.setValue('encryption.enabled', true);
@ -771,10 +773,14 @@ describe('Synchronizer', function() {
// Since client 2 hasn't supplied a password yet, no master key is currently loaded
expect(encryptionService().loadedMasterKeyIds().length).toBe(0);
// If we sync now, nothing should be sent to target since we don't have a password
// If we sync now, nothing should be sent to target since we don't have a password.
// Technically it's incorrect to set a property on an encrypted item, but it allows confirming
// that nothing gets encrypted and synced while the user hasn't supplied a password.
let folder1_2 = await Folder.save({ id: folder1.id, title: "change test" });
await synchronizer().start();
await switchClient(1);
await synchronizer().start();
folder1 = await Folder.load(folder1.id);
expect(folder1.title).toBe('folder1'); // Still at old value
@ -788,18 +794,18 @@ describe('Synchronizer', function() {
// Now that master key should be loaded
expect(encryptionService().loadedMasterKeyIds()[0]).toBe(masterKey.id);
// Decrypt all the data. Now change the title and sync again - this time the changes should be transmitted
await decryptionWorker().start();
folder1_2 = await Folder.save({ id: folder1.id, title: "change test" });
// If we sync now, this time client 1 should get the changes we did earlier
await synchronizer().start();
await switchClient(1);
// NOTE: there might be a race condition here but I can't figure it out. Up to this point all the tests
// pass, which means the master key is loaded. However, the test below finds that the title is still
// at the previous value. Possible reasons are:
// - Client 2 didn't send the updated item
// - Client 1 didn't receive it
// Maybe due to sync_time/updated_time having the same value on one or both clients when the tests run fast? (See the sketch after this hunk.)
await synchronizer().start();
// Decrypt the data we just got
await decryptionWorker().start();
folder1 = await Folder.load(folder1.id);
expect(folder1.title).toBe('change test'); // Got title from client 2
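The timing issue speculated about in the comment above comes down to how the synchroniser decides whether an item still needs to be uploaded. A minimal illustrative sketch of that decision, assuming the usual sync_time/updated_time comparison (the helper name and object shapes are hypothetical, not code from this commit):

// Hypothetical: an item is only pushed when it was modified after the last recorded sync.
// If Folder.save() and the preceding sync complete within the same millisecond,
// updated_time === sync_time and the change is silently skipped until the next edit.
function needsUpload(item, syncItem) {
	return item.updated_time > syncItem.sync_time;
}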
@ -812,7 +818,8 @@ describe('Synchronizer', function() {
let folder1 = await Folder.save({ title: "folder1" });
await synchronizer().start();
let files = await fileApi().list()
expect(files.items[0].content.indexOf('folder1') >= 0).toBe(true)
let content = await fileApi().get(files.items[0].path);
expect(content.indexOf('folder1') >= 0).toBe(true)
// Then enable encryption and sync again
let masterKey = await service.generateMasterKey('123456');
@ -827,11 +834,58 @@ describe('Synchronizer', function() {
expect(files.items.length).toBe(2);
// By checking that the folder title is not present, we can confirm that the item has indeed been encrypted
// One of the two items is the master key
expect(files.items[0].content.indexOf('folder1') < 0).toBe(true);
expect(files.items[1].content.indexOf('folder1') < 0).toBe(true);
content = await fileApi().get(files.items[0].path);
expect(content.indexOf('folder1') < 0).toBe(true);
content = await fileApi().get(files.items[1].path);
expect(content.indexOf('folder1') < 0).toBe(true);
done();
});
it('should sync resources', async (done) => {
let folder1 = await Folder.save({ title: "folder1" });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
let resource1 = (await Resource.all())[0];
let resourcePath1 = Resource.fullPath(resource1);
await synchronizer().start();
await switchClient(2);
await synchronizer().start();
let resource1_2 = (await Resource.all())[0];
let resourcePath1_2 = Resource.fullPath(resource1_2);
expect(resource1_2.id).toBe(resource1.id);
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
done();
});
it('should encrypt resources', async (done) => {
Setting.setValue('encryption.enabled', true);
const masterKey = await loadEncryptionMasterKey();
let folder1 = await Folder.save({ title: "folder1" });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
let resource1 = (await Resource.all())[0];
let resourcePath1 = Resource.fullPath(resource1);
await synchronizer().start();
await switchClient(2);
await synchronizer().start();
Setting.setObjectKey('encryption.passwordCache', masterKey.id, '123456');
await encryptionService().loadMasterKeysFromSettings();
let resource1_2 = (await Resource.all())[0];
resource1_2 = await Resource.decrypt(resource1_2);
let resourcePath1_2 = Resource.fullPath(resource1_2);
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
done();
});
});

View File

@ -23,10 +23,12 @@ const SyncTargetMemory = require('lib/SyncTargetMemory.js');
const SyncTargetFilesystem = require('lib/SyncTargetFilesystem.js');
const SyncTargetOneDrive = require('lib/SyncTargetOneDrive.js');
const EncryptionService = require('lib/services/EncryptionService.js');
const DecryptionWorker = require('lib/services/DecryptionWorker.js');
let databases_ = [];
let synchronizers_ = [];
let encryptionServices_ = [];
let decryptionWorkers_ = [];
let fileApi_ = null;
let currentClient_ = 1;
@ -44,7 +46,8 @@ SyncTargetRegistry.addClass(SyncTargetMemory);
SyncTargetRegistry.addClass(SyncTargetFilesystem);
SyncTargetRegistry.addClass(SyncTargetOneDrive);
const syncTargetId_ = SyncTargetRegistry.nameToId('memory');
//const syncTargetId_ = SyncTargetRegistry.nameToId('memory');
const syncTargetId_ = SyncTargetRegistry.nameToId('filesystem');
const syncDir = __dirname + '/../tests/sync';
const sleepTime = syncTargetId_ == SyncTargetRegistry.nameToId('filesystem') ? 1001 : 400;
@ -157,11 +160,12 @@ async function setupDatabaseAndSynchronizer(id = null) {
syncTarget.setFileApi(fileApi());
syncTarget.setLogger(logger);
synchronizers_[id] = await syncTarget.synchronizer();
synchronizers_[id].autoStartDecryptionWorker_ = false; // For testing we disable this since it would make the tests non-deterministic
}
//if (!encryptionServices_[id]) {
encryptionServices_[id] = new EncryptionService();
//}
encryptionServices_[id] = new EncryptionService();
decryptionWorkers_[id] = new DecryptionWorker();
decryptionWorkers_[id].setEncryptionService(encryptionServices_[id]);
if (syncTargetId_ == SyncTargetRegistry.nameToId('filesystem')) {
fs.removeSync(syncDir)
@ -186,6 +190,11 @@ function encryptionService(id = null) {
return encryptionServices_[id];
}
function decryptionWorker(id = null) {
if (id === null) id = currentClient_;
return decryptionWorkers_[id];
}
async function loadEncryptionMasterKey(id = null, useExisting = false) {
const service = encryptionService(id);
@ -263,4 +272,4 @@ function fileContentEqual(path1, path2) {
return content1 === content2;
}
module.exports = { setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, encryptionService, loadEncryptionMasterKey, fileContentEqual };
module.exports = { setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker };

View File

@ -6,4 +6,11 @@ ArrayUtils.unique = function(array) {
});
}
ArrayUtils.removeElement = function(array, element) {
const index = array.indexOf(element);
if (index < 0) return array;
array.splice(index, 1);
return array;
}
module.exports = ArrayUtils;

View File

@ -15,6 +15,14 @@ class FsDriverNode {
return fs.writeFile(path, buffer);
}
move(source, dest) {
return fs.move(source, dest, { overwrite: true });
}
exists(path) {
return fs.pathExists(path);
}
open(path, mode) {
return fs.open(path, mode);
}

View File

@ -14,6 +14,10 @@ class FsDriverRN {
throw new Error('Not implemented');
}
move(source, dest) {
throw new Error('Not implemented');
}
async open(path, mode) {
// Note: RNFS.read() doesn't provide any way to know if the end of file has been reached.
// So instead we stat the file here and use stat.size to manually check for end of file.
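The hunk is truncated here; the following is a rough sketch of the approach the comment describes, assuming the react-native-fs API (RNFS.stat / RNFS.read). The class name is only for the sketch and this is not the code from this commit:

const RNFS = require('react-native-fs');

class FsDriverRNSketch {
	// Stat the file up front so read() can compare its running offset against the
	// file size and detect end-of-file itself, since RNFS.read() won't report it.
	async open(path, mode) {
		const stat = await RNFS.stat(path);
		return { path: path, offset: 0, size: stat.size, mode: mode };
	}
}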

View File

@ -292,6 +292,7 @@ class JoplinDatabase extends Database {
}
queries.push('ALTER TABLE sync_items ADD COLUMN force_sync INT NOT NULL DEFAULT 0');
queries.push('ALTER TABLE resources ADD COLUMN encryption_blob_encrypted INT NOT NULL DEFAULT 0');
}
queries.push({ sql: 'UPDATE version SET version = ?', params: [targetVersion] });

View File

@ -1,6 +1,8 @@
const BaseModel = require('lib/BaseModel.js');
const BaseItem = require('lib/models/BaseItem.js');
const Setting = require('lib/models/Setting.js');
const ArrayUtils = require('lib/ArrayUtils.js');
const pathUtils = require('lib/path-utils.js');
const { mime } = require('lib/mime-utils.js');
const { filename } = require('lib/path-utils.js');
const { FsDriverDummy } = require('lib/fs-driver-dummy.js');
@ -16,6 +18,11 @@ class Resource extends BaseItem {
return BaseModel.TYPE_RESOURCE;
}
static encryptionService() {
if (!this.encryptionService_) throw new Error('Resource.encryptionService_ is not set!!');
return this.encryptionService_;
}
static isSupportedImageMimeType(type) {
const imageMimeTypes = ["image/jpg", "image/jpeg", "image/png", "image/gif"];
return imageMimeTypes.indexOf(type.toLowerCase()) >= 0;
@ -28,19 +35,67 @@ class Resource extends BaseItem {
static async serialize(item, type = null, shownKeys = null) {
let fieldNames = this.fieldNames();
fieldNames.push('type_');
//fieldNames = ArrayUtils.removeElement(fieldNames, 'encryption_blob_encrypted');
return super.serialize(item, 'resource', fieldNames);
}
static filename(resource) {
let extension = resource.file_extension;
static filename(resource, encryptedBlob = false) {
let extension = encryptedBlob ? 'crypted' : resource.file_extension;
if (!extension) extension = resource.mime ? mime.toFileExtension(resource.mime) : '';
extension = extension ? '.' + extension : '';
extension = extension ? ('.' + extension) : '';
return resource.id + extension;
}
static fullPath(resource) {
return Setting.value('resourceDir') + '/' + this.filename(resource);
static fullPath(resource, encryptedBlob = false) {
return Setting.value('resourceDir') + '/' + this.filename(resource, encryptedBlob);
}
// For resources, we need to decrypt the item (metadata) and the resource binary blob.
static async decrypt(item) {
const decryptedItem = await super.decrypt(item);
if (!decryptedItem.encryption_blob_encrypted) return decryptedItem;
const plainTextPath = this.fullPath(decryptedItem);
const encryptedPath = this.fullPath(decryptedItem, true);
const noExtPath = pathUtils.dirname(encryptedPath) + '/' + pathUtils.filename(encryptedPath);
// When the resource blob is downloaded by the synchroniser, it's initially a file with no
// extension (since it's encrypted, we don't know its extension). So here we rename it to a file
// with a ".crypted" extension so that it's easier to identify, and then decrypt it.
// Potentially plainTextPath is also a path with no extension if it's an unknown mime type.
if (await this.fsDriver().exists(noExtPath)) {
await this.fsDriver().move(noExtPath, encryptedPath);
}
await this.encryptionService().decryptFile(encryptedPath, plainTextPath);
decryptedItem.encryption_blob_encrypted = 0;
return Resource.save(decryptedItem, { autoTimestamp: false });
}
// Prepare the resource by encrypting it if needed.
// The call returns the path to the physical file AND the resource object
// which may have been modified. So the caller should update their copy with this.
static async fullPathForSyncUpload(resource) {
const plainTextPath = this.fullPath(resource);
if (!Setting.value('encryption.enabled')) {
if (resource.encryption_blob_encrypted) {
resource.encryption_blob_encrypted = 0;
await Resource.save(resource, { autoTimestamp: false });
}
return { path: plainTextPath, resource: resource };
}
const encryptedPath = this.fullPath(resource, true);
if (resource.encryption_blob_encrypted) return { path: encryptedPath, resource: resource };
await this.encryptionService().encryptFile(plainTextPath, encryptedPath);
resource.encryption_blob_encrypted = 1;
await Resource.save(resource, { autoTimestamp: false });
return { path: encryptedPath, resource: resource };
}
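A minimal caller-side sketch of the contract described in the comment above fullPathForSyncUpload: the caller keeps the returned resource because the encryption_blob_encrypted flag may have changed. It mirrors the Synchronizer change further down; the function name and the api/remotePath parameters are placeholders, not code from this commit:

// Hypothetical caller: upload whichever file (plain or encrypted) the model prepared.
async function uploadResourceBlob(api, remotePath, resource) {
	const result = await Resource.fullPathForSyncUpload(resource);
	resource = result.resource; // may now have encryption_blob_encrypted set to 1
	await api.put(remotePath, null, { path: result.path, source: 'file' });
	return resource;
}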
static markdownTag(resource) {

View File

@ -145,6 +145,8 @@ class EncryptionService {
throw new Error('NOT TESTED');
// Just putting this here in case it becomes needed
// Normally seeding random bytes is not needed for our use since
// we use shim.randomBytes directly to generate master keys.
const sjcl = shim.sjclModule;
const randomBytes = await shim.randomBytes(1024/8);
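For context on the comment above: master key material comes straight from the platform RNG via shim.randomBytes(), so SJCL's own PRNG never needs seeding. A simplified sketch, assuming shim.randomBytes() resolves to an array of byte values (the function name and key size are assumptions, not the actual generateMasterKey implementation):

// Hypothetical: derive hex key material directly from the platform's random bytes.
async function makeKeyMaterial() {
	const bytes = await shim.randomBytes(256 / 8);
	return bytes.map((b) => ('0' + b.toString(16)).slice(-2)).join('');
}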

View File

@ -23,6 +23,7 @@ class Synchronizer {
this.logger_ = new Logger();
this.appType_ = appType;
this.cancelling_ = false;
this.autoStartDecryptionWorker_ = true;
// Debug flags are used to test certain hard-to-test conditions
// such as cancelling in the middle of a loop.
@ -216,7 +217,7 @@ class Synchronizer {
if (donePaths.indexOf(path) >= 0) throw new Error(sprintf('Processing a path that has already been done: %s. sync_time was not updated?', path));
let remote = await this.api().stat(path);
let content = await ItemClass.serializeForSync(local);
//let content = await ItemClass.serializeForSync(local);
let action = null;
let updateSyncTimeOnly = true;
let reason = '';
@ -271,23 +272,12 @@ class Synchronizer {
}
if (local.type_ == BaseModel.TYPE_RESOURCE && (action == 'createRemote' || (action == 'itemConflict' && remote))) {
let remoteContentPath = this.resourceDirName_ + '/' + local.id;
try {
// TODO: handle node and mobile in the same way
if (shim.isNode()) {
let resourceContent = '';
try {
resourceContent = await Resource.content(local);
} catch (error) {
error.message = 'Cannot read resource content: ' + local.id + ': ' + error.message;
this.logger().error(error);
this.progressReport_.errors.push(error);
}
await this.api().put(remoteContentPath, resourceContent);
} else {
const localResourceContentPath = Resource.fullPath(local);
await this.api().put(remoteContentPath, null, { path: localResourceContentPath, source: 'file' });
}
const remoteContentPath = this.resourceDirName_ + '/' + local.id;
const result = await Resource.fullPathForSyncUpload(local);
local = result.resource;
const localResourceContentPath = result.path;
await this.api().put(remoteContentPath, null, { path: localResourceContentPath, source: 'file' });
} catch (error) {
if (error && error.code === 'cannotSync') {
await handleCannotSyncItem(syncTargetId, local, error.message);
@ -318,6 +308,7 @@ class Synchronizer {
error.code = 'cannotSync';
throw error;
}
const content = await ItemClass.serializeForSync(local);
await this.api().put(path, content);
} catch (error) {
if (error && error.code === 'cannotSync') {
@ -598,7 +589,7 @@ class Synchronizer {
}
}
if (masterKeysAfter) {
if (masterKeysAfter && this.autoStartDecryptionWorker_) {
DecryptionWorker.instance().scheduleStart();
}