
All: Added backend for Dropbox support

This commit is contained in:
Laurent Cozic 2018-03-24 19:35:10 +00:00
parent 74b83eb71e
commit 0f4324c2f8
11 changed files with 441 additions and 32 deletions

View File

@@ -19,3 +19,4 @@ tests/sync
out.txt
linkToLocal.sh
yarn-error.log
tests/support/dropbox-auth.txt

View File

@@ -339,22 +339,17 @@ describe('Synchronizer', function() {
it('should delete local folder', asyncTest(async () => {
let folder1 = await Folder.save({ title: "folder1" });
let folder2 = await Folder.save({ title: "folder2" });
await synchronizer().start();
let context1 = await synchronizer().start();
await switchClient(2);
await synchronizer().start();
await sleep(0.1);
let context2 = await synchronizer().start();
await Folder.delete(folder2.id);
await synchronizer().start();
await synchronizer().start({ context: context2 });
await switchClient(1);
await synchronizer().start();
await synchronizer().start({ context: context1 });
let items = await allItems();
await localItemsSameAsRemote(items, expect);
}));
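
Note (not part of this commit): the updated test threads the context returned by synchronizer().start() back into later runs, so each client only fetches changes made since its previous sync. A minimal sketch of the pattern inside an asyncTest body, using the same test helpers:

	// Each start() call returns a sync context (for Dropbox, essentially the delta cursor)
	// that the next call passes back in via the options object.
	let context = await synchronizer().start();
	// ...make local changes...
	context = await synchronizer().start({ context: context });
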
@@ -438,7 +433,7 @@ describe('Synchronizer', function() {
expect(items1.length).toBe(0);
expect(items1.length).toBe(items2.length);
}));
}));
it('should handle conflict when remote note is deleted then local note is modified', asyncTest(async () => {
let folder1 = await Folder.save({ title: "folder1" });

View File

@@ -16,6 +16,7 @@ const { FileApi } = require('lib/file-api.js');
const { FileApiDriverMemory } = require('lib/file-api-driver-memory.js');
const { FileApiDriverLocal } = require('lib/file-api-driver-local.js');
const { FileApiDriverWebDav } = require('lib/file-api-driver-webdav.js');
const { FileApiDriverDropbox } = require('lib/file-api-driver-dropbox.js');
const BaseService = require('lib/services/BaseService.js');
const { FsDriverNode } = require('lib/fs-driver-node.js');
const { time } = require('lib/time-utils.js');
@@ -25,9 +26,11 @@ const SyncTargetMemory = require('lib/SyncTargetMemory.js');
const SyncTargetFilesystem = require('lib/SyncTargetFilesystem.js');
const SyncTargetOneDrive = require('lib/SyncTargetOneDrive.js');
const SyncTargetNextcloud = require('lib/SyncTargetNextcloud.js');
const SyncTargetDropbox = require('lib/SyncTargetDropbox.js');
const EncryptionService = require('lib/services/EncryptionService.js');
const DecryptionWorker = require('lib/services/DecryptionWorker.js');
const WebDavApi = require('lib/WebDavApi');
const DropboxApi = require('lib/DropboxApi');
let databases_ = [];
let synchronizers_ = [];
@@ -51,10 +54,12 @@ SyncTargetRegistry.addClass(SyncTargetMemory);
SyncTargetRegistry.addClass(SyncTargetFilesystem);
SyncTargetRegistry.addClass(SyncTargetOneDrive);
SyncTargetRegistry.addClass(SyncTargetNextcloud);
SyncTargetRegistry.addClass(SyncTargetDropbox);
// const syncTargetId_ = SyncTargetRegistry.nameToId("nextcloud");
const syncTargetId_ = SyncTargetRegistry.nameToId("memory");
// const syncTargetId_ = SyncTargetRegistry.nameToId("memory");
//const syncTargetId_ = SyncTargetRegistry.nameToId('filesystem');
const syncTargetId_ = SyncTargetRegistry.nameToId('dropbox');
const syncDir = __dirname + '/../tests/sync';
const sleepTime = syncTargetId_ == SyncTargetRegistry.nameToId('filesystem') ? 1001 : 100;//400;
@@ -247,25 +252,15 @@ function fileApi() {
const api = new WebDavApi(options);
fileApi_ = new FileApi('', new FileApiDriverWebDav(api));
} else if (syncTargetId_ == SyncTargetRegistry.nameToId('dropbox')) {
const api = new DropboxApi();
const authTokenPath = __dirname + '/support/dropbox-auth.txt';
const authToken = fs.readFileSync(authTokenPath, 'utf8');
if (!authToken) throw new Error('Dropbox auth token missing in ' + authTokenPath);
api.setAuthToken(authToken);
fileApi_ = new FileApi('', new FileApiDriverDropbox(api));
}
// } else if (syncTargetId == Setting.SYNC_TARGET_ONEDRIVE) {
// let auth = require('./onedrive-auth.json');
// if (!auth) {
// const oneDriveApiUtils = new OneDriveApiNodeUtils(oneDriveApi);
// auth = await oneDriveApiUtils.oauthDance();
// fs.writeFileSync('./onedrive-auth.json', JSON.stringify(auth));
// process.exit(1);
// } else {
// auth = JSON.parse(auth);
// }
// // const oneDriveApiUtils = new OneDriveApiNodeUtils(reg.oneDriveApi());
// // const auth = await oneDriveApiUtils.oauthDance(this);
// // Setting.setValue('sync.3.auth', auth ? JSON.stringify(auth) : null);
// // if (!auth) return;
// }
fileApi_.setLogger(logger);
fileApi_.setSyncTargetId(syncTargetId_);
fileApi_.requestRepeatCount_ = 0;

View File

@@ -66,6 +66,10 @@ class BaseSyncTarget {
return this.fileApi_;
}
fileApiSync() {
return this.fileApi_;
}
// Usually each sync target should create and set up its own file API via initFileApi()
// but for testing purposes it might be convenient to provide it here so that multiple
// clients can share and sync to the same file api (see test-utils.js)
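
Note (not part of this commit): fileApiSync() exposes the already-created file API synchronously, which is what lets SyncTargetDropbox.isAuthenticated() below check for an auth token without awaiting initFileApi(). A hedged illustration, assuming the target's file API has already been created or injected:

	// Illustration only: reads the cached file API instead of awaiting fileApi().
	const target = new SyncTargetDropbox(db); // db: an initialised Joplin database, assumed here
	// ...after the file API has been set up or shared in (as test-utils.js does)...
	const authenticated = !!(target.fileApiSync() && target.fileApiSync().driver().api().authToken());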

View File

@@ -0,0 +1,154 @@
const { Logger } = require('lib/logger.js');
const { shim } = require('lib/shim.js');
const JoplinError = require('lib/JoplinError');
const URL = require('url-parse');
const { time } = require('lib/time-utils');

class DropboxApi {

	constructor(options) {
		this.logger_ = new Logger();
		this.options_ = options;
		this.authToken_ = null;
	}

	setLogger(l) {
		this.logger_ = l;
	}

	logger() {
		return this.logger_;
	}

	authToken() {
		return this.authToken_; // Must be "Bearer XXXXXXXXXXXXXXXXXX"
	}

	setAuthToken(v) {
		this.authToken_ = v;
	}

	baseUrl(endPointFormat) {
		if (['content', 'api'].indexOf(endPointFormat) < 0) throw new Error('Invalid end point format: ' + endPointFormat);
		return 'https://' + endPointFormat + '.dropboxapi.com/2';
	}

	requestToCurl_(url, options) {
		let output = [];
		output.push('curl');
		if (options.method) output.push('-X ' + options.method);
		if (options.headers) {
			for (let n in options.headers) {
				if (!options.headers.hasOwnProperty(n)) continue;
				output.push('-H ' + "'" + n + ': ' + options.headers[n] + "'");
			}
		}
		if (options.body) output.push('--data ' + '"' + options.body + '"');
		output.push(url);
		return output.join(' ');
	}

	async exec(method, path = '', body = null, headers = null, options = null) {
		if (headers === null) headers = {};
		if (options === null) options = {};
		if (!options.target) options.target = 'string';

		const authToken = this.authToken();
		if (authToken) headers['Authorization'] = authToken;

		const endPointFormat = ['files/upload', 'files/download'].indexOf(path) >= 0 ? 'content' : 'api';

		if (endPointFormat === 'api') {
			headers['Content-Type'] = 'application/json';
			if (body && typeof body === 'object') body = JSON.stringify(body);
		} else {
			headers['Content-Type'] = 'application/octet-stream';
		}

		const fetchOptions = {};
		fetchOptions.headers = headers;
		fetchOptions.method = method;
		if (options.path) fetchOptions.path = options.path;
		if (body) fetchOptions.body = body;

		const url = this.baseUrl(endPointFormat) + '/' + path;

		let tryCount = 0;

		while (true) {
			try {
				let response = null;

				// console.info(this.requestToCurl_(url, fetchOptions));

				const now = Date.now();
				// console.info(now + ': ' + method + ' ' + url);

				if (options.source == 'file' && (method == 'POST' || method == 'PUT')) {
					response = await shim.uploadBlob(url, fetchOptions);
				} else if (options.target == 'string') {
					response = await shim.fetch(url, fetchOptions);
				} else { // file
					response = await shim.fetchBlob(url, fetchOptions);
				}

				const responseText = await response.text();

				// console.info(now + ': Response: ' + responseText);

				let responseJson_ = null;
				const loadResponseJson = () => {
					if (!responseText) return null;
					if (responseJson_) return responseJson_;
					try {
						responseJson_ = JSON.parse(responseText);
					} catch (error) {
						return { error: responseText };
					}
					return responseJson_;
				}

				// Creates an error object with as much data as possible as it will appear in the log, which will make debugging easier
				const newError = (message) => {
					const json = loadResponseJson();
					let code = '';
					if (json && json.error_summary) {
						code = json.error_summary;
					}

					// Gives a shorter response for error messages. Useful for cases where a full HTML page is accidentally loaded instead of
					// JSON. That way the error message will still show there's a problem but without filling up the log or screen.
					const shortResponseText = (responseText + '').substr(0, 1024);
					return new JoplinError(method + ' ' + path + ': ' + message + ' (' + response.status + '): ' + shortResponseText, code);
				}

				if (!response.ok) {
					// When using fetchBlob we only get a string (not xml or json) back
					if (options.target === 'file') throw newError('fetchBlob error');
					throw newError('Error');
				}

				if (options.responseFormat === 'text') return responseText;

				return loadResponseJson();
			} catch (error) {
				tryCount++;
				if (error.code.indexOf('too_many_write_operations') >= 0) {
					this.logger().warn('too_many_write_operations ' + tryCount);
					if (tryCount >= 3) {
						throw error;
					}
					await time.sleep(tryCount * 2);
				} else {
					throw error;
				}
			}
		}
	}

}

module.exports = DropboxApi;
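
Usage sketch (not part of this commit; the token is a placeholder): exec() picks the api vs content endpoint from the path, JSON-encodes object bodies, and retries on Dropbox's too_many_write_operations error, so a caller only needs a token and an endpoint name:

	const DropboxApi = require('lib/DropboxApi');

	async function listRootExample() {
		const api = new DropboxApi();
		api.setAuthToken('Bearer XXXXXXXXXXXXXXXXXX'); // placeholder; must include the "Bearer " prefix (see authToken() above)
		const listing = await api.exec('POST', 'files/list_folder', { path: '' });
		return listing.entries.map(entry => entry.name);
	}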

View File

@@ -0,0 +1,54 @@
const BaseSyncTarget = require('lib/BaseSyncTarget.js');
const { _ } = require('lib/locale.js');
const DropboxApi = require('lib/DropboxApi');
const Setting = require('lib/models/Setting.js');
const { parameters } = require('lib/parameters.js');
const { FileApi } = require('lib/file-api.js');
const { Synchronizer } = require('lib/synchronizer.js');
const { FileApiDriverDropbox } = require('lib/file-api-driver-dropbox.js');

class SyncTargetDropbox extends BaseSyncTarget {

	static id() {
		return 7;
	}

	constructor(db, options = null) {
		super(db, options);
		this.api_ = null;
	}

	static targetName() {
		return 'dropbox';
	}

	static label() {
		return _('Dropbox');
	}

	isAuthenticated() {
		const f = this.fileApiSync();
		return f && f.driver().api().authToken();
	}

	syncTargetId() {
		return SyncTargetDropbox.id();
	}

	async initFileApi() {
		const api = new DropboxApi();
		const appDir = '';
		const fileApi = new FileApi(appDir, new FileApiDriverDropbox(api));
		fileApi.setSyncTargetId(this.syncTargetId());
		fileApi.setLogger(this.logger());
		return fileApi;
	}

	async initSynchronizer() {
		if (!this.isAuthenticated()) throw new Error('User is not authenticated');
		return new Synchronizer(this.db(), await this.fileApi(), Setting.value('appType'));
	}

}

module.exports = SyncTargetDropbox;
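
How the target is expected to be wired in, based on the test-utils.js changes above (a sketch, not part of this commit; the registry require path is assumed):

	const SyncTargetRegistry = require('lib/SyncTargetRegistry.js');
	const SyncTargetDropbox = require('lib/SyncTargetDropbox.js');

	SyncTargetRegistry.addClass(SyncTargetDropbox);
	const dropboxTargetId = SyncTargetRegistry.nameToId('dropbox'); // resolves to SyncTargetDropbox.id(), i.e. 7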

View File

@@ -0,0 +1,199 @@
const { time } = require('lib/time-utils.js');
const { shim } = require('lib/shim');
const JoplinError = require('lib/JoplinError');
const { basicDelta } = require('lib/file-api');

class FileApiDriverDropbox {

	constructor(api) {
		this.api_ = api;
	}

	api() {
		return this.api_;
	}

	requestRepeatCount() {
		return 3;
	}

	makePath_(path) {
		if (!path) return '';
		return '/' + path;
	}

	async stat(path) {
		try {
			const metadata = await this.api().exec('POST', 'files/get_metadata', {
				path: this.makePath_(path),
			});

			return this.metadataToStat_(metadata, path);
		} catch (error) {
			if (error.code.indexOf('not_found') >= 0) {
				// ignore
			} else {
				throw error;
			}
		}
	}

	metadataToStat_(md, path) {
		const output = {
			path: path,
			updated_time: md.server_modified ? new Date(md.server_modified) : new Date(),
			isDir: md['.tag'] === 'folder',
		};

		if (md['.tag'] === 'deleted') output.isDeleted = true;

		return output;
	}

	metadataToStats_(mds) {
		const output = [];
		for (let i = 0; i < mds.length; i++) {
			output.push(this.metadataToStat_(mds[i], mds[i].name));
		}
		return output;
	}

	async setTimestamp(path, timestampMs) {
		throw new Error('Not implemented'); // Not needed anymore
	}

	async delta(path, options) {
		const context = options ? options.context : null;
		let cursor = context ? context.cursor : null;

		const urlPath = cursor ? 'files/list_folder/continue' : 'files/list_folder';
		const body = cursor ? { cursor: cursor } : { path: this.makePath_(path), include_deleted: true };

		const response = await this.api().exec('POST', urlPath, body);

		const output = {
			items: this.metadataToStats_(response.entries),
			hasMore: response.has_more,
			context: { cursor: response.cursor },
		}

		return output;

		// TODO: handle error - reset cursor
	}

	async list(path, options) {
		let response = await this.api().exec('POST', 'files/list_folder', {
			path: this.makePath_(path),
		});

		let output = this.metadataToStats_(response.entries);

		while (response.has_more) {
			response = await this.api().exec('POST', 'files/list_folder/continue', {
				cursor: response.cursor,
			});

			output = output.concat(this.metadataToStats_(response.entries));
		}

		return {
			items: output,
			hasMore: false,
			context: { cursor: response.cursor },
		};
	}

	async get(path, options) {
		if (!options) options = {};
		if (!options.responseFormat) options.responseFormat = 'text';

		try {
			const response = await this.api().exec('POST', 'files/download', null, {
				'Dropbox-API-Arg': JSON.stringify({ "path": this.makePath_(path) }),
			}, options);

			return response;
		} catch (error) {
			if (error.code.indexOf('not_found') >= 0) {
				return null;
			} else {
				throw error;
			}
		}
	}

	async mkdir(path) {
		try {
			await this.api().exec('POST', 'files/create_folder_v2', {
				path: this.makePath_(path),
			});
		} catch (error) {
			if (error.code.indexOf('path/conflict') >= 0) {
				// Ignore
			} else {
				throw error;
			}
		}
	}

	async put(path, content, options = null) {
		// See https://github.com/facebook/react-native/issues/14445#issuecomment-352965210
		if (typeof content === 'string') content = Buffer.from(content, 'utf8')

		await this.api().exec('POST', 'files/upload', content, {
			'Dropbox-API-Arg': JSON.stringify({
				path: this.makePath_(path),
				mode: 'overwrite',
				mute: true, // Don't send a notification to user since there can be many of these updates
			})}, options);
	}

	async delete(path) {
		try {
			await this.api().exec('POST', 'files/delete_v2', {
				path: this.makePath_(path),
			});
		} catch (error) {
			if (error.code.indexOf('not_found') >= 0) {
				// ignore
			} else {
				throw error;
			}
		}
	}

	async move(oldPath, newPath) {
		throw new Error('Not supported');
	}

	format() {
		throw new Error('Not supported');
	}

	async clearRoot() {
		const entries = await this.list('');
		const batchDelete = [];
		for (let i = 0; i < entries.items.length; i++) {
			batchDelete.push({ path: this.makePath_(entries.items[i].path) });
		}

		const response = await this.api().exec('POST', 'files/delete_batch', { entries: batchDelete });
		const jobId = response.async_job_id;

		while (true) {
			const check = await this.api().exec('POST', 'files/delete_batch/check', { async_job_id: jobId });
			if (check['.tag'] === 'complete') break;

			// It returns "failed" if it didn't work but anyway throw an error if it's anything other than complete or in_progress
			if (check['.tag'] !== 'in_progress') {
				throw new Error('Batch delete failed? ' + JSON.stringify(check));
			}
			await time.sleep(2);
		}
	}

}

module.exports = { FileApiDriverDropbox };
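
Usage sketch (not part of this commit): delta() wraps Dropbox's files/list_folder cursor paging in the generic context object, so a caller loops until hasMore is false and feeds each returned context back in:

	async function fetchAllChanges(driver, path) {
		let context = null;
		const items = [];
		while (true) {
			const page = await driver.delta(path, { context: context });
			items.push(...page.items);
			context = page.context; // { cursor: ... } consumed by files/list_folder/continue
			if (!page.hasMore) break;
		}
		return items;
	}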

View File

@@ -293,6 +293,7 @@ class FileApiDriverWebDav {
return response;
} catch (error) {
if (error.code !== 404) throw error;
return null;
}
}

View File

@@ -111,10 +111,9 @@ function shimInit() {
const urlParse = require('url').parse;
url = urlParse(url.trim());
const method = options.method ? options.method : 'GET';
const http = url.protocol.toLowerCase() == 'http:' ? require('follow-redirects').http : require('follow-redirects').https;
const headers = options.headers ? options.headers : {};
const method = options.method ? options.method : 'GET';
if (method != 'GET') throw new Error('Only GET is supported');
const filePath = options.path;
function makeResponse(response) {
@@ -143,7 +142,7 @@ function shimInit() {
// Note: relative paths aren't supported
const file = fs.createWriteStream(filePath);
const request = http.get(requestOptions, function(response) {
const request = http.request(requestOptions, function(response) {
response.pipe(file);
file.on('finish', function() {
@@ -157,6 +156,8 @@ function shimInit() {
fs.unlink(filePath);
reject(error);
});
request.end();
} catch(error) {
fs.unlink(filePath);
reject(error);
@@ -180,6 +181,8 @@ function shimInit() {
return Buffer.byteLength(string, 'utf-8');
}
shim.Buffer = Buffer;
}
module.exports = { shimInit };
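
Note on the http.get() to http.request() change above: unlike http.get(), http.request() does not send the request until end() is called, hence the added request.end(). A minimal illustration of the standard Node.js behaviour (not part of this commit):

	const http = require('follow-redirects').http;

	const request = http.request({ host: 'example.com', path: '/' }, function(response) {
		response.pipe(process.stdout);
	});
	request.end(); // http.get() would have called this implicitly; http.request() sends nothing without it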

View File

@@ -116,6 +116,8 @@ function shimInit() {
shim.stringByteLength = function(string) {
return Buffer.byteLength(string, 'utf-8');
}
shim.Buffer = Buffer;
}
module.exports = { shimInit };

View File

@@ -129,5 +129,6 @@ shim.clearInterval = function(id) {
shim.stringByteLength = function(string) { throw new Error('Not implemented'); }
shim.detectAndSetLocale = null;
shim.attachFileToNote = async (note, filePath) => {}
shim.Buffer = null;
module.exports = { shim };
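
Note (not part of this commit): shim.Buffer is null in this base shim and set to Buffer in the Node and React Native shims above, so platform-neutral code can convert strings to binary data before upload when a Buffer implementation exists. A hedged sketch of such a caller (toUploadBody is a hypothetical helper):

	const { shim } = require('lib/shim');

	function toUploadBody(content) {
		// Fall back to the raw string on platforms that expose no Buffer.
		if (typeof content === 'string' && shim.Buffer) return shim.Buffer.from(content, 'utf8');
		return content;
	}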