mirror of https://github.com/laurent22/joplin.git synced 2024-12-24 10:27:10 +02:00

All: Fix integration test harness issues. (#2723)

mic704b 2020-03-16 13:30:54 +11:00 committed by GitHub
parent 0863f0d564
commit 8cd26c9380
13 changed files with 424 additions and 291 deletions

View File

@@ -218,7 +218,7 @@ async function main() {
logger.info(await execCommand(client, 'version'));
await db.open({ name: `${client.profileDir}/database.sqlite` });
BaseModel.db_ = db;
BaseModel.setDb(db);
await Setting.load();
let onlyThisTest = 'testMv';

View File

@@ -17,15 +17,10 @@ const { ALL_NOTES_FILTER_ID } = require('lib/reserved-ids.js');
// - inject the event to be tested
// - check the resulting application state
//
// Important: sleep must be used after TestApp dispatch to allow the async processing
// to complete
// Important: TestApp.wait() must be used after TestApp dispatch to allow the async
// processing to complete
//
// use this until the JavaScript arr.flat() function works in Travis
function flatten(arr) {
return (arr.reduce((acc, val) => acc.concat(val), []));
}
let testApp = null;
describe('integration_ShowAllNotes', function() {
@@ -46,14 +41,15 @@ describe('integration_ShowAllNotes', function() {
// setup
const folders = await createNTestFolders(3);
await Folder.moveToFolder(id(folders[2]), id(folders[1])); // subfolder
await time.msleep(100);
await testApp.wait();
const notes0 = await createNTestNotes(3, folders[0]);
const notes1 = await createNTestNotes(3, folders[1]);
const notes2 = await createNTestNotes(3, folders[2]);
await testApp.wait();
// TEST ACTION: View all-notes
testApp.dispatch({ type: 'SMART_FILTER_SELECT', id: ALL_NOTES_FILTER_ID });
await time.msleep(100);
await testApp.wait();
// check: all the notes are shown
const state = testApp.store().getState();
@@ -67,10 +63,12 @@ describe('integration_ShowAllNotes', function() {
const folders = await createNTestFolders(2);
const notes0 = await createNTestNotes(3, folders[0]);
const notes1 = await createNTestNotes(3, folders[1]);
await testApp.wait();
testApp.dispatch({ type: 'FOLDER_SELECT', id: id(folders[1]) });
await time.msleep(100);
await testApp.wait();
testApp.dispatch({ type: 'NOTE_SELECT', id: id(notes1[1]) });
await time.msleep(100);
await testApp.wait();
// check the state is set up as expected
let state = testApp.store().getState();
@@ -81,7 +79,7 @@ describe('integration_ShowAllNotes', function() {
// TEST ACTION: View all-notes
testApp.dispatch({ type: 'SMART_FILTER_SELECT', id: ALL_NOTES_FILTER_ID });
await time.msleep(100);
await testApp.wait();
// check: all the notes are shown
state = testApp.store().getState();
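The pattern these tests converge on is always the same: dispatch, then block until the app's async processing has drained, instead of sleeping for a fixed interval. A minimal before/after sketch (noteId stands in for any note id):

// Before: a fixed delay, which is slow and racy on a loaded CI machine.
testApp.dispatch({ type: 'NOTE_SELECT', id: noteId });
await time.msleep(100);

// After: block until the middleware reports no pending work.
testApp.dispatch({ type: 'NOTE_SELECT', id: noteId });
await testApp.wait();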

View File

@@ -1,44 +1,12 @@
/* eslint-disable no-unused-vars */
require('app-module-path').addPath(__dirname);
const { setupDatabaseAndSynchronizer, switchClient, asyncTest, TestApp } = require('test-utils.js');
const { setupDatabaseAndSynchronizer, switchClient, asyncTest, createNTestFolders, createNTestNotes, createNTestTags, TestApp } = require('test-utils.js');
const Setting = require('lib/models/Setting.js');
const Folder = require('lib/models/Folder.js');
const Note = require('lib/models/Note.js');
const Tag = require('lib/models/Tag.js');
const { time } = require('lib/time-utils.js');
async function createNTestFolders(n) {
const folders = [];
for (let i = 0; i < n; i++) {
const folder = await Folder.save({ title: 'folder' });
folders.push(folder);
}
return folders;
}
async function createNTestNotes(n, folder) {
const notes = [];
for (let i = 0; i < n; i++) {
const note = await Note.save({ title: 'note', parent_id: folder.id, is_conflict: 0 });
notes.push(note);
}
return notes;
}
async function createNTestTags(n) {
const tags = [];
for (let i = 0; i < n; i++) {
const tag = await Tag.save({ title: 'tag' });
tags.push(tag);
}
return tags;
}
// use this until the JavaScript arr.flat() function works in Travis
function flatten(arr) {
return (arr.reduce((acc, val) => acc.concat(val), []));
}
let testApp = null;
describe('integration_TagList', function() {
@@ -61,20 +29,16 @@ describe('integration_TagList', function() {
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
const tags = await createNTestTags(3);
await testApp.wait();
await Tag.addNote(tags[2].id, notes[2].id);
await testApp.wait();
testApp.dispatch({
type: 'FOLDER_SELECT',
id: folders[0].id,
});
await time.msleep(100);
testApp.dispatch({ type: 'FOLDER_SELECT', id: folders[0].id });
await testApp.wait();
testApp.dispatch({
type: 'NOTE_SELECT',
id: notes[2].id,
});
await time.msleep(100);
testApp.dispatch({ type: 'NOTE_SELECT', id: notes[2].id });
await testApp.wait();
// check the tag list is correct
let state = testApp.store().getState();
@@ -82,11 +46,8 @@ describe('integration_TagList', function() {
expect(state.selectedNoteTags[0].id).toEqual(tags[2].id);
// delete the note
testApp.dispatch({
type: 'NOTE_DELETE',
id: notes[2].id,
});
await time.msleep(100);
testApp.dispatch({ type: 'NOTE_DELETE', id: notes[2].id });
await testApp.wait();
// check the tag list is updated
state = testApp.store().getState();
@@ -99,22 +60,18 @@ describe('integration_TagList', function() {
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
const tags = await createNTestTags(3);
await testApp.wait();
await Tag.addNote(tags[1].id, notes[1].id);
await Tag.addNote(tags[0].id, notes[0].id);
await Tag.addNote(tags[2].id, notes[0].id);
await testApp.wait();
testApp.dispatch({
type: 'FOLDER_SELECT',
id: folders[0].id,
});
await time.msleep(100);
testApp.dispatch({ type: 'FOLDER_SELECT', id: folders[0].id });
await testApp.wait();
testApp.dispatch({
type: 'NOTE_SELECT',
id: notes[1].id,
});
await time.msleep(100);
testApp.dispatch({ type: 'NOTE_SELECT', id: notes[1].id });
await testApp.wait();
// check the tag list is correct
let state = testApp.store().getState();
@@ -122,11 +79,8 @@ describe('integration_TagList', function() {
expect(state.selectedNoteTags[0].id).toEqual(tags[1].id);
// delete the note
testApp.dispatch({
type: 'NOTE_DELETE',
id: notes[1].id,
});
await time.msleep(100);
testApp.dispatch({ type: 'NOTE_DELETE', id: notes[1].id });
await testApp.wait();
// check the tag list is updated
state = testApp.store().getState();

View File

@@ -50,9 +50,11 @@ describe('services_Revision', function() {
expect(rev2.title).toBe('hello welcome');
expect(rev2.author).toBe('');
await time.sleep(0.5);
const time_rev2 = Date.now();
await time.msleep(10);
await service.deleteOldRevisions(400);
const ttl = Date.now() - time_rev2 - 1;
await service.deleteOldRevisions(ttl);
const revisions2 = await Revision.allByType(BaseModel.TYPE_NOTE, n1_v1.id);
expect(revisions2.length).toBe(0);
}));
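These tests previously passed hard-coded TTLs (400, 1000, 2000 ms) to deleteOldRevisions(), which fails when the machine runs slower than expected. The fix computes the TTL from a timestamp captured just after the revision that must expire, so the cutoff is exact however long the intervening steps take. The recurring shape:

const time_rev = Date.now(); // captured right after the revision that should be deleted
await time.msleep(10);
// ... further edits and collectRevisions() calls ...
const ttl = Date.now() - time_rev - 1; // the revision is now strictly older than the cutoff
await service.deleteOldRevisions(ttl);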
@@ -63,12 +65,16 @@ describe('services_Revision', function() {
const n1_v0 = await Note.save({ title: '' });
const n1_v1 = await Note.save({ id: n1_v0.id, title: 'hello' });
await service.collectRevisions();
await time.sleep(1);
const time_v1 = Date.now();
await time.msleep(100);
const n1_v2 = await Note.save({ id: n1_v1.id, title: 'hello welcome' });
await service.collectRevisions();
expect((await Revision.allByType(BaseModel.TYPE_NOTE, n1_v1.id)).length).toBe(2);
await service.deleteOldRevisions(1000);
const ttl = Date.now() - time_v1 - 1;
await service.deleteOldRevisions(ttl);
const revisions = await Revision.allByType(BaseModel.TYPE_NOTE, n1_v1.id);
expect(revisions.length).toBe(1);
@@ -82,15 +88,20 @@ describe('services_Revision', function() {
const n1_v0 = await Note.save({ title: '' });
const n1_v1 = await Note.save({ id: n1_v0.id, title: 'one' });
await service.collectRevisions();
await time.sleep(1);
const time_v1 = Date.now();
await time.msleep(100);
const n1_v2 = await Note.save({ id: n1_v1.id, title: 'one two' });
await service.collectRevisions();
await time.sleep(1);
const time_v2 = Date.now();
await time.msleep(100);
const n1_v3 = await Note.save({ id: n1_v1.id, title: 'one two three' });
await service.collectRevisions();
{
await service.deleteOldRevisions(2000);
const ttl = Date.now() - time_v1 - 1;
await service.deleteOldRevisions(ttl);
const revisions = await Revision.allByType(BaseModel.TYPE_NOTE, n1_v1.id);
expect(revisions.length).toBe(2);
@@ -102,7 +113,8 @@ describe('services_Revision', function() {
}
{
await service.deleteOldRevisions(1000);
const ttl = Date.now() - time_v2 - 1;
await service.deleteOldRevisions(ttl);
const revisions = await Revision.allByType(BaseModel.TYPE_NOTE, n1_v1.id);
expect(revisions.length).toBe(1);
@@ -119,14 +131,17 @@ describe('services_Revision', function() {
const n2_v0 = await Note.save({ title: '' });
const n2_v1 = await Note.save({ id: n2_v0.id, title: 'note 2' });
await service.collectRevisions();
await time.sleep(1);
const time_n2_v1 = Date.now();
await time.msleep(100);
const n1_v2 = await Note.save({ id: n1_v1.id, title: 'note 1 (v2)' });
const n2_v2 = await Note.save({ id: n2_v1.id, title: 'note 2 (v2)' });
await service.collectRevisions();
expect((await Revision.all()).length).toBe(4);
await service.deleteOldRevisions(1000);
const ttl = Date.now() - time_n2_v1 - 1;
await service.deleteOldRevisions(ttl);
{
const revisions = await Revision.allByType(BaseModel.TYPE_NOTE, n1_v1.id);
@@ -183,7 +198,7 @@ describe('services_Revision', function() {
const n1 = await Note.save({ title: 'hello' });
const noteId = n1.id;
await sleep(0.1);
await time.msleep(100);
// Set the interval in such a way that the note is considered an old one.
Setting.setValue('revisionService.oldNoteInterval', 50);
@@ -332,7 +347,9 @@ describe('services_Revision', function() {
const n1_v0 = await Note.save({ title: '' });
const n1_v1 = await Note.save({ id: n1_v0.id, title: 'hello' });
await revisionService().collectRevisions(); // REV 1
await time.sleep(0.5);
const timeRev1 = Date.now();
await time.msleep(100);
const n1_v2 = await Note.save({ id: n1_v1.id, title: 'hello welcome' });
await revisionService().collectRevisions(); // REV 2
@@ -341,7 +358,8 @@ describe('services_Revision', function() {
const revisions = await Revision.all();
await Revision.save({ id: revisions[0].id, encryption_applied: 1 });
await revisionService().deleteOldRevisions(500);
const ttl = Date.now() - timeRev1 - 1;
await revisionService().deleteOldRevisions(ttl);
expect((await Revision.all()).length).toBe(2);
}));
@@ -353,7 +371,9 @@ describe('services_Revision', function() {
const n1_v0 = await Note.save({ title: '' });
const n1_v1 = await Note.save({ id: n1_v0.id, title: 'hello' });
await revisionService().collectRevisions(); // REV 1
await time.sleep(0.5);
const timeRev1 = Date.now();
await time.msleep(100);
const n1_v2 = await Note.save({ id: n1_v1.id, title: 'hello welcome' });
await revisionService().collectRevisions(); // REV 2
@@ -362,12 +382,14 @@ describe('services_Revision', function() {
const revisions = await Revision.all();
await Revision.save({ id: revisions[1].id, encryption_applied: 1 });
await revisionService().deleteOldRevisions(500);
let ttl = Date.now() - timeRev1 - 1;
await revisionService().deleteOldRevisions(ttl);
expect((await Revision.all()).length).toBe(2);
await Revision.save({ id: revisions[1].id, encryption_applied: 0 });
await revisionService().deleteOldRevisions(500);
ttl = Date.now() - timeRev1 - 1;
await revisionService().deleteOldRevisions(ttl);
expect((await Revision.all()).length).toBe(1);
}));
@@ -406,17 +428,20 @@ describe('services_Revision', function() {
const n1_v0 = await Note.save({ title: '' });
const n1_v1 = await Note.save({ id: n1_v0.id, title: 'hello' });
await revisionService().collectRevisions(); // REV 1
const timeRev1 = Date.now();
await time.sleep(2);
const timeRev2 = Date.now();
const n1_v2 = await Note.save({ id: n1_v0.id, title: 'hello 2' });
await revisionService().collectRevisions(); // REV 2
expect((await Revision.all()).length).toBe(2);
Setting.setValue('revisionService.intervalBetweenRevisions', 1000);
const interval = Date.now() - timeRev1 + 1;
Setting.setValue('revisionService.intervalBetweenRevisions', interval);
const n1_v3 = await Note.save({ id: n1_v0.id, title: 'hello 3' });
await revisionService().collectRevisions(); // No rev because there's already a rev that is less than 1000 ms old
await revisionService().collectRevisions(); // No rev because time since last rev is less than the required 'interval between revisions'
expect(Date.now() - interval < timeRev2).toBe(true); // check the computer is not too slow for this test
expect((await Revision.all()).length).toBe(2);
}));
});

View File

@@ -126,6 +126,10 @@ function sleep(n) {
});
}
function currentClientId() {
return currentClient_;
}
async function switchClient(id) {
if (!databases_[id]) throw new Error(`Call setupDatabaseAndSynchronizer(${id}) first!!`);
@@ -133,12 +137,7 @@ async function switchClient(id) {
await Setting.saveAll();
currentClient_ = id;
BaseModel.db_ = databases_[id];
Folder.db_ = databases_[id];
Note.db_ = databases_[id];
BaseItem.db_ = databases_[id];
Setting.db_ = databases_[id];
Resource.db_ = databases_[id];
BaseModel.setDb(databases_[id]);
BaseItem.encryptionService_ = encryptionServices_[id];
Resource.encryptionService_ = encryptionServices_[id];
@@ -190,6 +189,7 @@ async function setupDatabase(id = null) {
Setting.cache_ = null;
if (databases_[id]) {
BaseModel.setDb(databases_[id]);
await clearDatabase(id);
await Setting.load();
if (!Setting.value('clientId')) Setting.setValue('clientId', uuid.create());
@@ -208,7 +208,7 @@ async function setupDatabase(id = null) {
databases_[id].setLogger(dbLogger);
await databases_[id].open({ name: filePath });
BaseModel.db_ = databases_[id];
BaseModel.setDb(databases_[id]);
await Setting.load();
if (!Setting.value('clientId')) Setting.setValue('clientId', uuid.create());
}
@@ -221,6 +221,8 @@ function resourceDir(id = null) {
async function setupDatabaseAndSynchronizer(id = null) {
if (id === null) id = currentClient_;
BaseService.logger_ = logger;
await setupDatabase(id);
EncryptionService.instance_ = null;
@@ -439,6 +441,7 @@ async function createNTestFolders(n) {
for (let i = 0; i < n; i++) {
const folder = await Folder.save({ title: 'folder' });
folders.push(folder);
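// short pause so consecutive items get distinct timestamps (presumably to keep time-ordered queries deterministic)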
await time.msleep(10);
}
return folders;
}
@@ -449,10 +452,12 @@ async function createNTestNotes(n, folder, tagIds = null, title = 'note') {
const title_ = n > 1 ? `${title}${i}` : title;
const note = await Note.save({ title: title_, parent_id: folder.id, is_conflict: 0 });
notes.push(note);
await time.msleep(10);
}
if (tagIds) {
for (let i = 0; i < notes.length; i++) {
await Tag.setNoteTagsByIds(notes[i].id, tagIds);
await time.msleep(10);
}
}
return notes;
@@ -463,25 +468,43 @@ async function createNTestTags(n) {
for (let i = 0; i < n; i++) {
const tag = await Tag.save({ title: 'tag' });
tags.push(tag);
await time.msleep(10);
}
return tags;
}
// Integration test application
// Application for feature integration testing
class TestApp extends BaseApplication {
constructor() {
constructor(hasGui = true) {
super();
this.hasGui_ = hasGui;
this.middlewareCalls_ = [];
this.logger_ = super.logger();
}
hasGui() {
return this.hasGui_;
}
async start(argv) {
await clearDatabase(); // not sure why we need this as we use our own database
this.logger_.info('Test app starting...');
argv = argv.concat(['--profile', `tests-build/profile-${uuid.create()}`]);
if (!argv.includes('--profile')) {
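// no profile supplied by the caller, so generate a throwaway one for this run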
argv = argv.concat(['--profile', `tests-build/profile/${uuid.create()}`]);
}
argv = await super.start(['',''].concat(argv));
// For now, disable sync and encryption to avoid spurious intermittent failures
// caused by them interrupting processing and causing delays.
Setting.setValue('sync.interval', 0);
Setting.setValue('encryption.enabled', false);
this.initRedux();
Setting.dispatchUpdateAll();
await time.msleep(100);
await ItemChange.waitForAllSaved();
await this.wait();
this.logger_.info('Test app started...');
}
async generalMiddleware(store, next, action) {
@@ -493,7 +516,7 @@ class TestApp extends BaseApplication {
}
}
async waitForMiddleware_() {
async wait() {
return new Promise((resolve) => {
const iid = setInterval(() => {
if (!this.middlewareCalls_.length) {
@@ -504,13 +527,18 @@ class TestApp extends BaseApplication {
});
}
async profileDir() {
return await Setting.value('profileDir');
}
async destroy() {
await this.waitForMiddleware_();
this.logger_.info('Test app stopping...');
await this.wait();
await ItemChange.waitForAllSaved();
this.deinitRedux();
await super.destroy();
await time.msleep(100);
}
}
module.exports = { kvStore, resourceService, allSyncTargetItemsEncrypted, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, asyncTest, id, ids, sortedIds, at, createNTestNotes, createNTestFolders, createNTestTags, TestApp };
module.exports = { kvStore, resourceService, allSyncTargetItemsEncrypted, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, asyncTest, currentClientId, id, ids, sortedIds, at, createNTestNotes, createNTestFolders, createNTestTags, TestApp };
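Taken together, a test file drives this harness roughly as follows (a sketch only; the argv contents and test names are placeholders, not taken from this commit):

const { asyncTest, createNTestFolders, TestApp } = require('test-utils.js');

let testApp = null;

describe('integration_Example', function() {

	beforeEach(async (done) => {
		testApp = new TestApp();
		await testApp.start([]); // placeholder argv; real tests may pass CLI flags
		done();
	});

	afterEach(async (done) => {
		if (testApp !== null) await testApp.destroy();
		testApp = null;
		done();
	});

	it('should reflect dispatched actions in the store', asyncTest(async () => {
		const folders = await createNTestFolders(2);
		await testApp.wait();
		testApp.dispatch({ type: 'FOLDER_SELECT', id: folders[0].id });
		await testApp.wait();
		const state = testApp.store().getState();
		expect(state.selectedFolderId).toEqual(folders[0].id);
	}));
});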

View File

@@ -34,6 +34,7 @@ const EncryptionService = require('lib/services/EncryptionService');
const ResourceFetcher = require('lib/services/ResourceFetcher');
const SearchEngineUtils = require('lib/services/SearchEngineUtils');
const RevisionService = require('lib/services/RevisionService');
const ResourceService = require('lib/services/ResourceService');
const DecryptionWorker = require('lib/services/DecryptionWorker');
const BaseService = require('lib/services/BaseService');
const SearchEngine = require('lib/services/SearchEngine');
@@ -63,12 +64,24 @@ class BaseApplication {
await SearchEngine.instance().destroy();
await DecryptionWorker.instance().destroy();
await FoldersScreenUtils.cancelTimers();
await BaseItem.revisionService_.cancelTimers();
await ResourceService.instance().cancelTimers();
await reg.cancelTimers();
this.eventEmitter_.removeAllListeners();
BaseModel.db_ = null;
KvStore.instance_ = null;
BaseModel.setDb(null);
reg.setDb(null);
BaseItem.revisionService_ = null;
RevisionService.instance_ = null;
ResourceService.instance_ = null;
ResourceService.isRunningInBackground_ = false;
ResourceFetcher.instance_ = null;
EncryptionService.instance_ = null;
DecryptionWorker.instance_ = null;
this.logger_.info('Base application terminated...');
this.logger_ = null;
this.dbLogger_ = null;
this.eventEmitter_ = null;
@@ -320,16 +333,7 @@ class BaseApplication {
}
async decryptionWorker_resourceMetadataButNotBlobDecrypted() {
this.scheduleAutoAddResources();
}
scheduleAutoAddResources() {
if (this.scheduleAutoAddResourcesIID_) return;
this.scheduleAutoAddResourcesIID_ = setTimeout(() => {
this.scheduleAutoAddResourcesIID_ = null;
ResourceFetcher.instance().autoAddResources();
}, 1000);
ResourceFetcher.instance().scheduleAutoAddResources();
}
reducerActionToString(action) {
@@ -645,7 +649,7 @@ class BaseApplication {
// if (Setting.value('env') === 'dev') await this.database_.clearForTesting();
reg.setDb(this.database_);
BaseModel.db_ = this.database_;
BaseModel.setDb(this.database_);
await Setting.load();

View File

@@ -12,6 +12,10 @@ class BaseModel {
throw new Error('Must be overridden');
}
static setDb(db) {
this.db_ = db;
}
static addModelMd(model) {
if (!model) return model;
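Funnelling every assignment through BaseModel.setDb() leans on how static properties resolve in JavaScript: a subclass that never shadows db_ reads BaseModel's copy through the static prototype chain, so one call updates every model at once. A standalone sketch of that lookup (illustrative, not Joplin code):

class Base {
	static setDb(db) { this.db_ = db; } // called as Base.setDb(), so this === Base
	static db() { return this.db_; }
}
class Note extends Base {}

Base.setDb('database-A');
console.log(Note.db()); // 'database-A', found on Base via the static prototype chain

// The old per-class assignments (Folder.db_ = ..., Note.db_ = ...) created
// shadowing own-properties, so a later Base-level swap could be missed:
Note.db_ = 'database-B';
console.log(Note.db()); // 'database-B', and Base.db_ is shadowed from then on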

View File

@@ -56,140 +56,152 @@ reg.syncTarget = (syncTargetId = null) => {
// sure it gets synced. So we wait for the current sync operation to
// finish (if one is running), then we trigger a sync just after.
reg.waitForSyncFinishedThenSync = async () => {
const synchronizer = await reg.syncTarget().synchronizer();
await synchronizer.waitForSyncToFinish();
await reg.scheduleSync(0);
};
reg.scheduleSync_ = async (delay = null, syncOptions = null) => {
if (delay === null) delay = 1000 * 10;
if (syncOptions === null) syncOptions = {};
let promiseResolve = null;
const promise = new Promise((resolve) => {
promiseResolve = resolve;
});
if (reg.scheduleSyncId_) {
clearTimeout(reg.scheduleSyncId_);
reg.scheduleSyncId_ = null;
reg.waitForReSyncCalls_.push(true);
try {
const synchronizer = await reg.syncTarget().synchronizer();
await synchronizer.waitForSyncToFinish();
await reg.scheduleSync(0);
} finally {
reg.waitForReSyncCalls_.pop();
}
reg.logger().info('Scheduling sync operation...');
if (Setting.value('env') === 'dev' && delay !== 0) {
reg.logger().info('Schedule sync DISABLED!!!');
return;
}
const timeoutCallback = async () => {
reg.scheduleSyncId_ = null;
reg.logger().info('Preparing scheduled sync');
const syncTargetId = Setting.value('sync.target');
if (!(await reg.syncTarget(syncTargetId).isAuthenticated())) {
reg.logger().info('Synchroniser is missing credentials - manual sync required to authenticate.');
promiseResolve();
return;
}
try {
const sync = await reg.syncTarget(syncTargetId).synchronizer();
const contextKey = `sync.${syncTargetId}.context`;
let context = Setting.value(contextKey);
try {
context = context ? JSON.parse(context) : {};
} catch (error) {
// Clearing the context is inefficient since it means all items are going to be re-downloaded;
// however, it won't result in duplicate items since the synchroniser is going to compare each
// item to the current state.
reg.logger().warn(`Could not parse JSON sync context ${contextKey}:`, context);
reg.logger().info('Clearing context and starting from scratch');
context = null;
}
try {
reg.logger().info('Starting scheduled sync');
const options = Object.assign({}, syncOptions, { context: context });
if (!options.saveContextHandler) {
options.saveContextHandler = newContext => {
Setting.setValue(contextKey, JSON.stringify(newContext));
};
}
const newContext = await sync.start(options);
Setting.setValue(contextKey, JSON.stringify(newContext));
} catch (error) {
if (error.code == 'alreadyStarted') {
reg.logger().info(error.message);
} else {
promiseResolve();
throw error;
}
}
} catch (error) {
reg.logger().info('Could not run background sync:');
reg.logger().info(error);
// Special case to display OneDrive Business error. This is the full error that's received when trying to use a OneDrive Business account:
//
// {"error":"invalid_client","error_description":"AADSTS50011: The reply address 'http://localhost:1917' does not match the reply addresses configured for
// the application: 'cbabb902-d276-4ea4-aa88-062a5889d6dc'. More details: not specified\r\nTrace ID: 6e63dac6-8b37-47e2-bd1b-4768f8713400\r\nCorrelation
// ID: acfd6503-8d97-4349-ae2e-e7a19dd7b6bc\r\nTimestamp: 2017-12-01 13:35:55Z","error_codes":[50011],"timestamp":"2017-12-01 13:35:55Z","trace_id":
// "6e63dac6-8b37-47e2-bd1b-4768f8713400","correlation_id":"acfd6503-8d97-4349-ae2e-e7a19dd7b6bc"}: TOKEN: null Error: {"error":"invalid_client",
// "error_description":"AADSTS50011: The reply address 'http://localhost:1917' does not match the reply addresses configured for the application:
// 'cbabb902-d276-4ea4-aa88-062a5889d6dc'. More details: not specified\r\nTrace ID: 6e63dac6-8b37-47e2-bd1b-4768f8713400\r\nCorrelation ID
// acfd6503-8d97-4349-ae2e-e7a19dd7b6bc\r\nTimestamp: 2017-12-01 13:35:55Z","error_codes":[50011],"timestamp":"2017-12-01 13:35:55Z","trace_id":
// "6e63dac6-8b37-47e2-bd1b-4768f8713400","correlation_id":"acfd6503-8d97-4349-ae2e-e7a19dd7b6bc"}
if (error && error.message && error.message.indexOf('"invalid_client"') >= 0) {
reg.showErrorMessageBox(_('Could not synchronize with OneDrive.\n\nThis error often happens when using OneDrive for Business, which unfortunately cannot be supported.\n\nPlease consider using a regular OneDrive account.'));
}
}
reg.setupRecurrentSync();
promiseResolve();
};
if (delay === 0) {
timeoutCallback();
} else {
reg.scheduleSyncId_ = setTimeout(timeoutCallback, delay);
}
return promise;
};
reg.scheduleSync = async (delay = null, syncOptions = null) => {
reg.syncCalls_.push(true);
reg.schedSyncCalls_.push(true);
try {
await reg.scheduleSync_(delay, syncOptions);
if (delay === null) delay = 1000 * 10;
if (syncOptions === null) syncOptions = {};
let promiseResolve = null;
const promise = new Promise((resolve) => {
promiseResolve = resolve;
});
if (reg.scheduleSyncId_) {
clearTimeout(reg.scheduleSyncId_);
reg.scheduleSyncId_ = null;
}
reg.logger().info('Scheduling sync operation...', delay);
if (Setting.value('env') === 'dev' && delay !== 0) {
reg.logger().info('Schedule sync DISABLED!!!');
return;
}
const timeoutCallback = async () => {
reg.timerCallbackCalls_.push(true);
try {
reg.scheduleSyncId_ = null;
reg.logger().info('Preparing scheduled sync');
const syncTargetId = Setting.value('sync.target');
if (!(await reg.syncTarget(syncTargetId).isAuthenticated())) {
reg.logger().info('Synchroniser is missing credentials - manual sync required to authenticate.');
promiseResolve();
return;
}
try {
const sync = await reg.syncTarget(syncTargetId).synchronizer();
const contextKey = `sync.${syncTargetId}.context`;
let context = Setting.value(contextKey);
try {
context = context ? JSON.parse(context) : {};
} catch (error) {
// Clearing the context is inefficient since it means all items are going to be re-downloaded;
// however, it won't result in duplicate items since the synchroniser is going to compare each
// item to the current state.
reg.logger().warn(`Could not parse JSON sync context ${contextKey}:`, context);
reg.logger().info('Clearing context and starting from scratch');
context = null;
}
try {
reg.logger().info('Starting scheduled sync');
const options = Object.assign({}, syncOptions, { context: context });
if (!options.saveContextHandler) {
options.saveContextHandler = newContext => {
Setting.setValue(contextKey, JSON.stringify(newContext));
};
}
const newContext = await sync.start(options);
Setting.setValue(contextKey, JSON.stringify(newContext));
} catch (error) {
if (error.code == 'alreadyStarted') {
reg.logger().info(error.message);
} else {
promiseResolve();
throw error;
}
}
} catch (error) {
reg.logger().info('Could not run background sync:');
reg.logger().info(error);
// Special case to display OneDrive Business error. This is the full error that's received when trying to use a OneDrive Business account:
//
// {"error":"invalid_client","error_description":"AADSTS50011: The reply address 'http://localhost:1917' does not match the reply addresses configured for
// the application: 'cbabb902-d276-4ea4-aa88-062a5889d6dc'. More details: not specified\r\nTrace ID: 6e63dac6-8b37-47e2-bd1b-4768f8713400\r\nCorrelation
// ID: acfd6503-8d97-4349-ae2e-e7a19dd7b6bc\r\nTimestamp: 2017-12-01 13:35:55Z","error_codes":[50011],"timestamp":"2017-12-01 13:35:55Z","trace_id":
// "6e63dac6-8b37-47e2-bd1b-4768f8713400","correlation_id":"acfd6503-8d97-4349-ae2e-e7a19dd7b6bc"}: TOKEN: null Error: {"error":"invalid_client",
// "error_description":"AADSTS50011: The reply address 'http://localhost:1917' does not match the reply addresses configured for the application:
// 'cbabb902-d276-4ea4-aa88-062a5889d6dc'. More details: not specified\r\nTrace ID: 6e63dac6-8b37-47e2-bd1b-4768f8713400\r\nCorrelation ID
// acfd6503-8d97-4349-ae2e-e7a19dd7b6bc\r\nTimestamp: 2017-12-01 13:35:55Z","error_codes":[50011],"timestamp":"2017-12-01 13:35:55Z","trace_id":
// "6e63dac6-8b37-47e2-bd1b-4768f8713400","correlation_id":"acfd6503-8d97-4349-ae2e-e7a19dd7b6bc"}
if (error && error.message && error.message.indexOf('"invalid_client"') >= 0) {
reg.showErrorMessageBox(_('Could not synchronize with OneDrive.\n\nThis error often happens when using OneDrive for Business, which unfortunately cannot be supported.\n\nPlease consider using a regular OneDrive account.'));
}
}
reg.setupRecurrentSync();
promiseResolve();
} finally {
reg.timerCallbackCalls_.pop();
}
};
if (delay === 0) {
timeoutCallback();
} else {
reg.scheduleSyncId_ = setTimeout(timeoutCallback, delay);
}
return promise;
} finally {
reg.syncCalls_.pop();
reg.schedSyncCalls_.pop();
}
};
reg.setupRecurrentSync = () => {
if (reg.recurrentSyncId_) {
shim.clearInterval(reg.recurrentSyncId_);
reg.recurrentSyncId_ = null;
}
reg.setupRecurrentCalls_.push(true);
if (!Setting.value('sync.interval')) {
reg.logger().debug('Recurrent sync is disabled');
} else {
reg.logger().debug(`Setting up recurrent sync with interval ${Setting.value('sync.interval')}`);
if (Setting.value('env') === 'dev') {
reg.logger().info('Recurrent sync operation DISABLED!!!');
return;
try {
if (reg.recurrentSyncId_) {
shim.clearInterval(reg.recurrentSyncId_);
reg.recurrentSyncId_ = null;
}
reg.recurrentSyncId_ = shim.setInterval(() => {
reg.logger().info('Running background sync on timer...');
reg.scheduleSync(0);
}, 1000 * Setting.value('sync.interval'));
if (!Setting.value('sync.interval')) {
reg.logger().debug('Recurrent sync is disabled');
} else {
reg.logger().debug(`Setting up recurrent sync with interval ${Setting.value('sync.interval')}`);
if (Setting.value('env') === 'dev') {
reg.logger().info('Recurrent sync operation DISABLED!!!');
return;
}
reg.recurrentSyncId_ = shim.setInterval(() => {
reg.logger().info('Running background sync on timer...');
reg.scheduleSync(0);
}, 1000 * Setting.value('sync.interval'));
}
} finally {
reg.setupRecurrentCalls_.pop();
}
};
@@ -201,15 +213,26 @@ reg.db = () => {
return reg.db_;
};
reg.cancelTimers = async () => {
reg.cancelTimers_ = () => {
if (reg.recurrentSyncId_) {
clearTimeout(this.recurrentSyncId_);
shim.clearInterval(reg.recurrentSyncId_);
reg.recurrentSyncId_ = null;
}
if (reg.scheduleSyncId_) {
clearTimeout(reg.scheduleSyncId_);
reg.scheduleSyncId_ = null;
}
};
reg.cancelTimers = async () => {
reg.logger().info('Cancelling sync timers');
reg.cancelTimers_();
return new Promise((resolve) => {
const iid = setInterval(() => {
if (!reg.syncCalls_.length) {
clearInterval(iid);
const iid = setInterval(() => {
// ensure processing complete
if (!reg.setupRecurrentCalls_.length && !reg.schedSyncCalls_.length && !reg.timerCallbackCalls_.length && !reg.waitForReSyncCalls_.length) {
reg.cancelTimers_();
clearInterval(iid);
resolve();
}
}, 100);
@@ -217,5 +240,9 @@ reg.cancelTimers = async () => {
};
reg.syncCalls_ = [];
reg.schedSyncCalls_ = [];
reg.waitForReSyncCalls_ = [];
reg.setupRecurrentCalls_ = [];
reg.timerCallbackCalls_ = [];
module.exports = { reg };
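The fix repeated across registry.js, ResourceFetcher, ResourceService and RevisionService is one bookkeeping idea: every async entry point pushes a token onto a call-tracking array inside try/finally, and teardown polls until all the arrays are empty. Distilled (the names here are illustrative, not the Joplin API):

class Service {
	constructor() {
		this.activeCalls_ = [];
	}

	async doWork() {
		this.activeCalls_.push(true);
		try {
			// ... the actual async work, which may await many times ...
		} finally {
			this.activeCalls_.pop(); // runs even if the work throws or returns early
		}
	}

	// Resolves once no doWork() call is in flight, polling every 100ms,
	// mirroring reg.cancelTimers() and TestApp.wait() above.
	async waitForAllFinished() {
		return new Promise((resolve) => {
			const iid = setInterval(() => {
				if (!this.activeCalls_.length) {
					clearInterval(iid);
					resolve();
				}
			}, 100);
		});
	}
}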

View File

@@ -21,6 +21,7 @@ class ResourceFetcher extends BaseService {
this.maxDownloads_ = 3;
this.addingResources_ = false;
this.eventEmitter_ = new EventEmitter();
this.autoAddResourcesCalls_ = [];
}
static instance() {
@@ -197,7 +198,12 @@ class ResourceFetcher extends BaseService {
async waitForAllFinished() {
return new Promise((resolve) => {
const iid = setInterval(() => {
if (!this.updateReportIID_ && !this.scheduleQueueProcessIID_ && !this.addingResources_ && !this.queue_.length && !Object.getOwnPropertyNames(this.fetchingItems_).length) {
if (!this.updateReportIID_ &&
!this.scheduleQueueProcessIID_ &&
!this.queue_.length &&
!this.autoAddResourcesCalls_.length &&
!Object.getOwnPropertyNames(this.fetchingItems_).length) {
clearInterval(iid);
resolve();
}
@@ -206,25 +212,31 @@ class ResourceFetcher extends BaseService {
}
async autoAddResources(limit = null) {
if (limit === null) limit = 10;
this.autoAddResourcesCalls_.push(true);
try {
if (limit === null) limit = 10;
if (this.addingResources_) return;
this.addingResources_ = true;
if (this.addingResources_) return;
this.addingResources_ = true;
this.logger().info(`ResourceFetcher: Auto-add resources: Mode: ${Setting.value('sync.resourceDownloadMode')}`);
this.logger().info(`ResourceFetcher: Auto-add resources: Mode: ${Setting.value('sync.resourceDownloadMode')}`);
let count = 0;
const resources = await Resource.needToBeFetched(Setting.value('sync.resourceDownloadMode'), limit);
for (let i = 0; i < resources.length; i++) {
const added = this.queueDownload_(resources[i].id);
if (added) count++;
let count = 0;
const resources = await Resource.needToBeFetched(Setting.value('sync.resourceDownloadMode'), limit);
for (let i = 0; i < resources.length; i++) {
const added = this.queueDownload_(resources[i].id);
if (added) count++;
}
this.logger().info(`ResourceFetcher: Auto-added resources: ${count}`);
const errorCount = await Resource.downloadStatusCounts(Resource.FETCH_STATUS_ERROR);
if (errorCount) this.dispatch({ type: 'SYNC_HAS_DISABLED_SYNC_ITEMS' });
} finally {
this.addingResources_ = false;
this.autoAddResourcesCalls_.pop();
}
this.logger().info(`ResourceFetcher: Auto-added resources: ${count}`);
this.addingResources_ = false;
const errorCount = await Resource.downloadStatusCounts(Resource.FETCH_STATUS_ERROR);
if (errorCount) this.dispatch({ type: 'SYNC_HAS_DISABLED_SYNC_ITEMS' });
}
async start() {
@@ -244,6 +256,15 @@ class ResourceFetcher extends BaseService {
}, 100);
}
scheduleAutoAddResources() {
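// debounced: if a pass is already scheduled, that pending pass will also cover this request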
if (this.scheduleAutoAddResourcesIID_) return;
this.scheduleAutoAddResourcesIID_ = setTimeout(() => {
this.scheduleAutoAddResourcesIID_ = null;
ResourceFetcher.instance().autoAddResources();
}, 1000);
}
async fetchAll() {
await Resource.resetStartedFetchStatus();
this.autoAddResources(null);
@@ -255,10 +276,13 @@ class ResourceFetcher extends BaseService {
clearTimeout(this.scheduleQueueProcessIID_);
this.scheduleQueueProcessIID_ = null;
}
if (this.scheduleAutoAddResourcesIID_) {
clearTimeout(this.scheduleAutoAddResourcesIID_);
this.scheduleAutoAddResourcesIID_ = null;
}
await this.waitForAllFinished();
this.eventEmitter_ = null;
ResourceFetcher.instance_ = null;
return await this.waitForAllFinished();
}
}

View File

@@ -11,6 +11,20 @@ const ItemChangeUtils = require('lib/services/ItemChangeUtils');
const { sprintf } = require('sprintf-js');
class ResourceService extends BaseService {
constructor() {
super();
this.maintenanceCalls_ = [];
this.maintenanceTimer1_ = null;
this.maintenanceTimer2_ = null;
}
static instance() {
if (this.instance_) return this.instance_;
this.instance_ = new ResourceService();
return this.instance_;
}
async indexNoteResources() {
this.logger().info('ResourceService::indexNoteResources: Start');
@@ -131,24 +145,49 @@ class ResourceService extends BaseService {
}
async maintenance() {
await this.indexNoteResources();
await this.deleteOrphanResources();
this.maintenanceCalls_.push(true);
try {
await this.indexNoteResources();
await this.deleteOrphanResources();
} finally {
this.maintenanceCalls_.pop();
}
}
static runInBackground() {
if (this.isRunningInBackground_) return;
this.isRunningInBackground_ = true;
const service = new ResourceService();
const service = this.instance();
setTimeout(() => {
service.maintenanceTimer1_ = setTimeout(() => {
service.maintenance();
}, 1000 * 30);
shim.setInterval(() => {
service.maintenanceTimer2_ = shim.setInterval(() => {
service.maintenance();
}, 1000 * 60 * 60 * 4);
}
async cancelTimers() {
if (this.maintenanceTimer1_) {
clearTimeout(this.maintenanceTimer1_);
this.maintenanceTimer1_ = null;
}
if (this.maintenanceTimer2_) {
shim.clearInterval(this.maintenanceTimer2_);
this.maintenanceTimer2_ = null;
}
return new Promise((resolve) => {
const iid = setInterval(() => {
if (!this.maintenanceCalls_.length) {
clearInterval(iid);
resolve();
}
}, 100);
});
}
}
module.exports = ResourceService;

View File

@@ -19,6 +19,10 @@ class RevisionService extends BaseService {
// the original note is saved. The goal is to have at least one revision in case the note
// is deleted or modified as a result of a bug or user mistake.
this.isOldNotesCache_ = {};
this.maintenanceCalls_ = [];
this.maintenanceTimer1_ = null;
this.maintenanceTimer2_ = null;
}
static instance() {
@@ -235,22 +239,27 @@ class RevisionService extends BaseService {
}
async maintenance() {
const startTime = Date.now();
this.logger().info('RevisionService::maintenance: Starting...');
this.maintenanceCalls_.push(true);
try {
const startTime = Date.now();
this.logger().info('RevisionService::maintenance: Starting...');
if (!Setting.value('revisionService.enabled')) {
this.logger().info('RevisionService::maintenance: Service is disabled');
// We act as if we had processed all the latest changes so that they can be cleaned up
// later on by ItemChangeUtils.deleteProcessedChanges().
Setting.setValue('revisionService.lastProcessedChangeId', await ItemChange.lastChangeId());
await this.deleteOldRevisions(Setting.value('revisionService.ttlDays') * 24 * 60 * 60 * 1000);
} else {
this.logger().info('RevisionService::maintenance: Service is enabled');
await this.collectRevisions();
await this.deleteOldRevisions(Setting.value('revisionService.ttlDays') * 24 * 60 * 60 * 1000);
if (!Setting.value('revisionService.enabled')) {
this.logger().info('RevisionService::maintenance: Service is disabled');
// We act as if we had processed all the latest changes so that they can be cleaned up
// later on by ItemChangeUtils.deleteProcessedChanges().
Setting.setValue('revisionService.lastProcessedChangeId', await ItemChange.lastChangeId());
await this.deleteOldRevisions(Setting.value('revisionService.ttlDays') * 24 * 60 * 60 * 1000);
} else {
this.logger().info('RevisionService::maintenance: Service is enabled');
await this.collectRevisions();
await this.deleteOldRevisions(Setting.value('revisionService.ttlDays') * 24 * 60 * 60 * 1000);
this.logger().info(`RevisionService::maintenance: Done in ${Date.now() - startTime}ms`);
}
} finally {
this.maintenanceCalls_.pop();
}
this.logger().info(`RevisionService::maintenance: Done in ${Date.now() - startTime}ms`);
}
runInBackground(collectRevisionInterval = null) {
@@ -261,14 +270,34 @@ class RevisionService extends BaseService {
this.logger().info(`RevisionService::runInBackground: Starting background service with revision collection interval ${collectRevisionInterval}`);
setTimeout(() => {
this.maintenanceTimer1_ = setTimeout(() => {
this.maintenance();
}, 1000 * 4);
shim.setInterval(() => {
this.maintenanceTimer2_ = shim.setInterval(() => {
this.maintenance();
}, collectRevisionInterval);
}
async cancelTimers() {
if (this.maintenanceTimer1_) {
clearTimeout(this.maintenanceTimer1_);
this.maintenanceTimer1_ = null;
}
if (this.maintenanceTimer2_) {
shim.clearInterval(this.maintenanceTimer2_);
this.maintenanceTimer2_ = null;
}
return new Promise((resolve) => {
const iid = setInterval(() => {
if (!this.maintenanceCalls_.length) {
clearInterval(iid);
resolve();
}
}, 100);
});
}
}
module.exports = RevisionService;
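For reference, these cancelTimers() methods slot into the teardown order this commit sets up in BaseApplication.destroy() (shown earlier): stop and drain each background service first, then null out the singletons and the db reference.

// teardown sequence, as wired up in BaseApplication.destroy() above:
await BaseItem.revisionService_.cancelTimers(); // drain RevisionService maintenance
await ResourceService.instance().cancelTimers(); // drain ResourceService maintenance
await reg.cancelTimers(); // drain scheduled syncs
BaseModel.setDb(null); // only now is shared state cleared
reg.setDb(null);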

View File

@@ -424,6 +424,7 @@ class SearchEngine {
const iid = setInterval(() => {
if (!this.syncCalls_.length) {
clearInterval(iid);
SearchEngine.instance_ = null;
resolve();
}
}, 100);

View File

@@ -412,7 +412,7 @@ async function initialize(dispatch) {
FoldersScreenUtils.dispatch = dispatch;
BaseSyncTarget.dispatch = dispatch;
NavService.dispatch = dispatch;
BaseModel.db_ = db;
BaseModel.setDb(db);
KvStore.instance().setDb(reg.db());