Mirror of https://github.com/laurent22/joplin.git (synced 2025-01-26 18:58:21 +02:00)

All: Optimised file sync logic so that it doesn't fetch the content of all the items on each sync. Also limit the number of items in a batch to 1000.

Commit: 1bfeed377a
Parent: 86eee376bb
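The change below makes the file-system sync target's delta() return changes in batches (capped at 1000 items) together with a context object, so the synchronizer no longer fetches the content of every remote item on each sync. As a rough sketch of how a caller pages through such an API (illustrative only: fetchAllChanges, driver, basePath and localItemIds are made-up names, while delta(), allItemIdsHandler, items, context and hasMore come from the diff):

// Illustrative only: pages through a driver's delta() until hasMore is false.
// `driver` stands in for a FileApiDriverLocal-like object; `localItemIds`
// stands in for the item IDs the real synchronizer would supply.
async function fetchAllChanges(driver, basePath, localItemIds) {
  let context = null;
  const allItems = [];

  while (true) {
    const page = await driver.delta(basePath, {
      context: context,
      allItemIdsHandler: async () => localItemIds,
    });

    allItems.push(...page.items); // at most 1000 items per call
    context = page.context; // carries timestamp, filesAtTimestamp, statsCache

    if (!page.hasMore) break;
  }

  return allItems;
}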
@@ -74,11 +74,11 @@ async function localItemsSameAsRemote(locals, expect) {
 expect(!!remote).toBe(true);
 if (!remote) continue;
 
-if (syncTargetId() == SyncTargetRegistry.nameToId('filesystem')) {
-expect(remote.updated_time).toBe(Math.floor(dbItem.updated_time / 1000) * 1000);
-} else {
-expect(remote.updated_time).toBe(dbItem.updated_time);
-}
+// if (syncTargetId() == SyncTargetRegistry.nameToId('filesystem')) {
+// expect(remote.updated_time).toBe(Math.floor(dbItem.updated_time / 1000) * 1000);
+// } else {
+// expect(remote.updated_time).toBe(dbItem.updated_time);
+// }
 
 let remoteContent = await fileApi().get(path);
 remoteContent = dbItem.type_ == BaseModel.TYPE_NOTE ? await Note.unserialize(remoteContent) : await Folder.unserialize(remoteContent);
@@ -67,21 +67,89 @@ class FileApiDriverLocal {
 }
 }
 
+contextFromOptions_(options) {
+let output = {
+timestamp: 0,
+filesAtTimestamp: [],
+statsCache: null,
+};
+
+if (!options || !options.context) return output;
+const d = new Date(options.context.timestamp);
+
+output.timestamp = isNaN(d.getTime()) ? 0 : options.context.timestamp;
+output.filesAtTimestamp = Array.isArray(options.context.filesAtTimestamp) ? options.context.filesAtTimestamp.slice() : [];
+output.statsCache = options.context && options.context.statsCache ? options.context.statsCache : null;
+
+return output;
+}
+
 async delta(path, options) {
+const outputLimit = 1000;
 const itemIds = await options.allItemIdsHandler();
 
 try {
-const stats = await this.fsDriver().readDirStats(path);
-let output = this.metadataFromStats_(stats);
+const context = this.contextFromOptions_(options);
+
+let newContext = {
+timestamp: context.timestamp,
+filesAtTimestamp: context.filesAtTimestamp.slice(),
+statsCache: context.statsCache,
+};
+
+// Stats are cached until all items have been processed (until hasMore is false)
+if (newContext.statsCache === null) {
+const stats = await this.fsDriver().readDirStats(path);
+newContext.statsCache = this.metadataFromStats_(stats);
+newContext.statsCache.sort(function(a, b) {
+return a.updated_time - b.updated_time;
+});
+}
+
+let output = [];
+
+// Find out which files have been changed since the last time. Note that we keep
+// both the timestamp of the most recent change, *and* the items that exactly match
+// this timestamp. This to handle cases where an item is modified while this delta
+// function is running. For example:
+// t0: Item 1 is changed
+// t0: Sync items - run delta function
+// t0: While delta() is running, modify Item 2
+// Since item 2 was modified within the same millisecond, it would be skipped in the
+// next sync if we relied exclusively on a timestamp.
+for (let i = 0; i < newContext.statsCache.length; i++) {
+const stat = newContext.statsCache[i];
+
+if (stat.isDir) continue;
+
+if (stat.updated_time < context.timestamp) continue;
+
+// Special case for items that exactly match the timestamp
+if (stat.updated_time === context.timestamp) {
+if (context.filesAtTimestamp.indexOf(stat.path) >= 0) continue;
+}
+
+if (stat.updated_time > newContext.timestamp) {
+newContext.timestamp = stat.updated_time;
+newContext.filesAtTimestamp = [];
+}
+
+newContext.filesAtTimestamp.push(stat.path);
+output.push(stat);
+
+if (output.length >= outputLimit) break;
+}
+
 if (!Array.isArray(itemIds)) throw new Error('Delta API not supported - local IDs must be provided');
 
 let deletedItems = [];
 for (let i = 0; i < itemIds.length; i++) {
+if (output.length + deletedItems.length >= outputLimit) break;
+
 const itemId = itemIds[i];
 let found = false;
-for (let j = 0; j < output.length; j++) {
-const item = output[j];
+for (let j = 0; j < newContext.statsCache.length; j++) {
+const item = newContext.statsCache[j];
 if (BaseItem.pathToId(item.path) == itemId) {
 found = true;
 break;
@@ -98,9 +166,12 @@ class FileApiDriverLocal {
 
 output = output.concat(deletedItems);
 
+const hasMore = output.length >= outputLimit;
+if (!hasMore) newContext.statsCache = null;
+
 return {
-hasMore: false,
-context: null,
+hasMore: hasMore,
+context: newContext,
 items: output,
 };
 } catch(error) {
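The comment block in the delta() hunk above explains why the context keeps both a timestamp and the list of files seen at exactly that timestamp. A self-contained sketch of that filter, with made-up stat values, shows the edge case it guards against: an item modified during the same millisecond as the last returned change would be skipped by a timestamp-only comparison.

// Sketch of the change-detection filter used above, applied to fake stats.
// Items at exactly context.timestamp are skipped only if they were already
// returned (listed in filesAtTimestamp); everything newer is returned.
function changedSince(stats, context) {
  const output = [];
  for (const stat of stats) {
    if (stat.isDir) continue;
    if (stat.updated_time < context.timestamp) continue;
    if (stat.updated_time === context.timestamp &&
        context.filesAtTimestamp.indexOf(stat.path) >= 0) continue;
    output.push(stat);
  }
  return output;
}

// The previous sync returned item1 at t=1000, so the saved context is
// { timestamp: 1000, filesAtTimestamp: ['item1.md'] }. item2 was modified
// during that same millisecond, after the directory had been read:
const stats = [
  { path: 'item1.md', updated_time: 1000, isDir: false },
  { path: 'item2.md', updated_time: 1000, isDir: false },
];
const context = { timestamp: 1000, filesAtTimestamp: ['item1.md'] };
console.log(changedSince(stats, context)); // -> only item2.md; a timestamp-only check would have skipped it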
@@ -330,7 +330,12 @@ class Synchronizer {
 // change is uniquely identified. Leaving it like this for now.
 
 if (canSync) {
-await this.api().setTimestamp(path, local.updated_time);
+// 2018-01-21: Setting timestamp is not needed because the delta() logic doesn't rely
+// on it (instead it uses a more reliable `context` object) and the itemsThatNeedSync loop
+// above also doesn't use it because it fetches the whole remote object and read the
+// more reliable 'updated_time' property. Basically remote.updated_time is deprecated.
+
+// await this.api().setTimestamp(path, local.updated_time);
 await ItemClass.saveSyncTime(syncTargetId, local, local.updated_time);
 }
 
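With setTimestamp() gone, the remote file's modification time no longer drives sync decisions; the synchronizer records the sync time locally via ItemClass.saveSyncTime() and compares it against the item's own updated_time. A hypothetical illustration of that comparison (getSyncTime is a stand-in helper, not an actual Joplin API):

// Hypothetical illustration: decides whether a local item still needs to be
// pushed to a sync target, using the locally saved sync time rather than the
// remote file's timestamp. `getSyncTime` is a stand-in, not a Joplin API.
async function needsSync(getSyncTime, syncTargetId, item) {
  const lastSyncTime = await getSyncTime(syncTargetId, item.id); // 0 if never synced
  return item.updated_time > lastSyncTime;
}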