Mirror of https://github.com/vrtmrz/obsidian-livesync.git
Synced 2025-01-29 18:53:43 +02:00
Refactored
This commit is contained in:
parent b35052a485
commit f8c1474700
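The refactor drops the Parallels() helper from ./lib/src/utils in favour of a counting semaphore (Semaphore): each file task acquires a slot before touching the vault or database and hands it back in a finally block, so only a bounded number of tasks run at once. The hunks below suggest Semaphore(limit).acquire(quantity, memo) resolves to a releaser callback once a slot is free. The real implementation lives in the src/lib submodule; the following is a rough, non-authoritative sketch of the idea only (the acquire signature is inferred from the call sites, not taken from that library):

type Releaser = () => void;

// Minimal counting semaphore for illustration; not the plugin's implementation.
const Semaphore = (limit: number) => {
    let running = 0;
    const waiting: (() => void)[] = [];
    const acquire = async (quantity = 1, _memo?: string): Promise<Releaser> => {
        while (running + quantity > limit) {
            // Park until a releaser wakes us, then re-check the limit (not strictly FIFO).
            await new Promise<void>(resolve => waiting.push(resolve));
        }
        running += quantity;
        let released = false;
        return () => {
            if (released) return; // releasing twice must not free extra slots
            released = true;
            running -= quantity;
            waiting.splice(0, quantity).forEach(wake => wake());
        };
    };
    return { acquire };
};

With a primitive like this, the old p.add(...) / await p.wait(limit) bookkeeping disappears: the throttling happens inside each task.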
src/lib (2 changes)
@@ -1 +1 @@
-Subproject commit 3ca623780c1c26b153fd663e997b53e32c4de021
+Subproject commit 5c01ce3262b285c79473b61cc524c51183d13af6
src/main.ts (90 changes)
@@ -17,8 +17,8 @@ import {
     setNoticeClass,
     NewNotice,
     getLocks,
-    Parallels,
     WrappedNotice,
+    Semaphore,
 } from "./lib/src/utils";
 import { Logger, setLogger } from "./lib/src/logger";
 import { LocalPouchDB } from "./LocalPouchDB";
@@ -784,27 +784,25 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
         return await runWithLock("batchSave", false, async () => {
             const batchItems = JSON.parse(JSON.stringify(this.batchFileChange)) as string[];
             this.batchFileChange = [];
-            const limit = 3;
-            const p = Parallels();
-            for (const e of batchItems) {
-                const w = (async () => {
+            const semaphore = Semaphore(3);
+
+            const batchProcesses = batchItems.map(e => (async (e) => {
+                const releaser = await semaphore.acquire(1, "batch");
                 try {
                     const f = this.app.vault.getAbstractFileByPath(normalizePath(e));
                     if (f && f instanceof TFile) {
                         await this.updateIntoDB(f);
                         Logger(`Batch save:${e}`);
                     }
                 } catch (ex) {
                     Logger(`Batch save error:${e}`, LOG_LEVEL.NOTICE);
                     Logger(ex, LOG_LEVEL.VERBOSE);
+                } finally {
+                    releaser();
                 }
-                })();
-                p.add(w);
-                await p.wait(limit)
-            }
-            this.refreshStatusText();
-            await p.all();
+            })(e))
+            await Promise.all(batchProcesses);

             this.refreshStatusText();
             return;
         });
@@ -1618,20 +1616,17 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
             const count = objects.length;
             Logger(procedurename);
             let i = 0;
-            const p = Parallels();
-            const limit = 10;
+            const semaphore = Semaphore(10);
+

             Logger(`${procedurename} exec.`);
-            for (const v of objects) {
             if (!this.localDatabase.isReady) throw Error("Database is not ready!");
-                const addProc = (p: () => Promise<void>): Promise<unknown> => {
-                    return p();
-                }
-                p.add(addProc(async () => {
+            const procs = objects.map(e => (async (v) => {
+                const releaser = await semaphore.acquire(1, procedurename);
                 try {
                     await callback(v);
                     i++;
                     if (i % 50 == 0) {
                         const notify = `${procedurename} : ${i}/${count}`;
                         if (showingNotice) {
                             Logger(notify, LOG_LEVEL.NOTICE, "syncAll");
@@ -1640,14 +1635,16 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
                         }
                         this.setStatusBarText(notify);
                     }
                 } catch (ex) {
                     Logger(`Error while ${procedurename}`, LOG_LEVEL.NOTICE);
                     Logger(ex);
-                }
-                }));
-                await p.wait(limit);
-            }
-            await p.all();
+                } finally {
+                    releaser();
+                }
+            }
+            )(e));
+            await Promise.all(procs);

             Logger(`${procedurename} done.`);
         };
+
@@ -2545,8 +2542,6 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
         const fileCount = allFileNames.length;
         let processed = 0;
         let filesChanged = 0;
-        const p = Parallels();
-        const limit = 10;
         // count updated files up as like this below:
         // .obsidian: 2
         // .obsidian/workspace: 1
@@ -2569,6 +2564,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
                 c = pieces.shift();
             }
         }
+        const p = [] as Promise<void>[];
+        const semaphore = Semaphore(15);
         // Cache update time information for files which have already been processed (mainly for files that were skipped due to the same content)
         let caches: { [key: string]: { storageMtime: number; docMtime: number } } = {};
         caches = await this.localDatabase.kvDB.get<{ [key: string]: { storageMtime: number; docMtime: number } }>("diff-caches-internal") || {};
@@ -2579,12 +2576,20 @@ export default class ObsidianLiveSyncPlugin extends Plugin {

             const fileOnStorage = files.find(e => e.path == filename);
             const fileOnDatabase = filesOnDB.find(e => e._id == filename2idInternalChunk(id2path(filename)));
-            const addProc = (p: () => Promise<void>): Promise<unknown> => {
-                return p();
+            const addProc = async (p: () => Promise<void>): Promise<void> => {
+                const releaser = await semaphore.acquire(1);
+                try {
+                    return p();
+                } catch (ex) {
+                    Logger("Some process failed", logLevel)
+                    Logger(ex);
+                } finally {
+                    releaser();
+                }
             }
             const cache = filename in caches ? caches[filename] : { storageMtime: 0, docMtime: 0 };

-            p.add(addProc(async () => {
+            p.push(addProc(async () => {
                 if (fileOnStorage && fileOnDatabase) {
                     // Both => Synchronize
                     if (fileOnDatabase.mtime == cache.docMtime && fileOnStorage.mtime == cache.storageMtime) {
@@ -2624,9 +2629,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
                     // Something corrupted?
                 }
             }));
-            await p.wait(limit);
         }
-        await p.all();
+        await Promise.all(p);
         await this.localDatabase.kvDB.set("diff-caches-internal", caches);

         // When files has been retreived from the database. they must be reloaded.
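All three refactored call sites in src/main.ts converge on the same shape: map every work item to an immediately invoked async function that acquires the semaphore, does its work inside try/catch, releases the slot in finally so a failure cannot leak it, and then awaits Promise.all over the whole batch. Stripped of plugin specifics, a minimal sketch of that shape (processItem and the limit of 3 are placeholders, and it reuses the illustrative Semaphore sketched above rather than the plugin's own):

// Distilled shape of the refactor; processItem stands in for updateIntoDB() / callback(v).
async function runWithLimit(items: string[], processItem: (item: string) => Promise<void>): Promise<void> {
    const semaphore = Semaphore(3); // mirrors the limit used in batchSave
    const tasks = items.map(item => (async () => {
        const release = await semaphore.acquire(1, "batch");
        try {
            await processItem(item);
        } catch (ex) {
            console.error(`Failed: ${item}`, ex);
        } finally {
            release(); // always return the slot, even after an error
        }
    })());
    await Promise.all(tasks);
}

Compared with the old loop, which awaited p.wait(limit) after enqueueing each item, the back-pressure now lives inside each task, so the enqueue step is trivial and the error handling sits next to the work it guards.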
@@ -9,8 +9,19 @@
         // "importsNotUsedAsValues": "error",
         "importHelpers": false,
         "alwaysStrict": true,
-        "lib": ["es2018", "DOM", "ES5", "ES6", "ES7"]
+        "lib": [
+            "es2018",
+            "DOM",
+            "ES5",
+            "ES6",
+            "ES7",
+            "es2019.array"
+        ]
     },
-    "include": ["**/*.ts"],
-    "exclude": ["pouchdb-browser-webpack"]
-}
+    "include": [
+        "**/*.ts"
+    ],
+    "exclude": [
+        "pouchdb-browser-webpack"
+    ]
+}
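The compiler-options hunk above also adds "es2019.array" to the lib array, which supplies the type declarations for Array.prototype.flat and flatMap. A small illustrative snippet (not taken from the plugin) of what that entry enables:

// Requires "es2019.array" (or a newer lib/target) for the flat()/flatMap() typings.
const nested: string[][] = [["a", "b"], ["c"]];
const flattened: string[] = nested.flat();                              // ["a", "b", "c"]
const decorated: string[] = nested.flatMap(xs => xs.map(x => x + "!")); // ["a!", "b!", "c!"]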