
New features:

- Remote database clean-ups are now detected automatically.
- A solution-selection dialogue is shown if synchronisation is rejected after the remote database has been cleaned up or rebuilt.
- While fetching or rebuilding, `Hidden file synchronisation` can be configured on the spot.
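
For orientation, here is a minimal sketch of how the new detection and solution dialogue fit together. The replicator flags (`remoteLockedAndDeviceNotAccepted`, `remoteCleaned`) and the helpers (`confirmWithMessage`, `localDatabaseCleanUp`, `balanceChunks`, `performRebuildDB`) are taken from the hunks below; the standalone wrapper function itself is only illustrative, since the real logic sits inside the plugin's openReplication handling:

import ObsidianLiveSyncPlugin from "./main";
import { confirmWithMessage } from "./dialogs";
import { localDatabaseCleanUp, balanceChunks, performRebuildDB } from "./utils";

// Illustrative wrapper only; in this commit the equivalent logic runs when
// openReplication() is rejected because the remote database is locked.
async function offerSolutionForRejectedSync(plugin: ObsidianLiveSyncPlugin) {
    if (!plugin.replicator.remoteLockedAndDeviceNotAccepted) return;
    if (plugin.replicator.remoteCleaned) {
        // The remote database has been cleaned up: offer a local clean-up or a full fetch.
        const choice = await confirmWithMessage(plugin, "Locked",
            "The remote database has been cleaned up.",
            ["Clean up", "Fetch again", "Dismiss"], "Dismiss", 10);
        if (choice == "Clean up") {
            await localDatabaseCleanUp(plugin, true, false); // purge unreferenced chunks locally
            await balanceChunks(plugin, false);              // exchange chunks the other side is missing
        } else if (choice == "Fetch again") {
            await performRebuildDB(plugin, "localOnly");     // discard local data and fetch everything again
        }
    } else {
        // The remote database has been rebuilt: fetching everything again is the offered solution.
        const choice = await confirmWithMessage(plugin, "Locked",
            "The remote database has been rebuilt.",
            ["Fetch again", "Dismiss"], "Dismiss", 10);
        if (choice == "Fetch again") await performRebuildDB(plugin, "localOnly");
    }
}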
vorotamoroz 2023-04-14 17:39:09 +09:00
parent fae0a9d76a
commit 09f35a2af4
7 changed files with 596 additions and 232 deletions

View File

@ -613,7 +613,7 @@ export class HiddenFileSync extends LiveSyncCommands {
showJSONMergeDialogAndMerge(docA: LoadedEntry, docB: LoadedEntry): Promise<boolean> {
return new Promise((res) => {
return runWithLock("conflict:merge-data", false, () => new Promise((res) => {
Logger("Opening data-merging dialog", LOG_LEVEL.VERBOSE);
const docs = [docA, docB];
const path = stripAllPrefixes(docA.path);
@ -624,6 +624,8 @@ export class HiddenFileSync extends LiveSyncCommands {
let needFlush = false;
if (!result && !keep) {
Logger(`Skipped merging: ${filename}`);
res(false);
return;
}
//Delete old revisions
if (result || keep) {
@ -665,7 +667,7 @@ export class HiddenFileSync extends LiveSyncCommands {
}
});
modal.open();
});
}));
}
async scanInternalFiles(): Promise<InternalFileInfo[]> {

View File

@ -6,6 +6,7 @@ import { askSelectString, askYesNo, askString } from "./utils";
import { decrypt, encrypt } from "./lib/src/e2ee_v2";
import { LiveSyncCommands } from "./LiveSyncCommands";
import { delay } from "./lib/src/utils";
import { confirmWithMessage } from "./dialogs";
export class SetupLiveSync extends LiveSyncCommands {
onunload() { }
@ -188,6 +189,54 @@ export class SetupLiveSync extends LiveSyncCommands {
this.plugin.settings.usePluginSync = false;
this.plugin.settings.autoSweepPlugins = false;
}
async askHiddenFileConfiguration(opt: { enableFetch?: boolean, enableOverwrite?: boolean }) {
this.plugin.addOnSetup.suspendExtraSync();
const message = `Would you like to enable \`Hidden File Synchronization\`?
${opt.enableFetch ? " - Fetch: Use files stored from other devices. \n" : ""}${opt.enableOverwrite ? "- Overwrite: Use files from this device. \n" : ""}- Keep it disabled: Do not use hidden file synchronization.
Of course, you can disable this feature later.`
const CHOICE_FETCH = "Fetch";
const CHOICE_OVERWRITE = "Overwrite";
const CHOICE_DISMISS = "keep it disabled";
const choices = [];
if (opt?.enableFetch) {
choices.push(CHOICE_FETCH);
}
if (opt?.enableOverwrite) {
choices.push(CHOICE_OVERWRITE);
}
choices.push(CHOICE_DISMISS);
const ret = await confirmWithMessage(this.plugin, "Hidden file sync", message, choices, CHOICE_DISMISS, 40);
if (ret == CHOICE_FETCH) {
await this.configureHiddenFileSync("FETCH");
} else if (ret == CHOICE_OVERWRITE) {
await this.configureHiddenFileSync("OVERWRITE");
} else if (ret == CHOICE_DISMISS) {
await this.configureHiddenFileSync("DISABLE");
}
}
async configureHiddenFileSync(mode: "FETCH" | "OVERWRITE" | "MERGE" | "DISABLE") {
this.plugin.addOnSetup.suspendExtraSync();
if (mode == "DISABLE") {
this.plugin.settings.syncInternalFiles = false;
await this.plugin.saveSettings();
return;
}
Logger("Gathering files for enabling Hidden File Sync", LOG_LEVEL.NOTICE);
if (mode == "FETCH") {
await this.plugin.addOnHiddenFileSync.syncInternalFilesAndDatabase("pullForce", true);
} else if (mode == "OVERWRITE") {
await this.plugin.addOnHiddenFileSync.syncInternalFilesAndDatabase("pushForce", true);
} else if (mode == "MERGE") {
await this.plugin.addOnHiddenFileSync.syncInternalFilesAndDatabase("safe", true);
}
this.plugin.settings.syncInternalFiles = true;
await this.plugin.saveSettings();
Logger(`Done! Restarting the app is strongly recommended!`, LOG_LEVEL.NOTICE);
}
suspendAllSync() {
this.plugin.settings.liveSync = false;
this.plugin.settings.periodicReplication = false;
@ -207,6 +256,9 @@ export class SetupLiveSync extends LiveSyncCommands {
this.plugin.isReady = true;
await delay(500);
await this.plugin.replicateAllFromServer(true);
await delay(1000);
await this.plugin.replicateAllFromServer(true);
await this.askHiddenFileConfiguration({ enableFetch: true });
}
async rebuildRemote() {
this.suspendExtraSync();
@ -215,6 +267,10 @@ export class SetupLiveSync extends LiveSyncCommands {
await this.plugin.tryResetRemoteDatabase();
await this.plugin.markRemoteLocked();
await delay(500);
await this.askHiddenFileConfiguration({ enableOverwrite: true });
await delay(1000);
await this.plugin.replicateAllToServer(true);
await delay(1000);
await this.plugin.replicateAllToServer(true);
}
async rebuildEverything() {
@ -227,6 +283,11 @@ export class SetupLiveSync extends LiveSyncCommands {
await this.plugin.tryResetRemoteDatabase();
await this.plugin.markRemoteLocked();
await delay(500);
await this.askHiddenFileConfiguration({ enableOverwrite: true });
await delay(1000);
await this.plugin.replicateAllToServer(true);
await delay(1000);
await this.plugin.replicateAllToServer(true);
}
}

View File

@ -1,5 +1,5 @@
import { App, PluginSettingTab, Setting, sanitizeHTMLToDom, RequestUrlParam, requestUrl, TextAreaComponent, MarkdownRenderer, stringifyYaml } from "./deps";
import { DEFAULT_SETTINGS, LOG_LEVEL, ObsidianLiveSyncSettings, ConfigPassphraseStore, RemoteDBSettings, NewEntry } from "./lib/src/types";
import { App, PluginSettingTab, Setting, sanitizeHTMLToDom, TextAreaComponent, MarkdownRenderer, stringifyYaml } from "./deps";
import { DEFAULT_SETTINGS, LOG_LEVEL, ObsidianLiveSyncSettings, ConfigPassphraseStore, RemoteDBSettings } from "./lib/src/types";
import { delay } from "./lib/src/utils";
import { Semaphore } from "./lib/src/semaphore";
import { versionNumberString2Number } from "./lib/src/strbin";
@ -7,27 +7,9 @@ import { Logger } from "./lib/src/logger";
import { checkSyncInfo, isCloudantURI } from "./lib/src/utils_couchdb.js";
import { testCrypt } from "./lib/src/e2ee_v2";
import ObsidianLiveSyncPlugin from "./main";
import { balanceChunks, localDatabaseCleanUp, performRebuildDB, remoteDatabaseCleanup, requestToCouchDB } from "./utils";
const _requestToCouchDB = async (baseUri: string, username: string, password: string, origin: string, path?: string, body?: any, method?: string) => {
const utf8str = String.fromCharCode.apply(null, new TextEncoder().encode(`${username}:${password}`));
const encoded = window.btoa(utf8str);
const authHeader = "Basic " + encoded;
// const origin = "capacitor://localhost";
const transformedHeaders: Record<string, string> = { authorization: authHeader, origin: origin };
const uri = `${baseUri}/${path}`;
const requestParam: RequestUrlParam = {
url: uri,
method: method || (body ? "PUT" : "GET"),
headers: transformedHeaders,
contentType: "application/json",
body: body ? JSON.stringify(body) : undefined,
};
return await requestUrl(requestParam);
}
const requestToCouchDB = async (baseUri: string, username: string, password: string, origin: string, key?: string, body?: string, method?: string) => {
const uri = `_node/_local/_config${key ? "/" + key : ""}`;
return await _requestToCouchDB(baseUri, username, password, origin, uri, body, method);
};
export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
plugin: ObsidianLiveSyncPlugin;
selectedScreen = "";
@ -496,15 +478,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
// @ts-ignore
this.plugin.app.setting.close();
await delay(2000);
if (method == "localOnly") {
await this.plugin.addOnSetup.fetchLocal();
}
if (method == "remoteOnly") {
await this.plugin.addOnSetup.rebuildRemote();
}
if (method == "rebuildBothByThisDevice") {
await this.plugin.addOnSetup.rebuildEverything();
}
await performRebuildDB(this.plugin, method);
}
new Setting(containerRemoteDatabaseEl)
@ -1067,42 +1041,25 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
.addButton((button) => {
button.setButtonText("Merge")
.onClick(async () => {
this.plugin.addOnSetup.suspendExtraSync();
this.display();
Logger("Gathering files for enabling Hidden File Sync", LOG_LEVEL.NOTICE);
await this.plugin.addOnHiddenFileSync.syncInternalFilesAndDatabase("safe", true);
this.plugin.settings.syncInternalFiles = true;
await this.plugin.saveSettings();
Logger(`Done!`, LOG_LEVEL.NOTICE);
// @ts-ignore
this.plugin.app.setting.close()
await this.plugin.addOnSetup.configureHiddenFileSync("MERGE");
})
})
.addButton((button) => {
button.setButtonText("Fetch")
.onClick(async () => {
this.plugin.addOnSetup.suspendExtraSync();
// @ts-ignore
this.plugin.app.setting.close()
Logger("Gathering files for enabling Hidden File Sync", LOG_LEVEL.NOTICE);
await this.plugin.addOnHiddenFileSync.syncInternalFilesAndDatabase("pullForce", true);
this.plugin.settings.syncInternalFiles = true;
await this.plugin.saveSettings();
Logger(`Done! Restarting the app is strongly recommended!`, LOG_LEVEL.NOTICE);
// this.display();
await this.plugin.addOnSetup.configureHiddenFileSync("FETCH");
})
})
.addButton((button) => {
button.setButtonText("Overwrite")
.onClick(async () => {
this.plugin.addOnSetup.suspendExtraSync();
// @ts-ignore
this.plugin.app.setting.close()
Logger("Gathering files for enabling Hidden File Sync", LOG_LEVEL.NOTICE);
// this.display();
await this.plugin.addOnHiddenFileSync.syncInternalFilesAndDatabase("pushForce", true);
this.plugin.settings.syncInternalFiles = true;
await this.plugin.saveSettings();
Logger(`Done!`, LOG_LEVEL.NOTICE);
// this.display();
await this.plugin.addOnSetup.configureHiddenFileSync("OVERWRITE");
})
});
}
@ -1642,59 +1599,8 @@ ${stringifyYaml(pluginConfig)}`;
})
);
const localDatabaseCleanUp = async (force: boolean) => {
const usedMap = new Map();
const existMap = new Map();
const db = this.plugin.localDatabase.localDatabase;
if ((db as any)?.adapter != "indexeddb") {
if (force) {
Logger("Fetch from the remote database", LOG_LEVEL.NOTICE, "clean-up-db");
await rebuildDB("localOnly");
return;
} else {
Logger("This feature requires enabling `Use new adapter`. Please enable it", LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
}
Logger(`The remote database locked for garbage collection`, LOG_LEVEL.NOTICE, "clean-up-db");
Logger(`Gathering chunk usage information`, LOG_LEVEL.NOTICE, "clean-up-db");
const xx = await db.allDocs({ startkey: "h:", endkey: `h:\u{10ffff}` });
for (const xxd of xx.rows) {
const chunk = xxd.id
existMap.set(chunk, xxd.value.rev);
}
const x = await db.find({ limit: 999999999, selector: { children: { $exists: true, $type: "array" } }, fields: ["_id", "mtime", "children"] });
for (const temp of x.docs) {
for (const chunk of (temp as NewEntry).children) {
usedMap.set(chunk, (usedMap.has(chunk) ? usedMap.get(chunk) : 0) + 1);
existMap.delete(chunk);
}
}
const payload = {} as Record<string, string[]>;
for (const [id, rev] of existMap) {
payload[id] = [rev];
}
const removeItems = Object.keys(payload).length;
if (removeItems == 0) {
Logger(`No unreferenced chunks found`, LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
Logger(`Deleting unreferenced chunks: ${removeItems}`, LOG_LEVEL.NOTICE, "clean-up-db");
for (const [id, rev] of existMap) {
//@ts-ignore
const ret = await db.purge(id, rev);
Logger(ret, LOG_LEVEL.VERBOSE);
}
this.plugin.localDatabase.refreshSettings();
Logger(`Compacting local database...`, LOG_LEVEL.NOTICE, "clean-up-db");
await db.compact();
Logger("Done!", LOG_LEVEL.NOTICE, "clean-up-db");
}
addScreenElement("50", containerHatchEl);
// With great respect, thank you TfTHacker!
@ -1805,75 +1711,23 @@ ${stringifyYaml(pluginConfig)}`;
)
new Setting(containerMaintenanceEl)
.setName("(Experimental) Clean the remote database")
.setName("(Beta) Clean the remote database")
.setDesc("")
.addButton((button) =>
button.setButtonText("Count")
.setDisabled(false)
.onClick(async () => {
await remoteDatabaseCleanup(this.plugin, true);
})
).addButton((button) =>
button.setButtonText("Perform cleaning")
.setDisabled(false)
.setWarning()
.onClick(async () => {
// @ts-ignore
this.plugin.app.setting.close()
try {
const usedMap = new Map();
const existMap = new Map();
const ret = await this.plugin.replicator.connectRemoteCouchDBWithSetting(this.plugin.settings, this.plugin.isMobile);
if (typeof ret === "string") {
Logger(`Connect error: ${ret}`, LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
const info = ret.info;
Logger(JSON.stringify(info), LOG_LEVEL.VERBOSE, "clean-up-db");
Logger(`Database data-size:${(info as any)?.data_size ?? "-"}, disk-size: ${(info as any)?.disk_size ?? "-"}`);
Logger(`The remote database locked for garbage collection`, LOG_LEVEL.NOTICE, "clean-up-db");
await this.plugin.markRemoteLocked();
Logger(`Gathering chunk usage information`, LOG_LEVEL.NOTICE, "clean-up-db");
const db = ret.db;
const xx = await db.allDocs({ startkey: "h:", endkey: `h:\u{10ffff}` });
for (const xxd of xx.rows) {
const chunk = xxd.id
existMap.set(chunk, xxd.value.rev);
}
const x = await db.find({ limit: 999999999, selector: { children: { $exists: true, $type: "array" } }, fields: ["_id", "mtime", "children"] });
for (const temp of x.docs) {
for (const chunk of (temp as NewEntry).children) {
usedMap.set(chunk, (usedMap.has(chunk) ? usedMap.get(chunk) : 0) + 1);
existMap.delete(chunk);
}
}
const payload = {} as Record<string, string[]>;
for (const [id, rev] of existMap) {
payload[id] = [rev];
}
const removeItems = Object.keys(payload).length;
if (removeItems == 0) {
Logger(`No unreferenced chunk found`, LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
Logger(`Deleting unreferenced chunks: ${removeItems}`, LOG_LEVEL.NOTICE, "clean-up-db");
const rets = await _requestToCouchDB(
`${this.plugin.settings.couchDB_URI}/${this.plugin.settings.couchDB_DBNAME}`,
this.plugin.settings.couchDB_USER,
this.plugin.settings.couchDB_PASSWORD,
undefined,
"_purge",
payload, "POST");
// const result = await rets();
Logger(JSON.stringify(rets.text), LOG_LEVEL.VERBOSE);
Logger(`Compacting database...`, LOG_LEVEL.NOTICE, "clean-up-db");
await db.compact();
const endInfo = await db.info();
Logger(`Result database data-size:${(endInfo as any)?.data_size ?? "-"}, disk-size: ${(endInfo as any)?.disk_size ?? "-"}`);
Logger(JSON.stringify(endInfo), LOG_LEVEL.VERBOSE, "clean-up-db");
Logger(`Local database cleaning up...`);
await localDatabaseCleanUp(true);
} catch (ex) {
Logger("Failed to clean up db.")
Logger(ex, LOG_LEVEL.VERBOSE);
}
await remoteDatabaseCleanup(this.plugin, false);
await balanceChunks(this.plugin, false);
})
);
@ -1893,17 +1747,22 @@ ${stringifyYaml(pluginConfig)}`;
)
new Setting(containerMaintenanceEl)
.setName("(Experimental) Clean the local database")
.setDesc("")
.setName("(Beta) Clean the local database")
.setDesc("This feature requires enabling 'Use new Adapter'")
.addButton((button) =>
button.setButtonText("Count")
.setDisabled(false)
.onClick(async () => {
await localDatabaseCleanUp(this.plugin, false, true);
})
).addButton((button) =>
button.setButtonText("Perform cleaning")
.setDisabled(false)
.setWarning()
.onClick(async () => {
// @ts-ignore
this.plugin.app.setting.close()
await localDatabaseCleanUp(false);
await this.plugin.markRemoteResolved();
await localDatabaseCleanUp(this.plugin, false, false);
})
);
@ -1935,6 +1794,18 @@ ${stringifyYaml(pluginConfig)}`;
})
)
new Setting(containerMaintenanceEl)
.setName("(Beta) Complement each other with possible missing chunks.")
.setDesc("")
.addButton((button) =>
button
.setButtonText("Balance")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await balanceChunks(this.plugin, false);
})
)
applyDisplayEnabled();
addScreenElement("70", containerMaintenanceEl);

View File

@ -1,4 +1,5 @@
import { App, FuzzySuggestModal, Modal, Setting } from "./deps";
import { ButtonComponent } from "obsidian";
import { App, FuzzySuggestModal, MarkdownRenderer, Modal, Plugin, Setting } from "./deps";
import ObsidianLiveSyncPlugin from "./main";
//@ts-ignore
@ -123,4 +124,103 @@ export class PopoverSelectString extends FuzzySuggestModal<string> {
}
}, 100);
}
}
}
export class MessageBox extends Modal {
plugin: Plugin;
title: string;
contentMd: string;
buttons: string[];
result: string;
isManuallyClosed = false;
defaultAction: string | undefined;
timeout: number | undefined;
timer: ReturnType<typeof setInterval> = undefined;
defaultButtonComponent: ButtonComponent | undefined;
onSubmit: (result: string | boolean) => void;
constructor(plugin: Plugin, title: string, contentMd: string, buttons: string[], defaultAction: (typeof buttons)[number], timeout: number, onSubmit: (result: (typeof buttons)[number] | false) => void) {
super(plugin.app);
this.plugin = plugin;
this.title = title;
this.contentMd = contentMd;
this.buttons = buttons;
this.onSubmit = onSubmit;
this.defaultAction = defaultAction;
this.timeout = timeout;
if (this.timeout) {
this.timer = setInterval(() => {
this.timeout--;
if (this.timeout < 0) {
if (this.timer) {
clearInterval(this.timer);
this.timer = undefined;
}
this.result = defaultAction;
this.isManuallyClosed = true;
this.close();
} else {
this.defaultButtonComponent.setButtonText(`( ${this.timeout} ) ${defaultAction}`);
}
}, 1000);
}
}
onOpen() {
const { contentEl } = this;
contentEl.addEventListener("click", () => {
if (this.timer) {
clearInterval(this.timer);
this.timer = undefined;
}
})
contentEl.createEl("h1", { text: this.title });
const div = contentEl.createDiv();
MarkdownRenderer.renderMarkdown(this.contentMd, div, "/", null);
const buttonSetting = new Setting(contentEl);
for (const button of this.buttons) {
buttonSetting.addButton((btn) => {
btn
.setButtonText(button)
.onClick(() => {
this.isManuallyClosed = true;
this.result = button;
if (this.timer) {
clearInterval(this.timer);
this.timer = undefined;
}
this.close();
})
if (button == this.defaultAction) {
this.defaultButtonComponent = btn;
}
return btn;
}
)
}
}
onClose() {
const { contentEl } = this;
contentEl.empty();
if (this.timer) {
clearInterval(this.timer);
this.timer = undefined;
}
if (this.isManuallyClosed) {
this.onSubmit(this.result);
} else {
this.onSubmit(false);
}
}
}
export function confirmWithMessage(plugin: Plugin, title: string, contentMd: string, buttons: string[], defaultAction?: (typeof buttons)[number], timeout?: number): Promise<(typeof buttons)[number] | false> {
return new Promise((res) => {
const dialog = new MessageBox(plugin, title, contentMd, buttons, defaultAction, timeout, (result) => res(result));
dialog.open();
});
};
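
For reference, a minimal usage sketch of the `confirmWithMessage` helper defined above. The caller and prompt below are hypothetical; only the signature and behaviour (the default button counts down the timeout, and the promise resolves with `false` when the modal is closed without choosing a button) come from this diff:

import { Plugin } from "obsidian";
import { confirmWithMessage } from "./dialogs";

// Hypothetical caller; the button labels mirror the ones used elsewhere in this commit.
async function askHiddenFileMode(plugin: Plugin): Promise<"Fetch" | "Overwrite" | null> {
    const answer = await confirmWithMessage(plugin, "Hidden file sync",
        "Would you like to enable `Hidden File Synchronization`?",
        ["Fetch", "Overwrite", "Keep it disabled"], "Keep it disabled", 30);
    if (answer === false || answer == "Keep it disabled") {
        // Dismissed without a button, timed out into the default, or explicitly declined.
        return null;
    }
    return answer as "Fetch" | "Overwrite";
}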

@ -1 +1 @@
Subproject commit f5db618612e7d3f78b988a73690488eebf161220
Subproject commit c14ab28b4d4843db4ba9768d8f7e60c102ef7e53

View File

@ -11,7 +11,7 @@ import { LogDisplayModal } from "./LogDisplayModal";
import { ConflictResolveModal } from "./ConflictResolveModal";
import { ObsidianLiveSyncSettingTab } from "./ObsidianLiveSyncSettingTab";
import { DocumentHistoryModal } from "./DocumentHistoryModal";
import { applyPatch, cancelAllPeriodicTask, cancelAllTasks, cancelTask, generatePatchObj, id2path, isObjectMargeApplicable, isSensibleMargeApplicable, flattenObject, path2id, scheduleTask, tryParseJSON, createFile, modifyFile, isValidPath, getAbstractFileByPath, touch, recentlyTouched, isIdOfInternalMetadata, isPluginMetadata, stripInternalMetadataPrefix, isChunk, askSelectString, askYesNo, askString, PeriodicProcessor, clearTouched, getPath, getPathWithoutPrefix, getPathFromTFile } from "./utils";
import { applyPatch, cancelAllPeriodicTask, cancelAllTasks, cancelTask, generatePatchObj, id2path, isObjectMargeApplicable, isSensibleMargeApplicable, flattenObject, path2id, scheduleTask, tryParseJSON, createFile, modifyFile, isValidPath, getAbstractFileByPath, touch, recentlyTouched, isIdOfInternalMetadata, isPluginMetadata, stripInternalMetadataPrefix, isChunk, askSelectString, askYesNo, askString, PeriodicProcessor, clearTouched, getPath, getPathWithoutPrefix, getPathFromTFile, localDatabaseCleanUp, balanceChunks, performRebuildDB } from "./utils";
import { encrypt, tryDecrypt } from "./lib/src/e2ee_v2";
import { enableEncryption, isCloudantURI, isErrorOfMissingDoc, isValidRemoteCouchDBURI } from "./lib/src/utils_couchdb";
import { getGlobalStore, ObservableStore, observeStores } from "./lib/src/store";
@ -29,6 +29,7 @@ import { LiveSyncCommands } from "./LiveSyncCommands";
import { PluginAndTheirSettings } from "./CmdPluginAndTheirSettings";
import { HiddenFileSync } from "./CmdHiddenFileSync";
import { SetupLiveSync } from "./CmdSetupLiveSync";
import { confirmWithMessage } from "./dialogs";
setNoticeClass(Notice);
@ -1542,7 +1543,43 @@ export default class ObsidianLiveSyncPlugin extends Plugin
await this.applyBatchChange();
await Promise.all(this.addOns.map(e => e.beforeReplicate(showMessage)));
await this.loadQueuedFiles();
return await this.replicator.openReplication(this.settings, false, showMessage);
const ret = await this.replicator.openReplication(this.settings, false, showMessage);
if (!ret) {
if (this.replicator.remoteLockedAndDeviceNotAccepted) {
if (this.replicator.remoteCleaned) {
const message = `
The remote database has been cleaned up.
To synchronize, this device must also be cleaned up, or must fetch everything again.
Fetching may take some time. Cleaning up is not yet stable, but it is fast.
`
const CHOICE_CLEANUP = "Clean up";
const CHOICE_FETCH = "Fetch again";
const CHOICE_DISMISS = "Dismiss";
const ret = await confirmWithMessage(this, "Locked", message, [CHOICE_CLEANUP, CHOICE_FETCH, CHOICE_DISMISS], CHOICE_DISMISS, 10);
if (ret == CHOICE_CLEANUP) {
await localDatabaseCleanUp(this, true, false);
await balanceChunks(this, false);
}
if (ret == CHOICE_FETCH) {
await performRebuildDB(this, "localOnly");
}
} else {
const message = `
The remote database has been rebuilt.
To synchronize, this device must fetch everything again once.
Or, if you are sure you know what has happened, you can unlock the database from the setting dialog.
`
const CHOICE_FETCH = "Fetch again";
const CHOICE_DISMISS = "Dismiss";
const ret = await confirmWithMessage(this, "Locked", message, [CHOICE_FETCH, CHOICE_DISMISS], CHOICE_DISMISS, 10);
if (ret == CHOICE_FETCH) {
await performRebuildDB(this, "localOnly");
}
}
}
}
return ret;
}
async initializeDatabase(showingNotice?: boolean, reopenDatabase = true) {
@ -1573,12 +1610,12 @@ export default class ObsidianLiveSyncPlugin extends Plugin
return await this.replicator.replicateAllFromServer(this.settings, showingNotice);
}
async markRemoteLocked() {
return await this.replicator.markRemoteLocked(this.settings, true);
async markRemoteLocked(lockByClean?: boolean) {
return await this.replicator.markRemoteLocked(this.settings, true, lockByClean);
}
async markRemoteUnlocked() {
return await this.replicator.markRemoteLocked(this.settings, false);
return await this.replicator.markRemoteLocked(this.settings, false, false);
}
async markRemoteResolved() {
@ -2038,60 +2075,62 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
showMergeDialog(filename: FilePathWithPrefix, conflictCheckResult: diff_result): Promise<boolean> {
return new Promise((res, rej) => {
Logger("open conflict dialog", LOG_LEVEL.VERBOSE);
new ConflictResolveModal(this.app, filename, conflictCheckResult, async (selected) => {
const testDoc = await this.localDatabase.getDBEntry(filename, { conflicts: true }, false, false, true);
if (testDoc === false) {
Logger("Missing file..", LOG_LEVEL.VERBOSE);
return res(true);
}
if (!testDoc._conflicts) {
Logger("Nothing have to do with this conflict", LOG_LEVEL.VERBOSE);
return res(true);
}
const toDelete = selected;
const toKeep = conflictCheckResult.left.rev != toDelete ? conflictCheckResult.left.rev : conflictCheckResult.right.rev;
if (toDelete == "") {
// concat both,
// delete conflicted revision and write a new file, store it again.
const p = conflictCheckResult.diff.map((e) => e[1]).join("");
await this.localDatabase.deleteDBEntry(filename, { rev: testDoc._conflicts[0] });
const file = getAbstractFileByPath(stripAllPrefixes(filename)) as TFile;
if (file) {
await this.app.vault.modify(file, p);
await this.updateIntoDB(file);
return runWithLock("resolve-conflict:" + filename, false, () =>
new Promise((res, rej) => {
Logger("open conflict dialog", LOG_LEVEL.VERBOSE);
new ConflictResolveModal(this.app, filename, conflictCheckResult, async (selected) => {
const testDoc = await this.localDatabase.getDBEntry(filename, { conflicts: true }, false, false, true);
if (testDoc === false) {
Logger("Missing file..", LOG_LEVEL.VERBOSE);
return res(true);
}
if (!testDoc._conflicts) {
Logger("Nothing have to do with this conflict", LOG_LEVEL.VERBOSE);
return res(true);
}
const toDelete = selected;
const toKeep = conflictCheckResult.left.rev != toDelete ? conflictCheckResult.left.rev : conflictCheckResult.right.rev;
if (toDelete == "") {
// concat both,
// delete conflicted revision and write a new file, store it again.
const p = conflictCheckResult.diff.map((e) => e[1]).join("");
await this.localDatabase.deleteDBEntry(filename, { rev: testDoc._conflicts[0] });
const file = getAbstractFileByPath(stripAllPrefixes(filename)) as TFile;
if (file) {
await this.app.vault.modify(file, p);
await this.updateIntoDB(file);
} else {
const newFile = await this.app.vault.create(filename, p);
await this.updateIntoDB(newFile);
}
await this.pullFile(filename);
Logger("concat both file");
if (this.settings.syncAfterMerge && !this.suspended) {
await this.replicate();
}
setTimeout(() => {
//resolved, check again.
this.showIfConflicted(filename);
}, 500);
} else if (toDelete == null) {
Logger("Leave it still conflicted");
} else {
const newFile = await this.app.vault.create(filename, p);
await this.updateIntoDB(newFile);
await this.localDatabase.deleteDBEntry(filename, { rev: toDelete });
await this.pullFile(filename, null, true, toKeep);
Logger(`Conflict resolved:${filename}`);
if (this.settings.syncAfterMerge && !this.suspended) {
await this.replicate();
}
setTimeout(() => {
//resolved, check again.
this.showIfConflicted(filename);
}, 500);
}
await this.pullFile(filename);
Logger("concat both file");
if (this.settings.syncAfterMerge && !this.suspended) {
await this.replicate();
}
setTimeout(() => {
//resolved, check again.
this.showIfConflicted(filename);
}, 500);
} else if (toDelete == null) {
Logger("Leave it still conflicted");
} else {
await this.localDatabase.deleteDBEntry(filename, { rev: toDelete });
await this.pullFile(filename, null, true, toKeep);
Logger(`Conflict resolved:${filename}`);
if (this.settings.syncAfterMerge && !this.suspended) {
await this.replicate();
}
setTimeout(() => {
//resolved, check again.
this.showIfConflicted(filename);
}, 500);
}
return res(true);
}).open();
});
return res(true);
}).open();
})
);
}
conflictedCheckFiles: FilePath[] = [];

View File

@ -1,10 +1,12 @@
import { DataWriteOptions, normalizePath, TFile, Platform, TAbstractFile, App, Plugin_2 } from "./deps";
import { DataWriteOptions, normalizePath, TFile, Platform, TAbstractFile, App, Plugin_2, RequestUrlParam, requestUrl } from "./deps";
import { path2id_base, id2path_base, isValidFilenameInLinux, isValidFilenameInDarwin, isValidFilenameInWidows, isValidFilenameInAndroid, stripAllPrefixes } from "./lib/src/path";
import { Logger } from "./lib/src/logger";
import { AnyEntry, DocumentID, EntryHasPath, FilePath, FilePathWithPrefix, LOG_LEVEL } from "./lib/src/types";
import { AnyEntry, DocumentID, EntryDoc, EntryHasPath, FilePath, FilePathWithPrefix, LOG_LEVEL, NewEntry } from "./lib/src/types";
import { CHeader, ICHeader, ICHeaderLength, PSCHeader } from "./types";
import { InputStringDialog, PopoverSelectString } from "./dialogs";
import ObsidianLiveSyncPlugin from "./main";
import { runWithLock } from "./lib/src/lock";
// For backward compatibility, using the path for determining id.
// Only CouchDB unacceptable ID (that starts with an underscore) has been prefixed with "/".
@ -428,3 +430,292 @@ export class PeriodicProcessor {
if (this._timer) clearInterval(this._timer);
}
}
function sizeToHumanReadable(size: number | undefined) {
if (!size) return "-";
const i = Math.floor(Math.log(size) / Math.log(1024));
return Number.parseFloat((size / Math.pow(1024, i)).toFixed(2)) + ' ' + ['B', 'kB', 'MB', 'GB', 'TB'][i];
}
export const _requestToCouchDBFetch = async (baseUri: string, username: string, password: string, path?: string, body?: string | any, method?: string) => {
const utf8str = String.fromCharCode.apply(null, new TextEncoder().encode(`${username}:${password}`));
const encoded = window.btoa(utf8str);
const authHeader = "Basic " + encoded;
const transformedHeaders: Record<string, string> = { authorization: authHeader, "content-type": "application/json" };
const uri = `${baseUri}/${path}`;
const requestParam = {
url: uri,
method: method || (body ? "PUT" : "GET"),
headers: new Headers(transformedHeaders),
contentType: "application/json",
body: JSON.stringify(body),
};
return await fetch(uri, requestParam);
}
export const _requestToCouchDB = async (baseUri: string, username: string, password: string, origin: string, path?: string, body?: any, method?: string) => {
const utf8str = String.fromCharCode.apply(null, new TextEncoder().encode(`${username}:${password}`));
const encoded = window.btoa(utf8str);
const authHeader = "Basic " + encoded;
const transformedHeaders: Record<string, string> = { authorization: authHeader, origin: origin };
const uri = `${baseUri}/${path}`;
const requestParam: RequestUrlParam = {
url: uri,
method: method || (body ? "PUT" : "GET"),
headers: transformedHeaders,
contentType: "application/json",
body: body ? JSON.stringify(body) : undefined,
};
return await requestUrl(requestParam);
}
export const requestToCouchDB = async (baseUri: string, username: string, password: string, origin: string, key?: string, body?: string, method?: string) => {
const uri = `_node/_local/_config${key ? "/" + key : ""}`;
return await _requestToCouchDB(baseUri, username, password, origin, uri, body, method);
};
export async function performRebuildDB(plugin: ObsidianLiveSyncPlugin, method: "localOnly" | "remoteOnly" | "rebuildBothByThisDevice") {
if (method == "localOnly") {
await plugin.addOnSetup.fetchLocal();
}
if (method == "remoteOnly") {
await plugin.addOnSetup.rebuildRemote();
}
if (method == "rebuildBothByThisDevice") {
await plugin.addOnSetup.rebuildEverything();
}
}
export const gatherChunkUsage = async (db: PouchDB.Database<EntryDoc>) => {
const used = new Map();
const unreferenced = new Map();
const removed = new Map();
const missing = new Map();
const xx = await db.allDocs({ startkey: "h:", endkey: `h:\u{10ffff}` });
for (const xxd of xx.rows) {
const chunk = xxd.id
unreferenced.set(chunk, xxd.value.rev);
}
const x = await db.find({ limit: 999999999, selector: { children: { $exists: true, $type: "array" } }, fields: ["_id", "path", "mtime", "children"] });
for (const temp of x.docs) {
for (const chunk of (temp as NewEntry).children) {
used.set(chunk, (used.has(chunk) ? used.get(chunk) : 0) + 1);
if (unreferenced.has(chunk)) {
removed.set(chunk, unreferenced.get(chunk));
unreferenced.delete(chunk);
} else {
if (!removed.has(chunk)) {
if (!missing.has(temp._id)) {
missing.set(temp._id, []);
}
missing.get(temp._id).push(chunk);
}
}
}
}
return { used, unreferenced, missing };
}
export const localDatabaseCleanUp = async (plugin: ObsidianLiveSyncPlugin, force: boolean, dryRun: boolean) => {
await runWithLock("clean-up:local", true, async () => {
const db = plugin.localDatabase.localDatabase;
if ((db as any)?.adapter != "indexeddb") {
if (force && !dryRun) {
Logger("Fetch from the remote database", LOG_LEVEL.NOTICE, "clean-up-db");
await performRebuildDB(plugin, "localOnly");
return;
} else {
Logger("This feature requires enabling `Use new adapter`. Please enable it", LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
}
Logger(`The remote database has been locked for garbage collection`, LOG_LEVEL.NOTICE, "clean-up-db");
Logger(`Gathering chunk usage information`, LOG_LEVEL.NOTICE, "clean-up-db");
const { unreferenced, missing } = await gatherChunkUsage(db);
if (missing.size != 0) {
Logger(`Some chunks are missing! They have to be rescued`, LOG_LEVEL.NOTICE);
Logger(missing, LOG_LEVEL.VERBOSE);
} else {
Logger(`All chunks are OK`, LOG_LEVEL.NOTICE);
}
const payload = {} as Record<string, string[]>;
for (const [id, rev] of unreferenced) {
payload[id] = [rev];
}
const removeItems = Object.keys(payload).length;
if (removeItems == 0) {
Logger(`No unreferenced chunks found (Local)`, LOG_LEVEL.NOTICE);
await plugin.markRemoteResolved();
}
if (dryRun) {
Logger(`There are ${removeItems} unreferenced chunks (Local)`, LOG_LEVEL.NOTICE);
return;
}
Logger(`Deleting unreferenced chunks: ${removeItems}`, LOG_LEVEL.NOTICE, "clean-up-db");
for (const [id, rev] of unreferenced) {
//@ts-ignore
const ret = await db.purge(id, rev);
Logger(ret, LOG_LEVEL.VERBOSE);
}
plugin.localDatabase.refreshSettings();
Logger(`Compacting local database...`, LOG_LEVEL.NOTICE, "clean-up-db");
await db.compact();
await plugin.markRemoteResolved();
Logger("Done!", LOG_LEVEL.NOTICE, "clean-up-db");
})
}
export const balanceChunks = async (plugin: ObsidianLiveSyncPlugin, dryRun: boolean) => {
await runWithLock("clean-up:balance", true, async () => {
const localDB = plugin.localDatabase.localDatabase;
Logger(`Gathering chunk usage information`, LOG_LEVEL.NOTICE, "clean-up-db");
const ret = await plugin.replicator.connectRemoteCouchDBWithSetting(plugin.settings, plugin.isMobile);
if (typeof ret === "string") {
Logger(`Connect error: ${ret}`, LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
const localChunks = new Map<string, string>();
const xx = await localDB.allDocs({ startkey: "h:", endkey: `h:\u{10ffff}` });
for (const xxd of xx.rows) {
const chunk = xxd.id
localChunks.set(chunk, xxd.value.rev);
}
// const info = ret.info;
const remoteDB = ret.db;
const remoteChunks = new Map<string, string>();
const xxr = await remoteDB.allDocs({ startkey: "h:", endkey: `h:\u{10ffff}` });
for (const xxd of xxr.rows) {
const chunk = xxd.id
remoteChunks.set(chunk, xxd.value.rev);
}
const localToRemote = new Map<string, string>([...localChunks]);
const remoteToLocal = new Map<string, string>([...remoteChunks]);
for (const id of new Set([...localChunks.keys(), ...remoteChunks.keys()])) {
if (remoteChunks.has(id)) {
localToRemote.delete(id);
}
if (localChunks.has(id)) {
remoteToLocal.delete(id);
}
}
function arrayToChunkedArray<T>(src: T[], size = 25) {
const ret = [] as T[][];
let i = 0;
while (i < src.length) {
ret.push(src.slice(i, i += size));
}
return ret;
}
if (localToRemote.size == 0) {
Logger(`No chunks need to be sent`, LOG_LEVEL.NOTICE);
} else {
Logger(`${localToRemote.size} chunks need to be sent`, LOG_LEVEL.NOTICE);
if (!dryRun) {
const w = arrayToChunkedArray([...localToRemote]);
for (const chunk of w) {
for (const [id,] of chunk) {
const queryRet = await localDB.allDocs({ keys: [id], include_docs: true });
const docs = queryRet.rows.filter(e => !("error" in e)).map(x => x.doc);
const ret = await remoteDB.bulkDocs(docs, { new_edits: false });
Logger(ret, LOG_LEVEL.VERBOSE);
}
}
Logger(`Done! ${localToRemote.size} chunks have been sent`, LOG_LEVEL.NOTICE);
}
}
if (remoteToLocal.size == 0) {
Logger(`No chunks need to be retrieved`, LOG_LEVEL.NOTICE);
} else {
Logger(`${remoteToLocal.size} chunks need to be retrieved`, LOG_LEVEL.NOTICE);
if (!dryRun) {
const w = arrayToChunkedArray([...remoteToLocal]);
for (const chunk of w) {
for (const [id,] of chunk) {
const queryRet = await remoteDB.allDocs({ keys: [id], include_docs: true });
const docs = queryRet.rows.filter(e => !("error" in e)).map(x => x.doc);
const ret = await localDB.bulkDocs(docs, { new_edits: false });
Logger(ret, LOG_LEVEL.VERBOSE);
}
}
Logger(`Done! ${remoteToLocal.size} chunks have been retrieved`, LOG_LEVEL.NOTICE);
}
}
})
}
export const remoteDatabaseCleanup = async (plugin: ObsidianLiveSyncPlugin, dryRun: boolean) => {
const getSize = function (info: PouchDB.Core.DatabaseInfo, key: "active" | "external" | "file") {
return Number.parseInt((info as any)?.sizes?.[key] ?? 0);
}
await runWithLock("clean-up:remote", true, async () => {
try {
const ret = await plugin.replicator.connectRemoteCouchDBWithSetting(plugin.settings, plugin.isMobile);
if (typeof ret === "string") {
Logger(`Connect error: ${ret}`, LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
const info = ret.info;
Logger(JSON.stringify(info), LOG_LEVEL.VERBOSE, "clean-up-db");
Logger(`Database active-size: ${sizeToHumanReadable(getSize(info, "active"))}, external-size:${sizeToHumanReadable(getSize(info, "external"))}, file-size: ${sizeToHumanReadable(getSize(info, "file"))}`, LOG_LEVEL.NOTICE);
if (!dryRun) {
Logger(`The remote database has been locked for garbage collection`, LOG_LEVEL.NOTICE, "clean-up-db");
await plugin.markRemoteLocked(true);
}
Logger(`Gathering chunk usage information`, LOG_LEVEL.NOTICE, "clean-up-db");
const db = ret.db;
const { unreferenced, missing } = await gatherChunkUsage(db);
if (missing.size != 0) {
Logger(`Some chunks are missing! They have to be rescued`, LOG_LEVEL.NOTICE);
Logger(missing, LOG_LEVEL.VERBOSE);
} else {
Logger(`All chunks are OK`, LOG_LEVEL.NOTICE);
}
const payload = {} as Record<string, string[]>;
for (const [id, rev] of unreferenced) {
payload[id] = [rev];
}
const removeItems = Object.keys(payload).length;
if (removeItems == 0) {
Logger(`No unreferenced chunk found (Remote)`, LOG_LEVEL.NOTICE);
return;
}
if (dryRun) {
Logger(`There are ${removeItems} unreferenced chunks (Remote)`, LOG_LEVEL.NOTICE);
return;
}
Logger(`Deleting unreferenced chunks: ${removeItems}`, LOG_LEVEL.NOTICE, "clean-up-db");
const rets = await _requestToCouchDBFetch(
`${plugin.settings.couchDB_URI}/${plugin.settings.couchDB_DBNAME}`,
plugin.settings.couchDB_USER,
plugin.settings.couchDB_PASSWORD,
"_purge",
payload, "POST");
// const result = await rets();
Logger(JSON.stringify(await rets.json()), LOG_LEVEL.VERBOSE);
Logger(`Compacting database...`, LOG_LEVEL.NOTICE, "clean-up-db");
await db.compact();
const endInfo = await db.info();
Logger(`Processed database active-size: ${sizeToHumanReadable(getSize(endInfo, "active"))}, external-size:${sizeToHumanReadable(getSize(endInfo, "external"))}, file-size: ${sizeToHumanReadable(getSize(endInfo, "file"))}`, LOG_LEVEL.NOTICE);
Logger(`Reduced sizes: active-size: ${sizeToHumanReadable(getSize(info, "active") - getSize(endInfo, "active"))}, external-size:${sizeToHumanReadable(getSize(info, "external") - getSize(endInfo, "external"))}, file-size: ${sizeToHumanReadable(getSize(info, "file") - getSize(endInfo, "file"))}`, LOG_LEVEL.NOTICE);
Logger(JSON.stringify(endInfo), LOG_LEVEL.VERBOSE, "clean-up-db");
Logger(`Local database cleaning up...`);
await localDatabaseCleanUp(plugin, true, false);
} catch (ex) {
Logger("Failed to clean up db.")
Logger(ex, LOG_LEVEL.VERBOSE);
}
});
}