
Implemented:

- Explicit types
- Path obfuscation
- ... and minor changes
vorotamoroz
2023-04-10 12:04:30 +09:00
parent 97f91b1eb0
commit e395e53248
14 changed files with 448 additions and 468 deletions
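As context for the "Path obfuscation" item above: a minimal sketch, assuming the document id is derived by hashing the prefixed path together with the end-to-end encryption passphrase when the new setting is enabled, and that the readable path moves into the entry's new path field. The actual path2id lives in the src/lib submodule and is not shown in this diff; the function below is illustrative only.

// Hypothetical sketch, not the library's path2id implementation.
import { createHash } from "crypto";

type DocumentID = string;
type FilePathWithPrefix = string;

function obfuscatedId(path: FilePathWithPrefix, passphrase: string): DocumentID {
    if (!passphrase) return path; // obfuscation disabled: the id stays readable
    // Hash the passphrase-salted path so ids stored remotely no longer leak file names.
    return createHash("sha256").update(`${passphrase}:${path}`).digest("hex");
}

// The plain path is still carried on the document itself (see the new `path:`
// properties added throughout this commit), so ids can be mapped back to files.
const entry = {
    _id: obfuscatedId("notes/secret.md", "my-passphrase"),
    path: "notes/secret.md",
};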

View File

@@ -1,19 +1,34 @@
{
"root": true,
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
"extends": ["eslint:recommended", "plugin:@typescript-eslint/eslint-recommended", "plugin:@typescript-eslint/recommended"],
"plugins": [
"@typescript-eslint"
],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended"
],
"parserOptions": {
"sourceType": "module"
"sourceType": "module",
"project": [
"tsconfig.json"
]
},
"rules": {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error", { "args": "none" }],
"@typescript-eslint/no-unused-vars": [
"error",
{
"args": "none"
}
],
"@typescript-eslint/ban-ts-comment": "off",
"no-prototype-builtins": "off",
"@typescript-eslint/no-empty-function": "off",
"require-await": "warn",
"no-async-promise-executor": "off",
"@typescript-eslint/no-explicit-any": "off"
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-unnecessary-type-assertion": "error"
}
}
}
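The config now sets parserOptions.project and enables @typescript-eslint/no-unnecessary-type-assertion, a rule that needs type information to work. A hypothetical snippet (not from this repository) showing the kind of code it reports:

// With type information available, the assertion below does not change the
// expression's type, so the rule flags it for removal.
function shout(value: string): string {
    return (value as string).toUpperCase(); // reported: unnecessary type assertion
}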

View File

@@ -1,19 +1,20 @@
import { Notice, normalizePath, PluginManifest } from "./deps";
import { EntryDoc, LoadedEntry, LOG_LEVEL, InternalFileEntry } from "./lib/src/types";
import { EntryDoc, LoadedEntry, LOG_LEVEL, InternalFileEntry, FilePathWithPrefix, FilePath } from "./lib/src/types";
import { InternalFileInfo, ICHeader, ICHeaderEnd } from "./types";
import { delay, isDocContentSame } from "./lib/src/utils";
import { Logger } from "./lib/src/logger";
import { PouchDB } from "./lib/src/pouchdb-browser.js";
import { disposeMemoObject, id2path, memoIfNotExist, memoObject, path2id, retrieveMemoObject, scheduleTask, trimPrefix, isInternalMetadata, filename2idInternalMetadata, id2filenameInternalMetadata, PeriodicProcessor } from "./utils";
import { disposeMemoObject, memoIfNotExist, memoObject, retrieveMemoObject, scheduleTask, trimPrefix, isIdOfInternalMetadata, PeriodicProcessor } from "./utils";
import { WrappedNotice } from "./lib/src/wrapper";
import { base64ToArrayBuffer, arrayBufferToBase64 } from "./lib/src/strbin";
import { runWithLock } from "./lib/src/lock";
import { Semaphore } from "./lib/src/semaphore";
import { JsonResolveModal } from "./JsonResolveModal";
import { LiveSyncCommands } from "./LiveSyncCommands";
import { addPrefix, stripAllPrefixes } from "./lib/src/path";
export class HiddenFileSync extends LiveSyncCommands {
periodicInternalFileScanProcessor: PeriodicProcessor = new PeriodicProcessor(this.plugin, async () => await this.syncInternalFilesAndDatabase("push", false));
periodicInternalFileScanProcessor: PeriodicProcessor = new PeriodicProcessor(this.plugin, async () => this.settings.syncInternalFiles && this.localDatabase.isReady && await this.syncInternalFilesAndDatabase("push", false));
confirmPopup: WrappedNotice = null;
get kvDB() {
return this.plugin.kvDB;
@@ -21,7 +22,7 @@ export class HiddenFileSync extends LiveSyncCommands {
ensureDirectoryEx(fullPath: string) {
return this.plugin.ensureDirectoryEx(fullPath);
}
getConflictedDoc(path: string, rev: string) {
getConflictedDoc(path: FilePathWithPrefix, rev: string) {
return this.plugin.getConflictedDoc(path, rev);
}
onunload() {
@@ -49,11 +50,12 @@ export class HiddenFileSync extends LiveSyncCommands {
}
}
async beforeReplicate(showNotice: boolean) {
if (this.settings.syncInternalFiles && this.settings.syncInternalFilesBeforeReplication && !this.settings.watchInternalFileChanges) {
if (this.localDatabase.isReady && this.settings.syncInternalFiles && this.settings.syncInternalFilesBeforeReplication && !this.settings.watchInternalFileChanges) {
await this.syncInternalFilesAndDatabase("push", showNotice);
}
}
async onResume() {
this.periodicInternalFileScanProcessor?.disable();
if (this.plugin.suspended)
return;
if (this.settings.syncInternalFiles) {
@@ -66,6 +68,10 @@ export class HiddenFileSync extends LiveSyncCommands {
}
realizeSettingSyncMode(): Promise<void> {
this.periodicInternalFileScanProcessor?.disable();
if (this.plugin.suspended)
return;
if (!this.plugin.isReady)
return;
this.periodicInternalFileScanProcessor.enable(this.settings.syncInternalFiles && this.settings.syncInternalFilesInterval ? (this.settings.syncInternalFilesInterval * 1000) : 0);
return;
}
@@ -88,7 +94,7 @@ export class HiddenFileSync extends LiveSyncCommands {
}
recentProcessedInternalFiles = [] as string[];
async watchVaultRawEventsAsync(path: string) {
async watchVaultRawEventsAsync(path: FilePath) {
const stat = await this.app.vault.adapter.stat(path);
// sometimes folder is coming.
if (stat && stat.type != "file")
@@ -100,8 +106,9 @@ export class HiddenFileSync extends LiveSyncCommands {
return;
}
this.recentProcessedInternalFiles = [key, ...this.recentProcessedInternalFiles].slice(0, 100);
const id = filename2idInternalMetadata(path);
const filesOnDB = await this.localDatabase.getDBEntryMeta(id);
// const id = await this.path2id(path, ICHeader);
const prefixedFileName = addPrefix(path, ICHeader);
const filesOnDB = await this.localDatabase.getDBEntryMeta(prefixedFileName);
const dbMTime = ~~((filesOnDB && filesOnDB.mtime || 0) / 1000);
// Skip unchanged file.
@@ -131,36 +138,37 @@ export class HiddenFileSync extends LiveSyncCommands {
for await (const doc of conflicted) {
if (!("_conflicts" in doc))
continue;
if (isInternalMetadata(doc._id)) {
await this.resolveConflictOnInternalFile(doc._id);
if (isIdOfInternalMetadata(doc._id)) {
await this.resolveConflictOnInternalFile(doc.path);
}
}
}
async resolveConflictOnInternalFile(id: string): Promise<boolean> {
async resolveConflictOnInternalFile(path: FilePathWithPrefix): Promise<boolean> {
try {
// Retrieve data
const doc = await this.localDatabase.localDatabase.get(id, { conflicts: true });
const id = await this.path2id(path, ICHeader);
const doc = await this.localDatabase.getRaw(id, { conflicts: true });
// If there is no conflict, return with false.
if (!("_conflicts" in doc))
return false;
if (doc._conflicts.length == 0)
return false;
Logger(`Hidden file conflicted:${id2filenameInternalMetadata(id)}`);
Logger(`Hidden file conflicted:${path}`);
const conflicts = doc._conflicts.sort((a, b) => Number(a.split("-")[0]) - Number(b.split("-")[0]));
const revA = doc._rev;
const revB = conflicts[0];
if (doc._id.endsWith(".json")) {
if (path.endsWith(".json")) {
const conflictedRev = conflicts[0];
const conflictedRevNo = Number(conflictedRev.split("-")[0]);
//Search
const revFrom = (await this.localDatabase.localDatabase.get<EntryDoc>(id, { revs_info: true }));
const revFrom = (await this.localDatabase.getRaw<EntryDoc>(id, { revs_info: true }));
const commonBase = revFrom._revs_info.filter(e => e.status == "available" && Number(e.rev.split("-")[0]) < conflictedRevNo).first()?.rev ?? "";
const result = await this.plugin.mergeObject(id, commonBase, doc._rev, conflictedRev);
const result = await this.plugin.mergeObject(path, commonBase, doc._rev, conflictedRev);
if (result) {
Logger(`Object merge:${id}`, LOG_LEVEL.INFO);
const filename = id2filenameInternalMetadata(id);
Logger(`Object merge:${path}`, LOG_LEVEL.INFO);
const filename = stripAllPrefixes(path);
const isExists = await this.app.vault.adapter.exists(filename);
if (!isExists) {
await this.ensureDirectoryEx(filename);
@@ -169,24 +177,24 @@ export class HiddenFileSync extends LiveSyncCommands {
const stat = await this.app.vault.adapter.stat(filename);
await this.storeInternalFileToDatabase({ path: filename, ...stat });
await this.extractInternalFileFromDatabase(filename);
await this.localDatabase.localDatabase.remove(id, revB);
return this.resolveConflictOnInternalFile(id);
await this.localDatabase.removeRaw(id, revB);
return this.resolveConflictOnInternalFile(path);
} else {
Logger(`Object merge is not applicable.`, LOG_LEVEL.VERBOSE);
}
const docAMerge = await this.localDatabase.getDBEntry(id, { rev: revA });
const docBMerge = await this.localDatabase.getDBEntry(id, { rev: revB });
const docAMerge = await this.localDatabase.getDBEntry(path, { rev: revA });
const docBMerge = await this.localDatabase.getDBEntry(path, { rev: revB });
if (docAMerge != false && docBMerge != false) {
if (await this.showJSONMergeDialogAndMerge(docAMerge, docBMerge)) {
await delay(200);
// Again for other conflicted revisions.
return this.resolveConflictOnInternalFile(id);
return this.resolveConflictOnInternalFile(path);
}
return false;
}
}
const revBDoc = await this.localDatabase.localDatabase.get(id, { rev: revB });
const revBDoc = await this.localDatabase.getRaw(id, { rev: revB });
// determine which revision should been deleted.
// simply check modified time
const mtimeA = ("mtime" in doc && doc.mtime) || 0;
@@ -195,12 +203,12 @@ export class HiddenFileSync extends LiveSyncCommands {
// console.log(`mtime:${mtimeA} - ${mtimeB}`);
const delRev = mtimeA < mtimeB ? revA : revB;
// delete older one.
await this.localDatabase.localDatabase.remove(id, delRev);
Logger(`Older one has been deleted:${id2filenameInternalMetadata(id)}`);
await this.localDatabase.removeRaw(id, delRev);
Logger(`Older one has been deleted:${path}`);
// check the file again
return this.resolveConflictOnInternalFile(id);
return this.resolveConflictOnInternalFile(path);
} catch (ex) {
Logger("Failed to resolve conflict (Hidden)");
Logger(`Failed to resolve conflict (Hidden): ${path}`);
Logger(ex, LOG_LEVEL.VERBOSE);
return false;
}
@@ -216,9 +224,8 @@ export class HiddenFileSync extends LiveSyncCommands {
.split(",").filter(e => e).map(e => new RegExp(e, "i"));
if (!files)
files = await this.scanInternalFiles();
const filesOnDB = ((await this.localDatabase.localDatabase.allDocs({ startkey: ICHeader, endkey: ICHeaderEnd, include_docs: true })).rows.map(e => e.doc) as InternalFileEntry[]).filter(e => !e.deleted);
const allFileNamesSrc = [...new Set([...files.map(e => normalizePath(e.path)), ...filesOnDB.map(e => normalizePath(id2path(id2filenameInternalMetadata(e._id))))])];
const filesOnDB = ((await this.localDatabase.allDocsRaw({ startkey: ICHeader, endkey: ICHeaderEnd, include_docs: true })).rows.map(e => e.doc) as InternalFileEntry[]).filter(e => !e.deleted);
const allFileNamesSrc = [...new Set([...files.map(e => normalizePath(e.path)), ...filesOnDB.map(e => stripAllPrefixes(this.getPath(e)))])];
const allFileNames = allFileNamesSrc.filter(filename => !targetFiles || (targetFiles && targetFiles.indexOf(filename) !== -1));
function compareMTime(a: number, b: number) {
const wa = ~~(a / 1000);
@@ -258,6 +265,7 @@ export class HiddenFileSync extends LiveSyncCommands {
let caches: { [key: string]: { storageMtime: number; docMtime: number; }; } = {};
caches = await this.kvDB.get<{ [key: string]: { storageMtime: number; docMtime: number; }; }>("diff-caches-internal") || {};
for (const filename of allFileNames) {
if (!filename) continue;
processed++;
if (processed % 100 == 0)
Logger(`Hidden file: ${processed}/${fileCount}`, logLevel, "sync_internal");
@@ -265,7 +273,7 @@ export class HiddenFileSync extends LiveSyncCommands {
continue;
const fileOnStorage = files.find(e => e.path == filename);
const fileOnDatabase = filesOnDB.find(e => e._id == filename2idInternalMetadata(id2path(filename)));
const fileOnDatabase = filesOnDB.find(e => stripAllPrefixes(this.getPath(e)) == filename);
const addProc = async (p: () => Promise<void>): Promise<void> => {
const releaser = await semaphore.acquire(1);
try {
@@ -280,14 +288,16 @@ export class HiddenFileSync extends LiveSyncCommands {
const cache = filename in caches ? caches[filename] : { storageMtime: 0, docMtime: 0 };
p.push(addProc(async () => {
if (fileOnStorage && fileOnDatabase) {
const xFileOnStorage = fileOnStorage;
const xfileOnDatabase = fileOnDatabase;
if (xFileOnStorage && xfileOnDatabase) {
// Both => Synchronize
if (fileOnDatabase.mtime == cache.docMtime && fileOnStorage.mtime == cache.storageMtime) {
if (xfileOnDatabase.mtime == cache.docMtime && xFileOnStorage.mtime == cache.storageMtime) {
return;
}
const nw = compareMTime(fileOnStorage.mtime, fileOnDatabase.mtime);
const nw = compareMTime(xFileOnStorage.mtime, xfileOnDatabase.mtime);
if (nw > 0) {
await this.storeInternalFileToDatabase(fileOnStorage);
await this.storeInternalFileToDatabase(xFileOnStorage);
}
if (nw < 0) {
// skip if not extraction performed.
@@ -295,13 +305,13 @@ export class HiddenFileSync extends LiveSyncCommands {
return;
}
// If process successfully updated or file contents are same, update cache.
cache.docMtime = fileOnDatabase.mtime;
cache.storageMtime = fileOnStorage.mtime;
cache.docMtime = xfileOnDatabase.mtime;
cache.storageMtime = xFileOnStorage.mtime;
caches[filename] = cache;
countUpdatedFolder(filename);
} else if (!fileOnStorage && fileOnDatabase) {
} else if (!xFileOnStorage && xfileOnDatabase) {
if (direction == "push") {
if (fileOnDatabase.deleted)
if (xfileOnDatabase.deleted)
return;
await this.deleteInternalFileOnDatabase(filename, false);
} else if (direction == "pull") {
@@ -309,14 +319,14 @@ export class HiddenFileSync extends LiveSyncCommands {
countUpdatedFolder(filename);
}
} else if (direction == "safe") {
if (fileOnDatabase.deleted)
if (xfileOnDatabase.deleted)
return;
if (await this.extractInternalFileFromDatabase(filename)) {
countUpdatedFolder(filename);
}
}
} else if (fileOnStorage && !fileOnDatabase) {
await this.storeInternalFileToDatabase(fileOnStorage);
} else if (xFileOnStorage && !xfileOnDatabase) {
await this.storeInternalFileToDatabase(xFileOnStorage);
} else {
throw new Error("Invalid state on hidden file sync");
// Something corrupted?
@@ -335,7 +345,7 @@ export class HiddenFileSync extends LiveSyncCommands {
let updatedCount = updatedFolders[configDir];
try {
//@ts-ignore
const manifests = Object.values(this.app.plugins.manifests) as PluginManifest[];
const manifests = Object.values(this.app.plugins.manifests) as any as PluginManifest[];
//@ts-ignore
const enabledPlugins = this.app.plugins.enabledPlugins as Set<string>;
const enabledPluginManifests = manifests.filter(e => enabledPlugins.has(e.id));
@@ -427,7 +437,8 @@ export class HiddenFileSync extends LiveSyncCommands {
}
async storeInternalFileToDatabase(file: InternalFileInfo, forceWrite = false) {
const id = filename2idInternalMetadata(path2id(file.path));
const id = await this.path2id(file.path, ICHeader);
const prefixedFileName = addPrefix(file.path, ICHeader);
const contentBin = await this.app.vault.adapter.readBinary(file.path);
let content: string[];
try {
@@ -438,13 +449,14 @@ export class HiddenFileSync extends LiveSyncCommands {
return false;
}
const mtime = file.mtime;
return await runWithLock("file-" + id, false, async () => {
return await runWithLock("file-" + prefixedFileName, false, async () => {
try {
const old = await this.localDatabase.getDBEntry(id, null, false, false);
const old = await this.localDatabase.getDBEntry(prefixedFileName, null, false, false);
let saveData: LoadedEntry;
if (old === false) {
saveData = {
_id: id,
path: prefixedFileName,
data: content,
mtime,
ctime: mtime,
@@ -471,7 +483,6 @@ export class HiddenFileSync extends LiveSyncCommands {
type: "newnote",
};
}
const ret = await this.localDatabase.putDBEntry(saveData, true);
Logger(`STORAGE --> DB:${file.path}: (hidden) Done`);
return ret;
@@ -483,16 +494,18 @@ export class HiddenFileSync extends LiveSyncCommands {
});
}
async deleteInternalFileOnDatabase(filename: string, forceWrite = false) {
const id = filename2idInternalMetadata(path2id(filename));
async deleteInternalFileOnDatabase(filename: FilePath, forceWrite = false) {
const id = await this.path2id(filename, ICHeader);
const prefixedFileName = addPrefix(filename, ICHeader);
const mtime = new Date().getTime();
await runWithLock("file-" + id, false, async () => {
await runWithLock("file-" + prefixedFileName, false, async () => {
try {
const old = await this.localDatabase.getDBEntry(id, null, false, false) as InternalFileEntry | false;
const old = await this.localDatabase.getDBEntry(prefixedFileName, null, false, false) as InternalFileEntry | false;
let saveData: InternalFileEntry;
if (old === false) {
saveData = {
_id: id,
path: prefixedFileName,
mtime,
ctime: mtime,
size: 0,
@@ -515,7 +528,7 @@ export class HiddenFileSync extends LiveSyncCommands {
type: "newnote",
};
}
await this.localDatabase.localDatabase.put(saveData);
await this.localDatabase.putRaw(saveData);
Logger(`STORAGE -x> DB:${filename}: (hidden) Done`);
} catch (ex) {
Logger(`STORAGE -x> DB:${filename}: (hidden) Failed`);
@@ -525,20 +538,20 @@ export class HiddenFileSync extends LiveSyncCommands {
});
}
async extractInternalFileFromDatabase(filename: string, force = false) {
async extractInternalFileFromDatabase(filename: FilePath, force = false) {
const isExists = await this.app.vault.adapter.exists(filename);
const id = filename2idInternalMetadata(path2id(filename));
const prefixedFileName = addPrefix(filename, ICHeader);
return await runWithLock("file-" + id, false, async () => {
return await runWithLock("file-" + prefixedFileName, false, async () => {
try {
// Check conflicted status
//TODO option
const fileOnDB = await this.localDatabase.getDBEntry(id, { conflicts: true }, false, false) as false | LoadedEntry;
const fileOnDB = await this.localDatabase.getDBEntry(prefixedFileName, { conflicts: true }, false, false);
if (fileOnDB === false)
throw new Error(`File not found on database.:${id}`);
// Prevent overrite for Prevent overwriting while some conflicted revision exists.
throw new Error(`File not found on database.:${filename}`);
// Prevent overwriting while some conflicted revision exists.
if (fileOnDB?._conflicts?.length) {
Logger(`Hidden file ${id} has conflicted revisions, to keep in safe, writing to storage has been prevented`, LOG_LEVEL.INFO);
Logger(`Hidden file ${filename} has conflicted revisions, to keep in safe, writing to storage has been prevented`, LOG_LEVEL.INFO);
return;
}
const deleted = "deleted" in fileOnDB ? fileOnDB.deleted : false;
@@ -603,10 +616,11 @@ export class HiddenFileSync extends LiveSyncCommands {
return new Promise((res) => {
Logger("Opening data-merging dialog", LOG_LEVEL.VERBOSE);
const docs = [docA, docB];
const modal = new JsonResolveModal(this.app, id2path(docA._id), [docA, docB], async (keep, result) => {
const path = stripAllPrefixes(docA.path);
const modal = new JsonResolveModal(this.app, path, [docA, docB], async (keep, result) => {
// modal.close();
try {
const filename = id2filenameInternalMetadata(docA._id);
const filename = path;
let needFlush = false;
if (!result && !keep) {
Logger(`Skipped merging: ${filename}`);
@@ -615,7 +629,7 @@ export class HiddenFileSync extends LiveSyncCommands {
if (result || keep) {
for (const doc of docs) {
if (doc._rev != keep) {
if (await this.localDatabase.deleteDBEntry(doc._id, { rev: doc._rev })) {
if (await this.localDatabase.deleteDBEntry(this.getPath(doc), { rev: doc._rev })) {
Logger(`Conflicted revision has been deleted: ${filename}`);
needFlush = true;
}
@@ -663,7 +677,7 @@ export class HiddenFileSync extends LiveSyncCommands {
const filenames = (await this.getFiles(findRoot, [], null, ignoreFilter)).filter(e => e.startsWith(".")).filter(e => !e.startsWith(".trash"));
const files = filenames.map(async (e) => {
return {
path: e,
path: e as FilePath,
stat: await this.app.vault.adapter.stat(e)
};
});
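Across this file, filename2idInternalMetadata and id2filenameInternalMetadata are replaced by addPrefix/stripAllPrefixes plus path2id. A minimal sketch of the round-trip, assuming ICHeader is the internal-file prefix "i:" (the prefix handled in DocumentHistoryModal further below); the real helpers are imported from ./lib/src/path and may differ.

// Sketch only; illustrative stand-ins for the imported helpers.
const ICHeader = "i:"; // assumed value of the internal-file prefix

const addPrefix = (path: string, prefix: string): string =>
    path.startsWith(prefix) ? path : prefix + path;
const stripAllPrefixes = (path: string): string =>
    path.replace(/^(?:[a-z]+:)+/i, "");

const prefixedFileName = addPrefix(".obsidian/app.json", ICHeader); // "i:.obsidian/app.json"
const storagePath = stripAllPrefixes(prefixedFileName);             // ".obsidian/app.json"

// The prefixed form addresses database entries (getDBEntry, runWithLock keys),
// while the stripped form is what the vault adapter reads and writes.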

View File

@@ -1,5 +1,5 @@
import { normalizePath, PluginManifest } from "./deps";
import { EntryDoc, LoadedEntry, LOG_LEVEL } from "./lib/src/types";
import { DocumentID, EntryDoc, FilePathWithPrefix, LoadedEntry, LOG_LEVEL } from "./lib/src/types";
import { PluginDataEntry, PERIODIC_PLUGIN_SWEEP, PluginList, DevicePluginList, PSCHeader, PSCHeaderEnd } from "./types";
import { getDocData, isDocContentSame } from "./lib/src/utils";
import { Logger } from "./lib/src/logger";
@@ -99,9 +99,8 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
async getPluginList(): Promise<{ plugins: PluginList; allPlugins: DevicePluginList; thisDevicePlugins: DevicePluginList; }> {
const db = this.localDatabase.localDatabase;
const docList = await db.allDocs<PluginDataEntry>({ startkey: PSCHeader, endkey: PSCHeaderEnd, include_docs: false });
const oldDocs: PluginDataEntry[] = ((await Promise.all(docList.rows.map(async (e) => await this.localDatabase.getDBEntry(e.id)))).filter((e) => e !== false) as LoadedEntry[]).map((e) => JSON.parse(getDocData(e.data)));
const docList = await this.localDatabase.allDocsRaw<PluginDataEntry>({ startkey: PSCHeader, endkey: PSCHeaderEnd, include_docs: false });
const oldDocs: PluginDataEntry[] = ((await Promise.all(docList.rows.map(async (e) => await this.localDatabase.getDBEntry(e.id as FilePathWithPrefix /* WARN!! THIS SHOULD BE WRAPPED */)))).filter((e) => e !== false) as LoadedEntry[]).map((e) => JSON.parse(getDocData(e.data)));
const plugins: { [key: string]: PluginDataEntry[]; } = {};
const allPlugins: { [key: string]: PluginDataEntry; } = {};
const thisDevicePlugins: { [key: string]: PluginDataEntry; } = {};
@@ -170,8 +169,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
return;
}
Logger("Scanning plugins", logLevel);
const db = this.localDatabase.localDatabase;
const oldDocs = await db.allDocs({
const oldDocs = await this.localDatabase.allDocsRaw<EntryDoc>({
startkey: `ps:${this.deviceAndVaultName}-${specificPlugin}`,
endkey: `ps:${this.deviceAndVaultName}-${specificPlugin}\u{10ffff}`,
include_docs: true,
@@ -179,7 +177,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
// Logger("OLD DOCS.", LOG_LEVEL.VERBOSE);
// sweep current plugin.
const procs = manifests.map(async (m) => {
const pluginDataEntryID = `ps:${this.deviceAndVaultName}-${m.id}`;
const pluginDataEntryID = `ps:${this.deviceAndVaultName}-${m.id}` as DocumentID;
try {
if (specificPlugin && m.id != specificPlugin) {
return;
@@ -213,6 +211,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
};
const d: LoadedEntry = {
_id: p._id,
path: p._id as string as FilePathWithPrefix,
data: JSON.stringify(p),
ctime: mtime,
mtime: mtime,
@@ -223,7 +222,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
};
Logger(`check diff:${m.name}(${m.id})`, LOG_LEVEL.VERBOSE);
await runWithLock("plugin-" + m.id, false, async () => {
const old = await this.localDatabase.getDBEntry(p._id, null, false, false);
const old = await this.localDatabase.getDBEntry(p._id as string as FilePathWithPrefix /* This also should be explained */, null, false, false);
if (old !== false) {
const oldData = { data: old.data, deleted: old._deleted };
const newData = { data: d.data, deleted: d._deleted };
@@ -259,7 +258,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
return e.doc;
});
Logger(`Deleting old plugin:(${delDocs.length})`, LOG_LEVEL.VERBOSE);
await db.bulkDocs(delDocs);
await this.localDatabase.bulkDocsRaw(delDocs);
Logger(`Scan plugin done.`, logLevel);
});
}
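Several call sites here and in HiddenFileSync above stop reaching into this.localDatabase.localDatabase and use new *Raw accessors (getRaw, removeRaw, putRaw, allDocsRaw, bulkDocsRaw) instead. A sketch of the assumed shape of those wrappers; the real ones live in the updated src/lib submodule and may differ.

// Assumed shape only: thin pass-throughs so commands stop depending on the
// concrete PouchDB instance.
import { PouchDB } from "./lib/src/pouchdb-browser.js";
import { EntryDoc } from "./lib/src/types";

class LocalDatabaseWithRawAccess {
    localDatabase!: PouchDB.Database<EntryDoc>;

    getRaw<T extends EntryDoc>(id: string, options?: PouchDB.Core.GetOptions) {
        return this.localDatabase.get<T>(id, options);
    }
    removeRaw(id: string, rev: string) {
        return this.localDatabase.remove(id, rev);
    }
    putRaw(doc: EntryDoc) {
        return this.localDatabase.put(doc);
    }
    allDocsRaw<T>(options?: PouchDB.Core.AllDocsOptions) {
        return this.localDatabase.allDocs<T>(options);
    }
    bulkDocsRaw(docs: EntryDoc[]) {
        return this.localDatabase.bulkDocs(docs);
    }
}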

View File

@@ -1,12 +1,13 @@
import { TFile, Modal, App } from "./deps";
import { isValidPath, path2id } from "./utils";
import { getPathFromTFile, isValidPath } from "./utils";
import { base64ToArrayBuffer, base64ToString, escapeStringToHTML } from "./lib/src/strbin";
import ObsidianLiveSyncPlugin from "./main";
import { DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, diff_match_patch } from "diff-match-patch";
import { LoadedEntry, LOG_LEVEL } from "./lib/src/types";
import { DocumentID, FilePathWithPrefix, LoadedEntry, LOG_LEVEL } from "./lib/src/types";
import { Logger } from "./lib/src/logger";
import { isErrorOfMissingDoc } from "./lib/src/utils_couchdb";
import { getDocData } from "./lib/src/utils";
import { stripPrefix } from "./lib/src/path";
export class DocumentHistoryModal extends Modal {
plugin: ObsidianLiveSyncPlugin;
@@ -15,26 +16,35 @@ export class DocumentHistoryModal extends Modal {
info: HTMLDivElement;
fileInfo: HTMLDivElement;
showDiff = false;
id: DocumentID;
file: string;
file: FilePathWithPrefix;
revs_info: PouchDB.Core.RevisionInfo[] = [];
currentDoc: LoadedEntry;
currentText = "";
currentDeleted = false;
constructor(app: App, plugin: ObsidianLiveSyncPlugin, file: TFile | string) {
constructor(app: App, plugin: ObsidianLiveSyncPlugin, file: TFile | FilePathWithPrefix, id: DocumentID) {
super(app);
this.plugin = plugin;
this.file = (file instanceof TFile) ? file.path : file;
this.file = (file instanceof TFile) ? getPathFromTFile(file) : file;
this.id = id;
if (!file) {
this.file = this.plugin.id2path(id, null);
}
if (localStorage.getItem("ols-history-highlightdiff") == "1") {
this.showDiff = true;
}
}
async loadFile() {
if (!this.id) {
this.id = await this.plugin.path2id(this.file);
}
const db = this.plugin.localDatabase;
try {
const w = await db.localDatabase.get(path2id(this.file), { revs_info: true });
const w = await db.localDatabase.get(this.id, { revs_info: true });
this.revs_info = w._revs_info.filter((e) => e?.status == "available");
this.range.max = `${this.revs_info.length - 1}`;
this.range.value = this.range.max;
@@ -47,6 +57,9 @@ export class DocumentHistoryModal extends Modal {
this.range.disabled = true;
this.showDiff
this.contentView.setText(`History of this file was not recorded.`);
} else {
this.contentView.setText(`Error occurred.`);
Logger(ex, LOG_LEVEL.VERBOSE);
}
}
}
@@ -55,7 +68,7 @@ export class DocumentHistoryModal extends Modal {
const db = this.plugin.localDatabase;
const index = this.revs_info.length - 1 - (this.range.value as any) / 1;
const rev = this.revs_info[index];
const w = await db.getDBEntry(path2id(this.file), { rev: rev.rev }, false, false, true);
const w = await db.getDBEntry(this.file, { rev: rev.rev }, false, false, true);
this.currentText = "";
this.currentDeleted = false;
if (w === false) {
@@ -73,7 +86,7 @@ export class DocumentHistoryModal extends Modal {
const prevRevIdx = this.revs_info.length - 1 - ((this.range.value as any) / 1 - 1);
if (prevRevIdx >= 0 && prevRevIdx < this.revs_info.length) {
const oldRev = this.revs_info[prevRevIdx].rev;
const w2 = await db.getDBEntry(path2id(this.file), { rev: oldRev }, false, false, true);
const w2 = await db.getDBEntry(this.file, { rev: oldRev }, false, false, true);
if (w2 != false) {
const dmp = new diff_match_patch();
const w2data = w2.datatype == "plain" ? getDocData(w2.data) : base64ToString(w2.data);
@@ -102,7 +115,6 @@ export class DocumentHistoryModal extends Modal {
result = escapeStringToHTML(w1data);
}
this.contentView.innerHTML = (this.currentDeleted ? "(At this revision, the file has been deleted)\n" : "") + result;
}
}
@@ -173,7 +185,8 @@ export class DocumentHistoryModal extends Modal {
buttons.createEl("button", { text: "Back to this revision" }, (e) => {
e.addClass("mod-cta");
e.addEventListener("click", async () => {
const pathToWrite = this.file.startsWith("i:") ? this.file.substring("i:".length) : this.file;
// const pathToWrite = this.plugin.id2path(this.id, true);
const pathToWrite = stripPrefix(this.file);
if (!isValidPath(pathToWrite)) {
Logger("Path is not valid to write content.", LOG_LEVEL.INFO);
}

View File

@@ -1,15 +1,15 @@
import { App, Modal } from "./deps";
import { LoadedEntry } from "./lib/src/types";
import { FilePath, LoadedEntry } from "./lib/src/types";
import JsonResolvePane from "./JsonResolvePane.svelte";
export class JsonResolveModal extends Modal {
// result: Array<[number, string]>;
filename: string;
filename: FilePath;
callback: (keepRev: string, mergedStr?: string) => Promise<void>;
docs: LoadedEntry[];
component: JsonResolvePane;
constructor(app: App, filename: string, docs: LoadedEntry[], callback: (keepRev: string, mergedStr?: string) => Promise<void>) {
constructor(app: App, filename: FilePath, docs: LoadedEntry[], callback: (keepRev: string, mergedStr?: string) => Promise<void>) {
super(app);
this.callback = callback;
this.filename = filename;
@@ -31,6 +31,7 @@ export class JsonResolveModal extends Modal {
target: contentEl,
props: {
docs: this.docs,
filename: this.filename,
callback: (keepRev, mergedStr) => this.UICallback(keepRev, mergedStr),
},
});

View File

@@ -1,14 +1,15 @@
<script lang="ts">
import { Diff, DIFF_DELETE, DIFF_INSERT, diff_match_patch } from "diff-match-patch";
import type { LoadedEntry } from "./lib/src/types";
import type { FilePath, LoadedEntry } from "./lib/src/types";
import { base64ToString } from "./lib/src/strbin";
import { getDocData } from "./lib/src/utils";
import { id2path, mergeObject } from "./utils";
import { mergeObject } from "./utils";
export let docs: LoadedEntry[] = [];
export let callback: (keepRev: string, mergedStr?: string) => Promise<void> = async (_, __) => {
Promise.resolve();
};
export let filename: FilePath = "" as FilePath;
let docA: LoadedEntry = undefined;
let docB: LoadedEntry = undefined;
@@ -93,7 +94,6 @@
diffs = getJsonDiff(objA, selectedObj);
console.dir(selectedObj);
}
$: filename = id2path(docA?._id ?? "");
</script>
<h1>Conflicted settings</h1>

View File

@@ -1,4 +1,4 @@
import { EntryDoc } from "./lib/src/types";
import { AnyEntry, DocumentID, EntryDoc, EntryHasPath, FilePath, FilePathWithPrefix } from "./lib/src/types";
import { PouchDB } from "./lib/src/pouchdb-browser.js";
import type ObsidianLiveSyncPlugin from "./main";
@@ -14,6 +14,16 @@ export abstract class LiveSyncCommands {
get localDatabase() {
return this.plugin.localDatabase;
}
id2path(id: DocumentID, entry?: EntryHasPath, stripPrefix?: boolean): FilePathWithPrefix {
return this.plugin.id2path(id, entry, stripPrefix);
}
async path2id(filename: FilePathWithPrefix | FilePath, prefix?: string): Promise<DocumentID> {
return await this.plugin.path2id(filename, prefix);
}
getPath(entry: AnyEntry): FilePathWithPrefix {
return this.plugin.getPath(entry);
}
constructor(plugin: ObsidianLiveSyncPlugin) {
this.plugin = plugin;
}
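These delegations give every command typed access to the id/path conversions. A minimal sketch, assuming DocumentID and FilePathWithPrefix are branded string types (the real definitions live in lib/src/types and may differ), of why the explicit types announced in the commit message catch mix-ups at compile time:

// Assumed branded-type definitions; illustrative only.
declare const idTag: unique symbol;
declare const pathTag: unique symbol;
type DocumentID = string & { [idTag]: never };
type FilePathWithPrefix = string & { [pathTag]: never };

declare function getDBEntry(path: FilePathWithPrefix): Promise<unknown>;
declare const id: DocumentID;
declare const path: FilePathWithPrefix;

getDBEntry(path);  // OK
// getDBEntry(id); // compile error: a DocumentID is no longer accepted where a path is expected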

View File

@@ -1,178 +0,0 @@
import { requestUrl, RequestUrlParam, RequestUrlResponse } from "./deps";
import { KeyValueDatabase, OpenKeyValueDatabase } from "./KeyValueDB.js";
import { LocalPouchDBBase } from "./lib/src/LocalPouchDBBase.js";
import { Logger } from "./lib/src/logger.js";
import { PouchDB } from "./lib/src/pouchdb-browser.js";
import { EntryDoc, LOG_LEVEL, ObsidianLiveSyncSettings } from "./lib/src/types.js";
import { enableEncryption } from "./lib/src/utils_couchdb.js";
import { isCloudantURI, isValidRemoteCouchDBURI } from "./lib/src/utils_couchdb.js";
import { id2path, path2id } from "./utils.js";
export class LocalPouchDB extends LocalPouchDBBase {
kvDB: KeyValueDatabase;
settings: ObsidianLiveSyncSettings;
id2path(filename: string): string {
return id2path(filename);
}
path2id(filename: string): string {
return path2id(filename);
}
CreatePouchDBInstance<T>(name?: string, options?: PouchDB.Configuration.DatabaseConfiguration): PouchDB.Database<T> {
if (this.settings.useIndexedDBAdapter) {
options.adapter = "indexeddb";
return new PouchDB(name + "-indexeddb", options);
}
return new PouchDB(name, options);
}
beforeOnUnload(): void {
this.kvDB.close();
}
onClose(): void {
this.kvDB.close();
}
async onInitializeDatabase(): Promise<void> {
this.kvDB = await OpenKeyValueDatabase(this.dbname + "-livesync-kv");
}
async onResetDatabase(): Promise<void> {
await this.kvDB.destroy();
}
last_successful_post = false;
getLastPostFailedBySize() {
return !this.last_successful_post;
}
async fetchByAPI(request: RequestUrlParam): Promise<RequestUrlResponse> {
const ret = await requestUrl(request);
if (ret.status - (ret.status % 100) !== 200) {
const er: Error & { status?: number } = new Error(`Request Error:${ret.status}`);
if (ret.json) {
er.message = ret.json.reason;
er.name = `${ret.json.error ?? ""}:${ret.json.message ?? ""}`;
}
er.status = ret.status;
throw er;
}
return ret;
}
async connectRemoteCouchDB(uri: string, auth: { username: string; password: string }, disableRequestURI: boolean, passphrase: string | false, useDynamicIterationCount: boolean): Promise<string | { db: PouchDB.Database<EntryDoc>; info: PouchDB.Core.DatabaseInfo }> {
if (!isValidRemoteCouchDBURI(uri)) return "Remote URI is not valid";
if (uri.toLowerCase() != uri) return "Remote URI and database name could not contain capital letters.";
if (uri.indexOf(" ") !== -1) return "Remote URI and database name could not contain spaces.";
let authHeader = "";
if (auth.username && auth.password) {
const utf8str = String.fromCharCode.apply(null, new TextEncoder().encode(`${auth.username}:${auth.password}`));
const encoded = window.btoa(utf8str);
authHeader = "Basic " + encoded;
} else {
authHeader = "";
}
// const _this = this;
const conf: PouchDB.HttpAdapter.HttpAdapterConfiguration = {
adapter: "http",
auth,
fetch: async (url: string | Request, opts: RequestInit) => {
let size = "";
const localURL = url.toString().substring(uri.length);
const method = opts.method ?? "GET";
if (opts.body) {
const opts_length = opts.body.toString().length;
if (opts_length > 1000 * 1000 * 10) {
// over 10MB
if (isCloudantURI(uri)) {
this.last_successful_post = false;
Logger("This request should fail on IBM Cloudant.", LOG_LEVEL.VERBOSE);
throw new Error("This request should fail on IBM Cloudant.");
}
}
size = ` (${opts_length})`;
}
if (!disableRequestURI && typeof url == "string" && typeof (opts.body ?? "") == "string") {
const body = opts.body as string;
const transformedHeaders = { ...(opts.headers as Record<string, string>) };
if (authHeader != "") transformedHeaders["authorization"] = authHeader;
delete transformedHeaders["host"];
delete transformedHeaders["Host"];
delete transformedHeaders["content-length"];
delete transformedHeaders["Content-Length"];
const requestParam: RequestUrlParam = {
url: url as string,
method: opts.method,
body: body,
headers: transformedHeaders,
contentType: "application/json",
// contentType: opts.headers,
};
try {
const r = await this.fetchByAPI(requestParam);
if (method == "POST" || method == "PUT") {
this.last_successful_post = r.status - (r.status % 100) == 200;
} else {
this.last_successful_post = true;
}
Logger(`HTTP:${method}${size} to:${localURL} -> ${r.status}`, LOG_LEVEL.DEBUG);
return new Response(r.arrayBuffer, {
headers: r.headers,
status: r.status,
statusText: `${r.status}`,
});
} catch (ex) {
Logger(`HTTP:${method}${size} to:${localURL} -> failed`, LOG_LEVEL.VERBOSE);
// limit only in bulk_docs.
if (url.toString().indexOf("_bulk_docs") !== -1) {
this.last_successful_post = false;
}
Logger(ex);
throw ex;
}
}
// -old implementation
try {
const response: Response = await fetch(url, opts);
if (method == "POST" || method == "PUT") {
this.last_successful_post = response.ok;
} else {
this.last_successful_post = true;
}
Logger(`HTTP:${method}${size} to:${localURL} -> ${response.status}`, LOG_LEVEL.DEBUG);
return response;
} catch (ex) {
Logger(`HTTP:${method}${size} to:${localURL} -> failed`, LOG_LEVEL.VERBOSE);
// limit only in bulk_docs.
if (url.toString().indexOf("_bulk_docs") !== -1) {
this.last_successful_post = false;
}
Logger(ex);
throw ex;
}
// return await fetch(url, opts);
},
};
const db: PouchDB.Database<EntryDoc> = new PouchDB<EntryDoc>(uri, conf);
if (passphrase !== "false" && typeof passphrase === "string") {
enableEncryption(db, passphrase, useDynamicIterationCount);
}
try {
const info = await db.info();
return { db: db, info: info };
} catch (ex) {
let msg = `${ex.name}:${ex.message}`;
if (ex.name == "TypeError" && ex.message == "Failed to fetch") {
msg += "\n**Note** This error caused by many reasons. The only sure thing is you didn't touch the server.\nTo check details, open inspector.";
}
Logger(ex, LOG_LEVEL.VERBOSE);
return msg;
}
}
}

View File

@@ -1,6 +1,5 @@
import { App, PluginSettingTab, Setting, sanitizeHTMLToDom, RequestUrlParam, requestUrl, TextAreaComponent, MarkdownRenderer, stringifyYaml } from "./deps";
import { DEFAULT_SETTINGS, LOG_LEVEL, ObsidianLiveSyncSettings, ConfigPassphraseStore, RemoteDBSettings } from "./lib/src/types";
import { path2id, id2path } from "./utils";
import { delay } from "./lib/src/utils";
import { Semaphore } from "./lib/src/semaphore";
import { versionNumberString2Number } from "./lib/src/strbin";
@@ -75,7 +74,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
<label class='sls-setting-label c-40'><input type='radio' name='disp' value='40' class='sls-setting-tab' ><div class='sls-setting-menu-btn'>🔧</div></label>
<label class='sls-setting-label c-50 wizardHidden'><input type='radio' name='disp' value='50' class='sls-setting-tab' ><div class='sls-setting-menu-btn'>🧰</div></label>
<label class='sls-setting-label c-60 wizardHidden'><input type='radio' name='disp' value='60' class='sls-setting-tab' ><div class='sls-setting-menu-btn'>🔌</div></label>
<label class='sls-setting-label c-70 wizardHidden'><input type='radio' name='disp' value='70' class='sls-setting-tab' ><div class='sls-setting-menu-btn'>🚑</div></label>
<!-- <label class='sls-setting-label c-70 wizardHidden'><input type='radio' name='disp' value='70' class='sls-setting-tab' ><div class='sls-setting-menu-btn'>🚑</div></label>-->
`;
const menuTabs = w.querySelectorAll(".sls-setting-label");
const changeDisplay = (screen: string) => {
@@ -88,11 +87,11 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
}
w.querySelectorAll(`.sls-setting-label`).forEach((element) => {
element.removeClass("selected");
(element.querySelector("input[type=radio]") as HTMLInputElement).checked = false;
(element.querySelector<HTMLInputElement>("input[type=radio]")).checked = false;
});
w.querySelectorAll(`.sls-setting-label.c-${screen}`).forEach((element) => {
element.addClass("selected");
(element.querySelector("input[type=radio]") as HTMLInputElement).checked = true;
(element.querySelector<HTMLInputElement>("input[type=radio]")).checked = true;
});
this.selectedScreen = screen;
};
@@ -307,11 +306,13 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
this.plugin.settings.encrypt = value;
passphraseSetting.setDisabled(!value);
dynamicIteration.setDisabled(!value);
usePathObfuscationEl.setDisabled(!value);
await this.plugin.saveSettings();
} else {
encrypt = value;
passphraseSetting.setDisabled(!value);
dynamicIteration.setDisabled(!value);
usePathObfuscationEl.setDisabled(!value);
await this.plugin.saveSettings();
markDirtyControl();
}
@@ -322,7 +323,8 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
const markDirtyControl = () => {
passphraseSetting.controlEl.toggleClass("sls-item-dirty", passphrase != this.plugin.settings.passphrase);
e2e.controlEl.toggleClass("sls-item-dirty", encrypt != this.plugin.settings.encrypt);
dynamicIteration.controlEl.toggleClass("sls-item-dirty", useDynamicIterationCount != this.plugin.settings.useDynamicIterationCount)
dynamicIteration.controlEl.toggleClass("sls-item-dirty", useDynamicIterationCount != this.plugin.settings.useDynamicIterationCount);
usePathObfuscationEl.controlEl.toggleClass("sls-item-dirty", usePathObfuscation != this.plugin.settings.usePathObfuscation);
}
const passphraseSetting = new Setting(containerRemoteDatabaseEl)
@@ -345,6 +347,23 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
});
passphraseSetting.setDisabled(!encrypt);
let usePathObfuscation = this.plugin.settings.usePathObfuscation;
const usePathObfuscationEl = new Setting(containerRemoteDatabaseEl)
.setName("Path Obfuscation")
.setDesc("(Experimental) Obfuscate paths of files. If we configured, we should rebuild the database.")
.addToggle((toggle) =>
toggle.setValue(usePathObfuscation).onChange(async (value) => {
if (inWizard) {
this.plugin.settings.usePathObfuscation = value;
await this.plugin.saveSettings();
} else {
usePathObfuscation = value;
await this.plugin.saveSettings();
markDirtyControl();
}
})
);
const dynamicIteration = new Setting(containerRemoteDatabaseEl)
.setName("Use dynamic iteration count (experimental)")
.setDesc("Balancing the encryption/decryption load against the length of the passphrase if toggled. (v0.17.5 or higher required)")
@@ -410,6 +429,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
this.plugin.settings.encrypt = encrypt;
this.plugin.settings.passphrase = passphrase;
this.plugin.settings.useDynamicIterationCount = useDynamicIterationCount;
this.plugin.settings.usePathObfuscation = usePathObfuscation;
await this.plugin.saveSettings();
markDirtyControl();
@@ -430,25 +450,45 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
.setClass("wizardHidden")
.addButton((button) =>
button
.setButtonText("Apply")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await applyEncryption(true);
})
)
.addButton((button) =>
button
.setButtonText("Apply w/o rebuilding")
.setButtonText("Just apply")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await applyEncryption(false);
})
)
.addButton((button) =>
button
.setButtonText("Apply and Fetch")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await rebuildDB("localOnly");
})
)
.addButton((button) =>
button
.setButtonText("Apply and Rebuild")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await rebuildDB("rebuildBothByThisDevice");
})
);
const rebuildDB = async (method: "localOnly" | "remoteOnly" | "rebuildBothByThisDevice") => {
if (encrypt && passphrase == "") {
Logger("If you enable encryption, you have to set the passphrase", LOG_LEVEL.NOTICE);
return;
}
if (encrypt && !(await testCrypt())) {
Logger("WARNING! Your device would not support encryption.", LOG_LEVEL.NOTICE);
return;
}
if (!encrypt) {
passphrase = "";
}
this.plugin.settings.liveSync = false;
this.plugin.settings.periodicReplication = false;
this.plugin.settings.syncOnSave = false;
@@ -457,10 +497,16 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
this.plugin.settings.syncAfterMerge = false;
this.plugin.settings.syncInternalFiles = false;
this.plugin.settings.usePluginSync = false;
this.plugin.settings.encrypt = encrypt;
this.plugin.settings.passphrase = passphrase;
this.plugin.settings.useDynamicIterationCount = useDynamicIterationCount;
this.plugin.settings.usePathObfuscation = usePathObfuscation;
Logger("Hidden files and plugin synchronization have been temporarily disabled. Please enable them after the fetching, if you need them.", LOG_LEVEL.NOTICE)
await this.plugin.saveSettings();
markDirtyControl();
applyDisplayEnabled();
// @ts-ignore
this.plugin.app.setting.close()
await delay(2000);
if (method == "localOnly") {
await this.plugin.resetLocalDatabase();
@@ -1729,72 +1775,75 @@ ${stringifyYaml(pluginConfig)}`;
addScreenElement("60", containerPluginSettings);
const containerCorruptedDataEl = containerEl.createDiv();
// const containerCorruptedDataEl = containerEl.createDiv();
containerCorruptedDataEl.createEl("h3", { text: "Corrupted or missing data" });
containerCorruptedDataEl.createEl("h4", { text: "Corrupted" });
if (Object.keys(this.plugin.localDatabase.corruptedEntries).length > 0) {
const cx = containerCorruptedDataEl.createEl("div", { text: "If you have a copy of these files on any device, simply edit them once and sync. If not, there's nothing we can do except deleting them. sorry.." });
for (const k in this.plugin.localDatabase.corruptedEntries) {
const xx = cx.createEl("div", { text: `${k}` });
// containerCorruptedDataEl.createEl("h3", { text: "Corrupted or missing data" });
// containerCorruptedDataEl.createEl("h4", { text: "Corrupted" });
// if (Object.keys(this.plugin.localDatabase.corruptedEntries).length > 0) {
// const cx = containerCorruptedDataEl.createEl("div", { text: "If you have a copy of these files on any device, simply edit them once and sync. If not, there's nothing we can do except deleting them. sorry.." });
// for (const k in this.plugin.localDatabase.corruptedEntries) {
// const xx = cx.createEl("div", { text: `${k}` });
const ba = xx.createEl("button", { text: `Delete this` }, (e) => {
e.addEventListener("click", async () => {
await this.plugin.localDatabase.deleteDBEntry(k);
xx.remove();
});
});
ba.addClass("mod-warning");
xx.createEl("button", { text: `Restore from file` }, (e) => {
e.addEventListener("click", async () => {
const f = await this.app.vault.getFiles().filter((e) => path2id(e.path) == k);
if (f.length == 0) {
Logger("Not found in vault", LOG_LEVEL.NOTICE);
return;
}
await this.plugin.updateIntoDB(f[0]);
xx.remove();
});
});
xx.addClass("mod-warning");
}
} else {
containerCorruptedDataEl.createEl("div", { text: "There is no corrupted data." });
}
containerCorruptedDataEl.createEl("h4", { text: "Missing or waiting" });
if (Object.keys(this.plugin.queuedFiles).length > 0) {
const cx = containerCorruptedDataEl.createEl("div", {
text: "These files have missing or waiting chunks. Perhaps these chunks will arrive in a while after replication. But if they don't, you have to restore it's database entry from a existing local file by hitting the button below.",
});
const files = [...new Set([...this.plugin.queuedFiles.map((e) => e.entry._id)])];
for (const k of files) {
const xx = cx.createEl("div", { text: `${id2path(k)}` });
// const ba = xx.createEl("button", { text: `Delete this` }, (e) => {
// e.addEventListener("click", async () => {
// await this.plugin.localDatabase.deleteDBEntry(k as string as FilePathWithPrefix /* should be explained */);
// xx.remove();
// });
// });
// ba.addClass("mod-warning");
// //TODO: FIX LATER
// // xx.createEl("button", { text: `Restore from file` }, (e) => {
// // e.addEventListener("click", async () => {
// // const f = await this.app.vault.getFiles().filter((e) => this.plugin.path2id(e.path) == k);
// // if (f.length == 0) {
// // Logger("Not found in vault", LOG_LEVEL.NOTICE);
// // return;
// // }
// // await this.plugin.updateIntoDB(f[0]);
// // xx.remove();
// // });
// // });
// // xx.addClass("mod-warning");
// }
// } else {
// containerCorruptedDataEl.createEl("div", { text: "There is no corrupted data." });
// }
// containerCorruptedDataEl.createEl("h4", { text: "Missing or waiting" });
// if (Object.keys(this.plugin.queuedFiles).length > 0) {
// const cx = containerCorruptedDataEl.createEl("div", {
// text: "These files have missing or waiting chunks. Perhaps these chunks will arrive in a while after replication. But if they don't, you have to restore it's database entry from a existing local file by hitting the button below.",
// });
// const files = [...new Set([...this.plugin.queuedFiles.map((e) => e.entry._id)])];
// for (const k of files) {
// const xx = cx.createEl("div", { text: `${this.plugin.id2path(k)}` });
// const ba = xx.createEl("button", { text: `Delete this` }, (e) => {
// e.addEventListener("click", async () => {
// await this.plugin.localDatabase.deleteDBEntry(k);
// xx.remove();
// });
// });
// ba.addClass("mod-warning");
// xx.createEl("button", { text: `Restore from file` }, (e) => {
// e.addEventListener("click", async () => {
// const f = await this.app.vault.getFiles().filter((e) => this.plugin.path2id(e.path) == k);
// if (f.length == 0) {
// Logger("Not found in vault", LOG_LEVEL.NOTICE);
// return;
// }
// await this.plugin.updateIntoDB(f[0]);
// xx.remove();
// });
// });
// xx.addClass("mod-warning");
// }
// } else {
// containerCorruptedDataEl.createEl("div", { text: "There is no missing or waiting chunk." });
// }
// applyDisplayEnabled();
// addScreenElement("70", containerCorruptedDataEl);
const ba = xx.createEl("button", { text: `Delete this` }, (e) => {
e.addEventListener("click", async () => {
await this.plugin.localDatabase.deleteDBEntry(k);
xx.remove();
});
});
ba.addClass("mod-warning");
xx.createEl("button", { text: `Restore from file` }, (e) => {
e.addEventListener("click", async () => {
const f = await this.app.vault.getFiles().filter((e) => path2id(e.path) == k);
if (f.length == 0) {
Logger("Not found in vault", LOG_LEVEL.NOTICE);
return;
}
await this.plugin.updateIntoDB(f[0]);
xx.remove();
});
});
xx.addClass("mod-warning");
}
} else {
containerCorruptedDataEl.createEl("div", { text: "There is no missing or waiting chunk." });
}
applyDisplayEnabled();
addScreenElement("70", containerCorruptedDataEl);
if (this.selectedScreen == "") {
if (lastVersion != this.plugin.settings.lastReadUpdates) {
if (JSON.stringify(this.plugin.settings) != JSON.stringify(DEFAULT_SETTINGS)) {

View File

@@ -1,4 +1,11 @@
import { FilePath } from "./lib/src/types";
export {
addIcon, App, DataWriteOptions, debounce, Editor, FuzzySuggestModal, MarkdownRenderer, MarkdownView, Modal, normalizePath, Notice, Platform, Plugin, PluginManifest,
PluginSettingTab, Plugin_2, requestUrl, RequestUrlParam, RequestUrlResponse, sanitizeHTMLToDom, Setting, stringifyYaml, TAbstractFile, TextAreaComponent, TFile, TFolder
addIcon, App, DataWriteOptions, debounce, Editor, FuzzySuggestModal, MarkdownRenderer, MarkdownView, Modal, Notice, Platform, Plugin, PluginManifest,
PluginSettingTab, Plugin_2, requestUrl, RequestUrlParam, RequestUrlResponse, sanitizeHTMLToDom, Setting, stringifyYaml, TAbstractFile, TextAreaComponent, TFile, TFolder,
} from "obsidian";
import {
normalizePath as normalizePath_
} from "obsidian";
const normalizePath = normalizePath_ as <T extends string | FilePath>(from: T) => T;
export { normalizePath }
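A note on the cast above: Obsidian declares normalizePath as (path: string) => string, which would erase branded path types on the way through. Re-typing it as a generic keeps the input's type. A small usage sketch (the FilePath brand itself is assumed to be a tagged string type):

// Sketch of why the generic signature matters.
import { normalizePath } from "./deps";
import { FilePath } from "./lib/src/types";

const raw = "folder//note.md" as FilePath;   // brand applied once, up front
const clean: FilePath = normalizePath(raw);  // result keeps the FilePath brand; no re-assertion needed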

Submodule src/lib updated: d103106931...f644c8dfc3

View File

@@ -2,7 +2,7 @@ const isDebug = false;
import { Diff, DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, diff_match_patch } from "diff-match-patch";
import { debounce, Notice, Plugin, TFile, addIcon, TFolder, normalizePath, TAbstractFile, Editor, MarkdownView, RequestUrlParam, RequestUrlResponse, requestUrl } from "./deps";
import { EntryDoc, LoadedEntry, ObsidianLiveSyncSettings, diff_check_result, diff_result_leaf, EntryBody, LOG_LEVEL, VER, DEFAULT_SETTINGS, diff_result, FLAGMD_REDFLAG, SYNCINFO_ID, SALT_OF_PASSPHRASE, ConfigPassphraseStore, CouchDBConnection, FLAGMD_REDFLAG2, FLAGMD_REDFLAG3, PREFIXMD_LOGFILE, DatabaseConnectingStatus } from "./lib/src/types";
import { EntryDoc, LoadedEntry, ObsidianLiveSyncSettings, diff_check_result, diff_result_leaf, EntryBody, LOG_LEVEL, VER, DEFAULT_SETTINGS, diff_result, FLAGMD_REDFLAG, SYNCINFO_ID, SALT_OF_PASSPHRASE, ConfigPassphraseStore, CouchDBConnection, FLAGMD_REDFLAG2, FLAGMD_REDFLAG3, PREFIXMD_LOGFILE, DatabaseConnectingStatus, EntryHasPath, DocumentID, FilePathWithPrefix, FilePath, AnyEntry } from "./lib/src/types";
import { InternalFileInfo, queueItem, CacheData, FileEventItem, FileWatchEventQueueMax } from "./types";
import { delay, getDocData, isDocContentSame } from "./lib/src/utils";
import { Logger } from "./lib/src/logger";
@@ -11,14 +11,14 @@ import { LogDisplayModal } from "./LogDisplayModal";
import { ConflictResolveModal } from "./ConflictResolveModal";
import { ObsidianLiveSyncSettingTab } from "./ObsidianLiveSyncSettingTab";
import { DocumentHistoryModal } from "./DocumentHistoryModal";
import { applyPatch, cancelAllPeriodicTask, cancelAllTasks, cancelTask, generatePatchObj, id2path, isObjectMargeApplicable, isSensibleMargeApplicable, flattenObject, path2id, scheduleTask, tryParseJSON, createFile, modifyFile, isValidPath, getAbstractFileByPath, touch, recentlyTouched, isInternalMetadata, isPluginMetadata, id2filenameInternalMetadata, isChunk, askSelectString, askYesNo, askString, PeriodicProcessor, clearTouched } from "./utils";
import { applyPatch, cancelAllPeriodicTask, cancelAllTasks, cancelTask, generatePatchObj, id2path, isObjectMargeApplicable, isSensibleMargeApplicable, flattenObject, path2id, scheduleTask, tryParseJSON, createFile, modifyFile, isValidPath, getAbstractFileByPath, touch, recentlyTouched, isIdOfInternalMetadata, isPluginMetadata, stripInternalMetadataPrefix, isChunk, askSelectString, askYesNo, askString, PeriodicProcessor, clearTouched, getPath, getPathWithoutPrefix, getPathFromTFile } from "./utils";
import { encrypt, tryDecrypt } from "./lib/src/e2ee_v2";
import { enableEncryption, isCloudantURI, isErrorOfMissingDoc, isValidRemoteCouchDBURI } from "./lib/src/utils_couchdb";
import { getGlobalStore, ObservableStore, observeStores } from "./lib/src/store";
import { lockStore, logMessageStore, logStore } from "./lib/src/stores";
import { setNoticeClass } from "./lib/src/wrapper";
import { base64ToString, versionNumberString2Number, base64ToArrayBuffer, arrayBufferToBase64 } from "./lib/src/strbin";
import { isPlainText, shouldBeIgnored } from "./lib/src/path";
import { addPrefix, isPlainText, shouldBeIgnored, stripAllPrefixes } from "./lib/src/path";
import { runWithLock } from "./lib/src/lock";
import { Semaphore } from "./lib/src/semaphore";
import { StorageEventManager, StorageEventManagerObsidian } from "./StorageEventManager";
@@ -32,8 +32,6 @@ import { SetupLiveSync } from "./CmdSetupLiveSync";
setNoticeClass(Notice);
export default class ObsidianLiveSyncPlugin extends Plugin
implements LiveSyncLocalDBEnv, LiveSyncReplicatorEnv {
@@ -131,7 +129,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
delete transformedHeaders["content-length"];
delete transformedHeaders["Content-Length"];
const requestParam: RequestUrlParam = {
url: url as string,
url,
method: opts.method,
body: body,
headers: transformedHeaders,
@@ -205,12 +203,25 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
}
id2path(filename: string): string {
return id2path(filename);
id2path(id: DocumentID, entry: EntryHasPath, stripPrefix?: boolean): FilePathWithPrefix {
const tempId = id2path(id, entry);
if (stripPrefix && isIdOfInternalMetadata(tempId)) {
const out = stripInternalMetadataPrefix(tempId);
return out;
}
return tempId;
}
path2id(filename: string): string {
return path2id(filename);
getPath(entry: AnyEntry) {
return getPath(entry);
}
getPathWithoutPrefix(entry: AnyEntry) {
return getPathWithoutPrefix(entry);
}
async path2id(filename: FilePathWithPrefix | FilePath, prefix?: string): Promise<DocumentID> {
const destPath = addPrefix(filename, prefix);
return await path2id(destPath, this.settings.usePathObfuscation ? this.settings.passphrase : "");
}
createPouchDBInstance<T>(name?: string, options?: PouchDB.Configuration.DatabaseConfiguration): PouchDB.Database<T> {
if (this.settings.useIndexedDBAdapter) {
options.adapter = "indexeddb";
@@ -291,44 +302,46 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
}
showHistory(file: TFile | string) {
new DocumentHistoryModal(this.app, this, file).open();
showHistory(file: TFile | FilePathWithPrefix, id: DocumentID) {
new DocumentHistoryModal(this.app, this, file, id).open();
}
async fileHistory() {
const notes: { path: string, mtime: number }[] = [];
const notes: { id: DocumentID, path: FilePathWithPrefix, dispPath: string, mtime: number }[] = [];
for await (const doc of this.localDatabase.findAllDocs()) {
notes.push({ path: id2path(doc._id), mtime: doc.mtime });
notes.push({ id: doc._id, path: this.getPath(doc), dispPath: this.getPathWithoutPrefix(doc), mtime: doc.mtime });
}
notes.sort((a, b) => b.mtime - a.mtime);
const notesList = notes.map(e => e.path);
const notesList = notes.map(e => e.dispPath);
const target = await askSelectString(this.app, "File to view History", notesList);
if (target) {
this.showHistory(target);
const targetId = notes.find(e => e.dispPath == target);
this.showHistory(targetId.path, undefined);
}
}
async pickFileForResolve() {
const notes: { path: string, mtime: number }[] = [];
const notes: { id: DocumentID, path: FilePathWithPrefix, dispPath: string, mtime: number }[] = [];
for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
if (!("_conflicts" in doc)) continue;
notes.push({ path: id2path(doc._id), mtime: doc.mtime });
notes.push({ id: doc._id, path: this.getPath(doc), dispPath: this.getPathWithoutPrefix(doc), mtime: doc.mtime });
}
notes.sort((a, b) => b.mtime - a.mtime);
const notesList = notes.map(e => e.path);
const notesList = notes.map(e => e.dispPath);
if (notesList.length == 0) {
Logger("There are no conflicted documents", LOG_LEVEL.NOTICE);
return false;
}
const target = await askSelectString(this.app, "File to view History", notesList);
if (target) {
await this.resolveConflicted(target);
const targetItem = notes.find(e => e.dispPath == target);
await this.resolveConflicted(targetItem.path);
return true;
}
return false;
}
async resolveConflicted(target: string) {
if (isInternalMetadata(target)) {
async resolveConflicted(target: FilePathWithPrefix) {
if (isIdOfInternalMetadata(target)) {
await this.addOnHiddenFileSync.resolveConflictOnInternalFile(target);
} else if (isPluginMetadata(target)) {
await this.resolveConflictByNewerEntry(target);
@@ -346,7 +359,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
if (doc.type == "newnote" || doc.type == "plain") {
if (doc.deleted && (doc.mtime - limit) < 0) {
notes.push({ path: id2path(doc._id), mtime: doc.mtime, ttl: (doc.mtime - limit) / 1000 / 86400, doc: doc });
notes.push({ path: this.getPath(doc), mtime: doc.mtime, ttl: (doc.mtime - limit) / 1000 / 86400, doc: doc });
}
}
}
@@ -360,8 +373,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
Logger(`Deletion history expired: ${v.path}`);
const delDoc = v.doc;
delDoc._deleted = true;
// console.dir(delDoc);
await this.localDatabase.localDatabase.put(delDoc);
await this.localDatabase.putRaw(delDoc);
}
Logger(`Checking expired file history done`);
}
@@ -453,7 +465,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
Logger(`Additional safety scan..`, LOG_LEVEL.VERBOSE);
for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
if (!("_conflicts" in doc)) continue;
notes.push({ path: id2path(doc._id), mtime: doc.mtime });
notes.push({ path: this.getPath(doc), mtime: doc.mtime });
}
if (notes.length > 0) {
Logger(`Some files have been left conflicted! Please resolve them by "Pick a file to resolve conflict". The list is written in the log.`, LOG_LEVEL.NOTICE);
@@ -555,14 +567,14 @@ export default class ObsidianLiveSyncPlugin extends Plugin
id: "livesync-dump",
name: "Dump information of this doc ",
editorCallback: (editor: Editor, view: MarkdownView) => {
this.localDatabase.getDBEntry(view.file.path, {}, true, false);
this.localDatabase.getDBEntry(getPathFromTFile(view.file), {}, true, false);
},
});
this.addCommand({
id: "livesync-checkdoc-conflicted",
name: "Resolve if conflicted.",
editorCallback: async (editor: Editor, view: MarkdownView) => {
await this.showIfConflicted(view.file.path);
await this.showIfConflicted(getPathFromTFile(view.file));
},
});
@@ -600,7 +612,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
id: "livesync-history",
name: "Show history",
editorCallback: (editor: Editor, view: MarkdownView) => {
this.showHistory(view.file);
this.showHistory(view.file, null);
},
});
this.addCommand({
@@ -766,6 +778,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
this.settings = settings;
if ("workingEncrypt" in this.settings) delete this.settings.workingEncrypt;
if ("workingPassphrase" in this.settings) delete this.settings.workingPassphrase;
@@ -953,7 +966,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if (this.settings.syncOnFileOpen && !this.suspended) {
await this.replicate();
}
await this.showIfConflicted(file.path);
await this.showIfConflicted(getPathFromTFile(file));
}
async applyBatchChange() {
@@ -1012,6 +1025,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const messageContent = typeof message == "string" ? message : message instanceof Error ? `${message.name}:${message.message}` : JSON.stringify(message, null, 2);
if (message instanceof Error) {
// debugger;
console.dir(message.stack);
}
const newMessage = timestamp + "->" + messageContent;
@@ -1091,21 +1105,21 @@ export default class ObsidianLiveSyncPlugin extends Plugin
async doc2storage(docEntry: EntryBody, file?: TFile, force?: boolean) {
const mode = file == undefined ? "create" : "modify";
const pathSrc = id2path(docEntry._id);
if (shouldBeIgnored(pathSrc)) {
const path = this.getPath(docEntry);
if (shouldBeIgnored(path)) {
return;
}
if (!this.isTargetFile(pathSrc)) return;
if (!this.isTargetFile(path)) return;
if (docEntry._deleted || docEntry.deleted) {
// This occurs not only when files are deleted, but also when conflicts are resolved.
// We have to check no other revisions are left.
const lastDocs = await this.localDatabase.getDBEntry(pathSrc);
const lastDocs = await this.localDatabase.getDBEntry(path);
if (lastDocs === false) {
await this.deleteVaultItem(file);
} else {
// It perhaps deletes some revisions.
// Maybe we have to reload this.
await this.pullFile(pathSrc, null, true);
await this.pullFile(path, null, true);
Logger(`delete skipped:${lastDocs._id}`, LOG_LEVEL.VERBOSE);
}
return;
@@ -1113,9 +1127,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const localMtime = ~~((file?.stat?.mtime || 0) / 1000);
const docMtime = ~~(docEntry.mtime / 1000);
const doc = await this.localDatabase.getDBEntry(pathSrc, { rev: docEntry._rev });
const doc = await this.localDatabase.getDBEntry(path, { rev: docEntry._rev });
if (doc === false) return;
const path = id2path(doc._id);
const msg = `DB -> STORAGE (${mode}${force ? ",force" : ""},${doc.datatype}) `;
if (doc.datatype != "newnote" && doc.datatype != "plain") {
Logger(msg + "ERROR, Invalid datatype: " + path + "(" + doc.datatype + ")", LOG_LEVEL.NOTICE);
@@ -1134,7 +1147,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
outFile = await createFile(normalizePath(path), writeData, { ctime: doc.ctime, mtime: doc.mtime, });
} else {
await modifyFile(file, writeData, { ctime: doc.ctime, mtime: doc.mtime });
outFile = getAbstractFileByPath(file.path) as TFile;
outFile = getAbstractFileByPath(getPathFromTFile(file)) as TFile;
}
Logger(msg + path);
touch(outFile);
@@ -1172,7 +1185,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
handleDBChanged(change: EntryBody) {
// If the file is opened, we have to apply immediately
const af = app.workspace.getActiveFile();
if (af && af.path == id2path(change._id)) {
if (af && af.path == this.getPath(change)) {
this.queuedEntries = this.queuedEntries.filter(e => e._id != change._id);
return this.handleDBChangedAsync(change);
}
@@ -1188,15 +1201,16 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const entry = this.queuedEntries.shift();
// If the same file is to be manipulated, leave it to the last process.
if (this.queuedEntries.some(e => e._id == entry._id)) continue;
const path = getPath(entry);
try {
const releaser = await semaphore.acquire(1);
runWithLock(`dbchanged-${entry._id}`, false, async () => {
Logger(`Applying ${entry._id} (${entry._rev}) change...`, LOG_LEVEL.VERBOSE);
runWithLock(`dbchanged-${path}`, false, async () => {
Logger(`Applying ${path} (${entry._id}: ${entry._rev}) change...`, LOG_LEVEL.VERBOSE);
await this.handleDBChangedAsync(entry);
Logger(`Applied ${entry._id} (${entry._rev}) change...`);
Logger(`Applied ${path} (${entry._id}:${entry._rev}) change...`);
}).finally(() => { releaser(); });
} catch (ex) {
Logger(`Failed to apply the change of ${entry._id} (${entry._rev})`);
Logger(`Failed to apply the change of ${path} (${entry._id}:${entry._rev})`);
}
} while (this.queuedEntries.length > 0);
} finally {
@@ -1205,7 +1219,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
async handleDBChangedAsync(change: EntryBody) {
const targetFile = getAbstractFileByPath(id2path(change._id));
const targetFile = getAbstractFileByPath(this.getPathWithoutPrefix(change));
if (targetFile == null) {
if (change._deleted || change.deleted) {
return;
@@ -1232,17 +1246,17 @@ export default class ObsidianLiveSyncPlugin extends Plugin
await this.doc2storage(doc, file);
queueConflictCheck();
} else {
const d = await this.localDatabase.getDBEntryMeta(id2path(change._id), { conflicts: true }, true);
const d = await this.localDatabase.getDBEntryMeta(this.getPath(change), { conflicts: true }, true);
if (d && !d._conflicts) {
await this.doc2storage(doc, file);
} else {
if (!queueConflictCheck()) {
Logger(`${id2path(change._id)} is conflicted, write to the storage has been pended.`, LOG_LEVEL.NOTICE);
Logger(`${this.getPath(change)} is conflicted, writing to the storage has been postponed.`, LOG_LEVEL.NOTICE);
}
}
}
} else {
Logger(`${id2path(change._id)} is already exist as the folder`);
Logger(`${this.getPath(change)} already exists as a folder`);
}
}
@@ -1258,7 +1272,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
async loadQueuedFiles() {
const lsKey = "obsidian-livesync-queuefiles-" + this.getVaultName();
const ids = JSON.parse(localStorage.getItem(lsKey) || "[]") as string[];
const ret = await this.localDatabase.localDatabase.allDocs({ keys: ids, include_docs: true });
const ret = await this.localDatabase.allDocsRaw<EntryDoc>({ keys: ids, include_docs: true });
for (const doc of ret.rows) {
if (doc.doc && !this.queuedFiles.some((e) => e.entry._id == doc.doc._id)) {
await this.parseIncomingDoc(doc.doc as PouchDB.Core.ExistingDocument<EntryBody & PouchDB.Core.AllDocsMeta>);
@@ -1274,11 +1288,11 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const now = new Date().getTime();
if (queue.missingChildren.length == 0) {
queue.done = true;
if (isInternalMetadata(queue.entry._id)) {
if (isIdOfInternalMetadata(queue.entry._id)) {
//system file
const filename = id2path(id2filenameInternalMetadata(queue.entry._id));
const filename = this.getPathWithoutPrefix(queue.entry);
this.addOnHiddenFileSync.procInternalFile(filename);
} else if (isValidPath(id2path(queue.entry._id))) {
} else if (isValidPath(this.getPath(queue.entry))) {
this.handleDBChanged(queue.entry);
} else {
Logger(`Skipped: ${queue.entry._id}`, LOG_LEVEL.VERBOSE);
@@ -1315,10 +1329,11 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if (isNewFileCompleted) this.procQueuedFiles();
}
async parseIncomingDoc(doc: PouchDB.Core.ExistingDocument<EntryBody>) {
if (!this.isTargetFile(id2path(doc._id))) return;
const path = this.getPath(doc);
if (!this.isTargetFile(path)) return;
const skipOldFile = this.settings.skipOlderFilesOnSync && false; //patched temporary.
// Do not handle internal files if the feature has not been enabled.
if (isInternalMetadata(doc._id) && !this.settings.syncInternalFiles) return;
if (isIdOfInternalMetadata(doc._id) && !this.settings.syncInternalFiles) return;
// For your own safety, it is better not to handle the following files
const ignoreFiles = [
"_design/replicate",
@@ -1326,15 +1341,15 @@ export default class ObsidianLiveSyncPlugin extends Plugin
FLAGMD_REDFLAG2,
FLAGMD_REDFLAG3
];
if (!isInternalMetadata(doc._id) && ignoreFiles.contains(id2path(doc._id))) {
if (!isIdOfInternalMetadata(doc._id) && ignoreFiles.contains(path)) {
return;
}
if ((!isInternalMetadata(doc._id)) && skipOldFile) {
const info = getAbstractFileByPath(id2path(doc._id));
if ((!isIdOfInternalMetadata(doc._id)) && skipOldFile) {
const info = getAbstractFileByPath(stripAllPrefixes(path));
if (info && info instanceof TFile) {
const localMtime = ~~((info as TFile).stat.mtime / 1000);
const localMtime = ~~(info.stat.mtime / 1000);
const docMtime = ~~(doc.mtime / 1000);
//TODO: some margin required.
if (localMtime >= docMtime) {
@@ -1351,7 +1366,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
};
// If `Read chunks online` is enabled, retrieve chunks from the remote CouchDB directly.
if ((!this.settings.readChunksOnline) && "children" in doc) {
const c = await this.localDatabase.localDatabase.allDocs({ keys: doc.children, include_docs: false });
const c = await this.localDatabase.allDocsRaw<EntryDoc>({ keys: doc.children, include_docs: false });
const missing = c.rows.filter((e) => "error" in e).map((e) => e.key);
// fetch from remote
if (missing.length > 0) Logger(`${doc._id}(${doc._rev}) Queued (waiting ${missing.length} items)`, LOG_LEVEL.VERBOSE);
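// Aside (illustrative sketch, not part of this commit): the missing-chunk lookup above passes the
// child chunk ids as `keys` to an allDocs query; rows that come back with an "error" field
// (e.g. "not_found") are the chunks that still have to be fetched from the remote database.
async function findMissingChunks(db: PouchDB.Database, chunkIds: string[]): Promise<string[]> {
    const result = await db.allDocs({ keys: chunkIds, include_docs: false });
    return result.rows.filter((row) => "error" in row).map((row) => row.key);
}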
@@ -1590,10 +1605,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const filesStorage = this.app.vault.getFiles().filter(e => this.isTargetFile(e));
const filesStorageName = filesStorage.map((e) => e.path);
Logger("Collecting local files on the DB", LOG_LEVEL.VERBOSE);
const filesDatabase = [] as string[]
for await (const docId of this.localDatabase.findAllDocNames()) {
const path = id2path(docId);
if (isValidPath(docId) && this.isTargetFile(path)) {
const filesDatabase = [] as FilePathWithPrefix[]
for await (const doc of this.localDatabase.findAllNormalDocs()) {
const path = getPath(doc);
if (isValidPath(path) && this.isTargetFile(path)) {
filesDatabase.push(path);
}
}
@@ -1605,7 +1620,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
initialScan = true;
Logger("Database looks empty, save files as initial sync data");
}
const onlyInStorage = filesStorage.filter((e) => filesDatabase.indexOf(e.path) == -1);
const onlyInStorage = filesStorage.filter((e) => filesDatabase.indexOf(getPathFromTFile(e)) == -1);
const onlyInDatabase = filesDatabase.filter((e) => filesStorageName.indexOf(e) == -1);
const onlyInStorageNames = onlyInStorage.map((e) => e.path);
@@ -1659,9 +1674,16 @@ export default class ObsidianLiveSyncPlugin extends Plugin
caches = await this.kvDB.get<{ [key: string]: { storageMtime: number; docMtime: number } }>("diff-caches") || {};
const docsCount = syncFiles.length;
do {
const syncFilesX = syncFiles.splice(0, 100);
const docs = await this.localDatabase.localDatabase.allDocs({ keys: syncFilesX.map(e => path2id(e.path)), include_docs: true })
const syncFilesToSync = syncFilesX.map((e) => ({ file: e, doc: docs.rows.find(ee => ee.id == path2id(e.path)).doc as LoadedEntry }));
const syncFilesXSrc = syncFiles.splice(0, 100);
const syncFilesX = [] as { file: TFile, id: DocumentID }[];
for (const file of syncFilesXSrc) {
const id = await this.path2id(getPathFromTFile(file));
syncFilesX.push({ file: file, id: id });
}
const docs = await this.localDatabase.allDocsRaw<EntryDoc>({ keys: syncFilesX.map(e => e.id), include_docs: true })
const docsMap = docs.rows.reduce((p, c) => ({ ...p, [c.id]: c.doc }), {} as Record<DocumentID, EntryDoc>)
const syncFilesToSync = syncFilesX.map((e) => ({ file: e.file, doc: docsMap[e.id] as LoadedEntry }));
await runAll(`CHECK FILE STATUS:${syncFiles.length}/${docsCount}`, syncFilesToSync, async (e) => {
caches = await this.syncFileBetweenDBandStorage(e.file, e.doc, initialScan, caches);
});
@@ -1680,7 +1702,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
// --> conflict resolving
async getConflictedDoc(path: string, rev: string): Promise<false | diff_result_leaf> {
async getConflictedDoc(path: FilePathWithPrefix, rev: string): Promise<false | diff_result_leaf> {
try {
const doc = await this.localDatabase.getDBEntry(path, { rev: rev }, false, false, true);
if (doc === false) return false;
@@ -1705,7 +1727,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
return false;
}
//TODO: TIDY UP
async mergeSensibly(path: string, baseRev: string, currentRev: string, conflictedRev: string): Promise<Diff[] | false> {
async mergeSensibly(path: FilePathWithPrefix, baseRev: string, currentRev: string, conflictedRev: string): Promise<Diff[] | false> {
const baseLeaf = await this.getConflictedDoc(path, baseRev);
const leftLeaf = await this.getConflictedDoc(path, currentRev);
const rightLeaf = await this.getConflictedDoc(path, conflictedRev);
@@ -1862,7 +1884,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
}
async mergeObject(path: string, baseRev: string, currentRev: string, conflictedRev: string): Promise<string | false> {
async mergeObject(path: FilePathWithPrefix, baseRev: string, currentRev: string, conflictedRev: string): Promise<string | false> {
try {
const baseLeaf = await this.getConflictedDoc(path, baseRev);
const leftLeaf = await this.getConflictedDoc(path, currentRev);
@@ -1917,7 +1939,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
* @param path the file location
* @returns true when resolved, false when there is nothing to do, otherwise the check result.
*/
async getConflictedStatus(path: string): Promise<diff_check_result> {
async getConflictedStatus(path: FilePathWithPrefix): Promise<diff_check_result> {
const test = await this.localDatabase.getDBEntry(path, { conflicts: true, revs_info: true }, false, false, true);
if (test === false) return false;
if (test == null) return false;
@@ -1928,7 +1950,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const conflictedRev = conflicts[0];
const conflictedRevNo = Number(conflictedRev.split("-")[0]);
//Search
const revFrom = (await this.localDatabase.localDatabase.get<EntryDoc>(path2id(path), { revs_info: true }));
const revFrom = (await this.localDatabase.getRaw<EntryDoc>(await this.path2id(path), { revs_info: true }));
const commonBase = revFrom._revs_info.filter(e => e.status == "available" && Number(e.rev.split("-")[0]) < conflictedRevNo).first()?.rev ?? "";
let p = undefined;
if (commonBase) {
@@ -1956,7 +1978,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
// remove conflicted revision.
await this.localDatabase.deleteDBEntry(path, { rev: conflictedRev });
const file = getAbstractFileByPath(path) as TFile;
const file = getAbstractFileByPath(stripAllPrefixes(path)) as TFile;
if (file) {
await this.app.vault.modify(file, p);
await this.updateIntoDB(file);
@@ -2020,7 +2042,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
};
}
showMergeDialog(filename: string, conflictCheckResult: diff_result): Promise<boolean> {
showMergeDialog(filename: FilePathWithPrefix, conflictCheckResult: diff_result): Promise<boolean> {
return new Promise((res, rej) => {
Logger("open conflict dialog", LOG_LEVEL.VERBOSE);
new ConflictResolveModal(this.app, filename, conflictCheckResult, async (selected) => {
@@ -2040,7 +2062,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
// delete conflicted revision and write a new file, store it again.
const p = conflictCheckResult.diff.map((e) => e[1]).join("");
await this.localDatabase.deleteDBEntry(filename, { rev: testDoc._conflicts[0] });
const file = getAbstractFileByPath(filename) as TFile;
const file = getAbstractFileByPath(stripAllPrefixes(filename)) as TFile;
if (file) {
await this.app.vault.modify(file, p);
await this.updateIntoDB(file);
@@ -2076,25 +2098,25 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}).open();
});
}
conflictedCheckFiles: string[] = [];
conflictedCheckFiles: FilePath[] = [];
// queueing the conflicted file check
conflictedCheckTimer: number;
queueConflictedCheck(file: TFile) {
this.conflictedCheckFiles = this.conflictedCheckFiles.filter((e) => e != file.path);
this.conflictedCheckFiles.push(file.path);
this.conflictedCheckFiles.push(getPathFromTFile(file));
if (this.conflictedCheckTimer != null) {
window.clearTimeout(this.conflictedCheckTimer);
}
this.conflictedCheckTimer = window.setTimeout(async () => {
this.conflictedCheckTimer = null;
const checkFiles = JSON.parse(JSON.stringify(this.conflictedCheckFiles)) as string[];
const checkFiles = JSON.parse(JSON.stringify(this.conflictedCheckFiles)) as FilePath[];
for (const filename of checkFiles) {
try {
const file = getAbstractFileByPath(filename);
if (file != null && file instanceof TFile) {
await this.showIfConflicted(file.path);
await this.showIfConflicted(getPathFromTFile(file));
}
} catch (ex) {
Logger(ex);
@@ -2103,7 +2125,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}, 100);
}
async showIfConflicted(filename: string) {
async showIfConflicted(filename: FilePathWithPrefix) {
await runWithLock("conflicted", false, async () => {
const conflictCheckResult = await this.getConflictedStatus(filename);
if (conflictCheckResult === false) {
@@ -2126,9 +2148,9 @@ export default class ObsidianLiveSyncPlugin extends Plugin
});
}
async pullFile(filename: string, fileList?: TFile[], force?: boolean, rev?: string, waitForReady = true) {
const targetFile = getAbstractFileByPath(id2path(filename));
if (!this.isTargetFile(id2path(filename))) return;
async pullFile(filename: FilePathWithPrefix, fileList?: TFile[], force?: boolean, rev?: string, waitForReady = true) {
const targetFile = getAbstractFileByPath(stripAllPrefixes(filename));
if (!this.isTargetFile(filename)) return;
if (targetFile == null) {
//have to create;
const doc = await this.localDatabase.getDBEntry(filename, rev ? { rev: rev } : null, false, waitForReady);
@@ -2186,7 +2208,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
//newer database file.
Logger("STORAGE <- DB :" + file.path);
Logger(`${storageMtime} < ${docMtime}`);
const docx = await this.localDatabase.getDBEntry(file.path, null, false, false);
const docx = await this.localDatabase.getDBEntry(getPathFromTFile(file), null, false, false);
if (docx != false) {
await this.doc2storage(docx, file);
} else {
@@ -2241,9 +2263,11 @@ export default class ObsidianLiveSyncPlugin extends Plugin
datatype = "plain";
}
}
const fullPath = path2id(file.path);
const fullPath = getPathFromTFile(file);
const id = await this.path2id(fullPath);
const d: LoadedEntry = {
_id: fullPath,
_id: id,
path: getPathFromTFile(file),
data: content,
ctime: file.stat.ctime,
mtime: file.stat.mtime,
@@ -2292,7 +2316,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
async deleteFromDB(file: TFile) {
if (!this.isTargetFile(file)) return;
const fullPath = file.path;
const fullPath = getPathFromTFile(file);
Logger(`deleteDB By path:${fullPath}`);
await this.deleteFromDBbyPath(fullPath);
if (this.settings.syncOnSave && !this.suspended) {
@@ -2300,7 +2324,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
}
async deleteFromDBbyPath(fullPath: string) {
async deleteFromDBbyPath(fullPath: FilePath) {
await this.localDatabase.deleteDBEntry(fullPath);
if (this.settings.syncOnSave && !this.suspended) {
await this.replicate();
@@ -2352,24 +2376,25 @@ export default class ObsidianLiveSyncPlugin extends Plugin
await this.app.vault.adapter.append(file.path, "", { ctime: file.ctime, mtime: file.mtime });
}
async resolveConflictByNewerEntry(id: string) {
const doc = await this.localDatabase.localDatabase.get(id, { conflicts: true });
async resolveConflictByNewerEntry(path: FilePathWithPrefix) {
const id = await this.path2id(path);
const doc = await this.localDatabase.getRaw<AnyEntry>(id, { conflicts: true });
// If there is no conflict, return with false.
if (!("_conflicts" in doc)) return false;
if (doc._conflicts.length == 0) return false;
Logger(`Hidden file conflicted:${id2filenameInternalMetadata(id)}`);
Logger(`Hidden file conflicted:${this.getPath(doc)}`);
const conflicts = doc._conflicts.sort((a, b) => Number(a.split("-")[0]) - Number(b.split("-")[0]));
const revA = doc._rev;
const revB = conflicts[0];
const revBDoc = await this.localDatabase.localDatabase.get(id, { rev: revB });
const revBDoc = await this.localDatabase.getRaw<EntryDoc>(id, { rev: revB });
// Determine which revision should be deleted.
// simply check modified time
const mtimeA = ("mtime" in doc && doc.mtime) || 0;
const mtimeB = ("mtime" in revBDoc && revBDoc.mtime) || 0;
const delRev = mtimeA < mtimeB ? revA : revB;
// delete older one.
await this.localDatabase.localDatabase.remove(id, delRev);
Logger(`Older one has been deleted:${id2filenameInternalMetadata(id)}`);
await this.localDatabase.removeRaw(id, delRev);
Logger(`Older one has been deleted:${this.getPath(doc)}`);
return true;
}
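// Aside (minimal sketch of the policy above, not part of this commit): of two conflicting
// revisions, the one with the smaller mtime is treated as older and is the one to delete.
function pickRevisionToDelete(current: { rev: string, mtime: number }, conflicted: { rev: string, mtime: number }): string {
    return current.mtime < conflicted.mtime ? current.rev : conflicted.rev;
}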

View File

@@ -1,5 +1,5 @@
import { PluginManifest, TFile } from "./deps";
import { DatabaseEntry, EntryBody } from "./lib/src/types";
import { DatabaseEntry, EntryBody, FilePath } from "./lib/src/types";
export interface PluginDataEntry extends DatabaseEntry {
deviceVaultName: string;
@@ -24,7 +24,7 @@ export interface DevicePluginList {
export const PERIODIC_PLUGIN_SWEEP = 60;
export interface InternalFileInfo {
path: string;
path: FilePath;
mtime: number;
ctime: number;
size: number;
@@ -32,7 +32,7 @@ export interface InternalFileInfo {
}
export interface FileInfo {
path: string;
path: FilePath;
mtime: number;
ctime: number;
size: number;
@@ -70,4 +70,5 @@ export const ICHeaderEnd = "i;";
export const ICHeaderLength = ICHeader.length;
export const FileWatchEventQueueMax = 10;
export const configURIBase = "obsidian://setuplivesync?settings=";
export const configURIBase = "obsidian://setuplivesync?settings=";

View File

@@ -1,20 +1,44 @@
import { DataWriteOptions, normalizePath, TFile, Platform, TAbstractFile, App, Plugin_2 } from "./deps";
import { path2id_base, id2path_base, isValidFilenameInLinux, isValidFilenameInDarwin, isValidFilenameInWidows, isValidFilenameInAndroid } from "./lib/src/path";
import { path2id_base, id2path_base, isValidFilenameInLinux, isValidFilenameInDarwin, isValidFilenameInWidows, isValidFilenameInAndroid, stripAllPrefixes } from "./lib/src/path";
import { Logger } from "./lib/src/logger";
import { LOG_LEVEL } from "./lib/src/types";
import { AnyEntry, DocumentID, EntryHasPath, FilePath, FilePathWithPrefix, LOG_LEVEL } from "./lib/src/types";
import { CHeader, ICHeader, ICHeaderLength, PSCHeader } from "./types";
import { InputStringDialog, PopoverSelectString } from "./dialogs";
// For backward compatibility, the path is used for determining the id.
// Only IDs that CouchDB cannot accept (those starting with an underscore) have been prefixed with "/".
// The first slash will be deleted when the path is normalized.
export function path2id(filename: string): string {
const x = normalizePath(filename);
return path2id_base(x);
export async function path2id(filename: FilePathWithPrefix | FilePath, obfuscatePassphrase: string | false): Promise<DocumentID> {
const temp = filename.split(":");
const path = temp.pop();
const normalizedPath = normalizePath(path as FilePath);
temp.push(normalizedPath);
const fixedPath = temp.join(":") as FilePathWithPrefix;
const out = await path2id_base(fixedPath, obfuscatePassphrase);
return out;
}
export function id2path(filename: string): string {
return id2path_base(normalizePath(filename));
export function id2path(id: DocumentID, entry?: EntryHasPath): FilePathWithPrefix {
const filename = id2path_base(id, entry);
const temp = filename.split(":");
const path = temp.pop();
const normalizedPath = normalizePath(path as FilePath);
temp.push(normalizedPath);
const fixedPath = temp.join(":") as FilePathWithPrefix;
return fixedPath;
}
export function getPath(entry: AnyEntry) {
return id2path(entry._id, entry);
}
export function getPathWithoutPrefix(entry: AnyEntry) {
const f = getPath(entry);
return stripAllPrefixes(f);
}
export function getPathFromTFile(file: TAbstractFile) {
return file.path as FilePath;
}
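// Aside (usage sketch with made-up values, not part of this commit): with an empty passphrase the
// id stays path-derived (backward compatible), while a passphrase yields an obfuscated id; in that
// case the path can only be recovered through the entry that stores it.
async function examplePathIdRoundTrip() {
    const file = "notes/daily.md" as FilePath;
    const plainId = await path2id(file, "");              // path-based id, as before this commit
    const obfuscatedId = await path2id(file, "secret");   // obfuscated id when path obfuscation is on
    const restored = id2path(plainId);                    // "notes/daily.md" in the non-obfuscated case
    return { plainId, obfuscatedId, restored };
}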
const tasks: { [key: string]: ReturnType<typeof setTimeout> } = {};
@@ -300,7 +324,7 @@ export function isValidPath(filename: string) {
let touchedFiles: string[] = [];
export function getAbstractFileByPath(path: string): TAbstractFile | null {
export function getAbstractFileByPath(path: FilePath): TAbstractFile | null {
// Hidden API but so useful.
// @ts-ignore
if ("getAbstractFileByPathInsensitive" in app.vault && (app.vault.adapter?.insensitive ?? false)) {
@@ -314,7 +338,7 @@ export function trimPrefix(target: string, prefix: string) {
return target.startsWith(prefix) ? target.substring(prefix.length) : target;
}
export function touch(file: TFile | string) {
export function touch(file: TFile | FilePath) {
const f = file instanceof TFile ? file : getAbstractFileByPath(file) as TFile;
const key = `${f.path}-${f.stat.mtime}-${f.stat.size}`;
touchedFiles.unshift(key);
@@ -331,17 +355,17 @@ export function clearTouched() {
/**
* Returns whether the ID is that of an internal (hidden) file.
* @param str ID
* @param id ID
* @returns
*/
export function isInternalMetadata(str: string): boolean {
return str.startsWith(ICHeader);
export function isIdOfInternalMetadata(id: FilePath | FilePathWithPrefix | DocumentID): boolean {
return id.startsWith(ICHeader);
}
export function id2filenameInternalMetadata(str: string): string {
return str.substring(ICHeaderLength);
export function stripInternalMetadataPrefix<T extends FilePath | FilePathWithPrefix | DocumentID>(id: T): T {
return id.substring(ICHeaderLength) as T;
}
export function filename2idInternalMetadata(str: string): string {
return ICHeader + str;
export function id2InternalMetadataId(id: DocumentID): DocumentID {
return ICHeader + id as DocumentID;
}
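// Aside (usage sketch with a made-up path, not part of this commit): internal (hidden) file ids
// carry the ICHeader prefix, and the three helpers above convert between the two forms.
function exampleInternalIdRoundTrip(): boolean {
    const internalId = id2InternalMetadataId(".obsidian/app.json" as DocumentID);
    const stripped = stripInternalMetadataPrefix(internalId);   // back to ".obsidian/app.json"
    return isIdOfInternalMetadata(internalId) && !isIdOfInternalMetadata(stripped);
}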
// const CHeaderLength = CHeader.length;