mirror of https://github.com/vrtmrz/obsidian-livesync.git synced 2024-12-12 09:04:06 +02:00

New feature:

- (Beta) Ignore-files handling (see the sketch below)

Fixed:
- Buttons on the lock-detected dialogue can now be shown on narrow-width devices.

Improved:
- Some constants have been flattened so that they can be evaluated at build time.
- Usage of deprecated Obsidian APIs has been reduced.
- The IndexedDB adapter is now enabled while importing a configuration.

Misc:
- Compiler, framework, and dependencies have been upgraded.
- To absorb the impact of these upgrades (especially esbuild and Svelte), Terser has been introduced.
  Feel free to share your opinion with me! I do not like obfuscating the code either.
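
As context for the new (Beta) ignore-files handling above, here is a minimal, hypothetical sketch of how ignore patterns could be applied with minimatch (added to the dependencies in this commit). The helper isIgnoredByIgnoreFiles referenced in the diff below is not implemented in the files shown here, so this is an illustration only, not the plugin's actual code.

import { minimatch } from "minimatch";

// Illustration only: decide whether a vault path matches gitignore-style patterns.
function isIgnoredByPatterns(path: string, ignorePatterns: string[]): boolean {
    return ignorePatterns
        .map((p) => p.trim())
        .filter((p) => p.length > 0 && !p.startsWith("#")) // skip blanks and comments
        .some((pattern) => minimatch(path, pattern, { dot: true, matchBase: true }));
}

// Example: isIgnoredByPatterns("node_modules/foo/index.js", ["node_modules/**", "*.log"]) === true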
This commit is contained in:
vorotamoroz 2023-08-04 09:45:04 +01:00
parent a7c179fc86
commit faaa94423c
25 changed files with 2654 additions and 1530 deletions

View File

@ -2,3 +2,4 @@ node_modules
build
.eslintrc.js.bak
src/lib/src/patches/pouchdb-utils
esbuild.config.mjs

1
.gitignore vendored
View File

@ -8,6 +8,7 @@ package-lock.json
# build
main.js
main_org.js
*.js.map
# obsidian

View File

@ -1,21 +1,111 @@
//@ts-check
import esbuild from "esbuild";
import process from "process";
import builtins from "builtin-modules";
import sveltePlugin from "esbuild-svelte";
import sveltePreprocess from "svelte-preprocess";
import fs from "node:fs";
// import terser from "terser";
import { minify } from "terser";
const banner = `/*
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD AND TERSER
if you want to view the source, please visit the github repository of this plugin
*/
`;
const prod = process.argv[2] === "production";
const manifestJson = JSON.parse(fs.readFileSync("./manifest.json"));
const packageJson = JSON.parse(fs.readFileSync("./package.json"));
const terserOpt = {
sourceMap: (!prod ? {
url: "inline"
} : {}),
format: {
indent_level: 2,
beautify: true,
comments: "some",
ecma: 2018,
preamble: banner,
webkit: true
},
parse: {
// parse options
},
compress: {
// compress options
defaults: false,
evaluate: true,
inline: 3,
join_vars: true,
loops: true,
passes: prod ? 4 : 1,
reduce_vars: true,
reduce_funcs: true,
arrows: true,
collapse_vars: true,
comparisons: true,
lhs_constants: true,
hoist_props: true,
side_effects: true,
// if_return: true,
},
// mangle: {
// // mangle options
// keep_classnames: true,
// keep_fnames: true,
// properties: {
// // mangle property options
// }
// },
ecma: 2018, // specify one of: 5, 2015, 2016, etc.
enclose: false, // or specify true, or "args:values"
keep_classnames: true,
keep_fnames: true,
ie8: false,
module: false,
// nameCache: null, // or specify a name cache object
safari10: false,
toplevel: false
}
const manifestJson = JSON.parse(fs.readFileSync("./manifest.json") + "");
const packageJson = JSON.parse(fs.readFileSync("./package.json") + "");
const updateInfo = JSON.stringify(fs.readFileSync("./updates.md") + "");
esbuild
.build({
/** @type esbuild.Plugin[] */
const plugins = [{
name: 'my-plugin',
setup(build) {
let count = 0;
build.onEnd(async result => {
if (count++ === 0) {
console.log('first build:', result);
} else {
console.log('subsequent build:');
}
if (prod) {
console.log("Performing terser");
const src = fs.readFileSync("./main_org.js").toString();
// @ts-ignore
const ret = await minify(src, terserOpt);
if (ret && ret.code) {
fs.writeFileSync("./main.js", ret.code);
}
console.log("Finished terser");
} else {
fs.copyFileSync("./main_org.js", "./main.js");
}
});
},
}];
const context = await esbuild.context({
banner: {
js: banner,
},
@ -25,22 +115,49 @@ esbuild
"MANIFEST_VERSION": `"${manifestJson.version}"`,
"PACKAGE_VERSION": `"${packageJson.version}"`,
"UPDATE_INFO": `${updateInfo}`,
"global":"window",
"global": "window",
},
external: ["obsidian", "electron", "crypto"],
external: [
"obsidian",
"electron",
"crypto",
"@codemirror/autocomplete",
"@codemirror/collab",
"@codemirror/commands",
"@codemirror/language",
"@codemirror/lint",
"@codemirror/search",
"@codemirror/state",
"@codemirror/view",
"@lezer/common",
"@lezer/highlight",
"@lezer/lr"],
// minifyWhitespace: true,
format: "cjs",
watch: !prod,
target: "es2018",
logLevel: "info",
platform: "browser",
sourcemap: prod ? false : "inline",
treeShaking: true,
platform: "browser",
outfile: "main_org.js",
minifyWhitespace: false,
minifySyntax: false,
minifyIdentifiers: false,
minify: false,
// keepNames: true,
plugins: [
sveltePlugin({
preprocess: sveltePreprocess(),
compilerOptions: { css: true },
compilerOptions: { css: true, preserveComments: true },
}),
...plugins
],
outfile: "main.js",
})
.catch(() => process.exit(1));
})
if (prod) {
await context.rebuild();
process.exit(0);
} else {
await context.watch();
}
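
Two notes on the configuration above, tying it back to the commit message. First, production builds are now two-stage: esbuild writes main_org.js, and the plugin's onEnd hook runs Terser over it to produce the published main.js (development builds simply copy the file and keep inline source maps). Second, the define block injects MANIFEST_VERSION, PACKAGE_VERSION, and UPDATE_INFO as literals at bundle time. A minimal sketch of a consumer follows; the declare statements are assumptions for illustration, and the real declarations live elsewhere in the plugin source.

// Sketch only: consuming the build-time defines injected above.
declare const MANIFEST_VERSION: string; // replaced with the version from manifest.json
declare const PACKAGE_VERSION: string;  // replaced with the version from package.json
declare const UPDATE_INFO: string;      // replaced with the JSON-stringified contents of updates.md

console.log(`Self-hosted LiveSync ${MANIFEST_VERSION} (package ${PACKAGE_VERSION})`);
const updateNotes: string = UPDATE_INFO; // already a plain string after substitution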

3270
package-lock.json generated

File diff suppressed because it is too large.

View File

@ -13,43 +13,51 @@
"author": "vorotamoroz",
"license": "MIT",
"devDependencies": {
"@tsconfig/svelte": "^4.0.1",
"@tsconfig/svelte": "^5.0.0",
"@types/diff-match-patch": "^1.0.32",
"@types/node": "^20.2.5",
"@types/pouchdb": "^6.4.0",
"@types/pouchdb-browser": "^6.1.3",
"@typescript-eslint/eslint-plugin": "^5.54.0",
"@typescript-eslint/parser": "^5.54.0",
"@types/pouchdb-adapter-http": "^6.1.3",
"@types/pouchdb-adapter-idb": "^6.1.4",
"@types/pouchdb-core": "^7.0.11",
"@types/pouchdb-mapreduce": "^6.1.7",
"@types/pouchdb-replication": "^6.4.4",
"@types/transform-pouch": "^1.0.2",
"@typescript-eslint/eslint-plugin": "^6.2.1",
"@typescript-eslint/parser": "^6.2.1",
"builtin-modules": "^3.3.0",
"esbuild": "0.15.15",
"esbuild-svelte": "^0.7.3",
"eslint": "^8.35.0",
"esbuild": "0.18.17",
"esbuild-svelte": "^0.7.4",
"eslint": "^8.46.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-import": "^2.28.0",
"events": "^3.3.0",
"obsidian": "^1.1.1",
"postcss": "^8.4.21",
"obsidian": "^1.3.5",
"postcss": "^8.4.27",
"postcss-load-config": "^4.0.1",
"pouchdb-adapter-http": "^8.0.1",
"pouchdb-adapter-idb": "^8.0.1",
"pouchdb-adapter-indexeddb": "^8.0.1",
"pouchdb-core": "^8.0.1",
"pouchdb-errors": "^8.0.1",
"pouchdb-find": "^8.0.1",
"pouchdb-mapreduce": "^8.0.1",
"pouchdb-merge": "^8.0.1",
"pouchdb-errors": "^8.0.1",
"pouchdb-replication": "^8.0.1",
"pouchdb-utils": "^8.0.1",
"svelte": "^3.59.1",
"svelte-preprocess": "^5.0.3",
"svelte": "^4.1.2",
"svelte-preprocess": "^5.0.4",
"terser": "^5.19.2",
"transform-pouch": "^2.0.0",
"tslib": "^2.5.0",
"typescript": "^5.0.4"
"tslib": "^2.6.1",
"typescript": "^5.1.6"
},
"dependencies": {
"diff-match-patch": "^1.0.5",
"idb": "^7.1.1",
"xxhash-wasm": "^0.4.2",
"minimatch": "^9.0.3",
"xxhash-wasm": "0.4.2",
"xxhash-wasm-102": "npm:xxhash-wasm@^1.0.2"
}
}
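
One dependency entry above deserves a note: "xxhash-wasm-102": "npm:xxhash-wasm@^1.0.2" is an npm alias, which installs a second major version of xxhash-wasm under a different package name so both can coexist. A hedged sketch of how such an alias is typically consumed (the import names and usage are illustrative; the actual call sites are in the library code, not in this diff):

// Illustrative only: importing both majors of xxhash-wasm side by side via the npm: alias.
import xxhash04 from "xxhash-wasm";     // resolves to xxhash-wasm@0.4.x
import xxhash10 from "xxhash-wasm-102"; // the alias resolves to xxhash-wasm@^1.0.2

// Both majors expose an async factory returning hashing helpers (the helper APIs differ between majors).
const legacyHasher = await xxhash04();
const currentHasher = await xxhash10();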

View File

@ -2,7 +2,7 @@ import { writable } from 'svelte/store';
import { Notice, type PluginManifest, parseYaml } from "./deps";
import type { EntryDoc, LoadedEntry, InternalFileEntry, FilePathWithPrefix, FilePath, DocumentID, AnyEntry } from "./lib/src/types";
import { LOG_LEVEL } from "./lib/src/types";
import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "./lib/src/types";
import { ICXHeader, PERIODIC_PLUGIN_SWEEP, } from "./types";
import { delay, getDocData } from "./lib/src/utils";
import { Logger } from "./lib/src/logger";
@ -139,7 +139,7 @@ export class ConfigSync extends LiveSyncCommands {
Logger("Scanning customizations : done");
} catch (ex) {
Logger("Scanning customizations : failed");
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
@ -165,13 +165,14 @@ export class ConfigSync extends LiveSyncCommands {
await this.updatePluginList(showMessage);
}
async updatePluginList(showMessage: boolean, updatedDocumentPath?: FilePathWithPrefix): Promise<void> {
const logLevel = showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO;
const logLevel = showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO;
// pluginList.set([]);
if (!this.settings.usePluginSync) {
this.pluginList = [];
pluginList.set(this.pluginList)
return;
}
await Promise.resolve(); // Just to prevent warning.
scheduleTask("update-plugin-list-task", 200, async () => {
await runWithLock("update-plugin-list", false, async () => {
try {
@ -191,7 +192,7 @@ export class ConfigSync extends LiveSyncCommands {
try {
count++;
if (count % 10 == 0) Logger(`Enumerating files... ${count}`, logLevel, "get-plugins");
Logger(`plugin-${path}`, LOG_LEVEL.VERBOSE);
Logger(`plugin-${path}`, LOG_LEVEL_VERBOSE);
const wx = await this.localDatabase.getDBEntry(path, null, false, false);
if (wx) {
const data = deserialize(getDocData(wx.data), {}) as PluginDataEx;
@ -211,7 +212,7 @@ export class ConfigSync extends LiveSyncCommands {
// return entries;
} catch (ex) {
//TODO
Logger(`Something happened at enumerating customization :${path}`, LOG_LEVEL.NOTICE);
Logger(`Something happened at enumerating customization :${path}`, LOG_LEVEL_NOTICE);
console.warn(ex);
}
return false;
@ -257,7 +258,7 @@ export class ConfigSync extends LiveSyncCommands {
const fileB = pluginDataB.files[0];
const docAx = { ...docA, ...fileA } as LoadedEntry, docBx = { ...docB, ...fileB } as LoadedEntry
return runWithLock("config:merge-data", false, () => new Promise((res) => {
Logger("Opening data-merging dialog", LOG_LEVEL.VERBOSE);
Logger("Opening data-merging dialog", LOG_LEVEL_VERBOSE);
// const docs = [docA, docB];
const path = stripAllPrefixes(docAx.path.split("/").slice(-1).join("/") as FilePath);
const modal = new JsonResolveModal(this.app, path, [docAx, docBx], async (keep, result) => {
@ -266,7 +267,7 @@ export class ConfigSync extends LiveSyncCommands {
res(await this.applyData(pluginDataA, result));
} catch (ex) {
Logger("Could not apply merged file");
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
res(false);
}
}, "📡", "🛰️", "B");
@ -299,7 +300,7 @@ export class ConfigSync extends LiveSyncCommands {
} catch (ex) {
Logger(`Applying ${f.filename} of ${data.displayName || data.name}.. Failed`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
@ -307,7 +308,7 @@ export class ConfigSync extends LiveSyncCommands {
await this.storeCustomizationFiles(uPath);
await this.updatePluginList(true, uPath);
await delay(100);
Logger(`Config ${data.displayName || data.name} has been applied`, LOG_LEVEL.NOTICE);
Logger(`Config ${data.displayName || data.name} has been applied`, LOG_LEVEL_NOTICE);
if (data.category == "PLUGIN_DATA" || data.category == "PLUGIN_MAIN") {
//@ts-ignore
const manifests = Object.values(this.app.plugins.manifests) as any as PluginManifest[];
@ -315,12 +316,12 @@ export class ConfigSync extends LiveSyncCommands {
const enabledPlugins = this.app.plugins.enabledPlugins as Set<string>;
const pluginManifest = manifests.find((manifest) => enabledPlugins.has(manifest.id) && manifest.dir == `${baseDir}/plugins/${data.name}`);
if (pluginManifest) {
Logger(`Unloading plugin: ${pluginManifest.name}`, LOG_LEVEL.NOTICE, "plugin-reload-" + pluginManifest.id);
Logger(`Unloading plugin: ${pluginManifest.name}`, LOG_LEVEL_NOTICE, "plugin-reload-" + pluginManifest.id);
// @ts-ignore
await this.app.plugins.unloadPlugin(pluginManifest.id);
// @ts-ignore
await this.app.plugins.loadPlugin(pluginManifest.id);
Logger(`Plugin reloaded: ${pluginManifest.name}`, LOG_LEVEL.NOTICE, "plugin-reload-" + pluginManifest.id);
Logger(`Plugin reloaded: ${pluginManifest.name}`, LOG_LEVEL_NOTICE, "plugin-reload-" + pluginManifest.id);
}
} else if (data.category == "CONFIG") {
scheduleTask("configReload", 250, async () => {
@ -333,7 +334,7 @@ export class ConfigSync extends LiveSyncCommands {
return true;
} catch (ex) {
Logger(`Applying ${data.displayName || data.name}.. Failed`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
}
@ -342,11 +343,11 @@ export class ConfigSync extends LiveSyncCommands {
if (data.documentPath) {
await this.deleteConfigOnDatabase(data.documentPath);
await this.updatePluginList(false, data.documentPath);
Logger(`Delete: ${data.documentPath}`, LOG_LEVEL.NOTICE);
Logger(`Delete: ${data.documentPath}`, LOG_LEVEL_NOTICE);
}
return true;
} catch (ex) {
Logger(`Failed to delete: ${data.documentPath}`, LOG_LEVEL.NOTICE);
Logger(`Failed to delete: ${data.documentPath}`, LOG_LEVEL_NOTICE);
return false;
}
@ -433,12 +434,12 @@ export class ConfigSync extends LiveSyncCommands {
displayName = `${json.name}`;
}
} catch (ex) {
Logger(`Configuration sync data: ${path} looks like manifest, but could not read the version`, LOG_LEVEL.INFO);
Logger(`Configuration sync data: ${path} looks like manifest, but could not read the version`, LOG_LEVEL_INFO);
}
}
} catch (ex) {
Logger(`The file ${path} could not be encoded`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
const mtime = stat.mtime;
@ -465,7 +466,7 @@ export class ConfigSync extends LiveSyncCommands {
async storeCustomizationFiles(path: FilePath, termOverRide?: string) {
const term = termOverRide || this.plugin.deviceAndVaultName;
if (term == "") {
Logger("We have to configure the device name", LOG_LEVEL.NOTICE);
Logger("We have to configure the device name", LOG_LEVEL_NOTICE);
return;
}
const vf = this.filenameToUnifiedKey(path, term);
@ -501,7 +502,7 @@ export class ConfigSync extends LiveSyncCommands {
for (const target of fileTargets) {
const data = await this.makeEntryFromFile(target);
if (data == false) {
// Logger(`Config: skipped: ${target} `, LOG_LEVEL.VERBOSE);
// Logger(`Config: skipped: ${target} `, LOG_LEVEL_VERBOSE);
continue;
}
if (data.version) {
@ -543,7 +544,7 @@ export class ConfigSync extends LiveSyncCommands {
};
} else {
if (old.mtime == mtime) {
// Logger(`STORAGE --> DB:${file.path}: (hidden) Not changed`, LOG_LEVEL.VERBOSE);
// Logger(`STORAGE --> DB:${file.path}: (hidden) Not changed`, LOG_LEVEL_VERBOSE);
return true;
}
saveData =
@ -564,7 +565,7 @@ export class ConfigSync extends LiveSyncCommands {
return ret;
} catch (ex) {
Logger(`STORAGE --> DB:${prefixedFileName}: (config) Failed`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
})
@ -591,11 +592,11 @@ export class ConfigSync extends LiveSyncCommands {
async scanAllConfigFiles(showMessage: boolean) {
const logLevel = showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO;
const logLevel = showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO;
Logger("Scanning customizing files.", logLevel, "scan-all-config");
const term = this.plugin.deviceAndVaultName;
if (term == "") {
Logger("We have to configure the device name", LOG_LEVEL.NOTICE);
Logger("We have to configure the device name", LOG_LEVEL_NOTICE);
return;
}
const filesAll = await this.scanInternalFiles();
@ -643,7 +644,7 @@ export class ConfigSync extends LiveSyncCommands {
Logger(`STORAGE -x> DB:${prefixedFileName}: (config) Done`);
} catch (ex) {
Logger(`STORAGE -x> DB:${prefixedFileName}: (config) Failed`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
});
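
The repeated LOG_LEVEL edits in this file are the "flattened constants" item from the commit message: the enum-like LOG_LEVEL object is replaced with standalone LOG_LEVEL_* constants, which esbuild and Terser can inline and evaluate. A sketch of the shape of that change (the numeric values are assumptions; the real definitions live in src/lib/src/types.ts, which is not shown in this diff):

// Before (illustrative): a property lookup on an object, which the bundler cannot inline.
// export const LOG_LEVEL = { VERBOSE: 1, INFO: 10, NOTICE: 100 } as const;

// After (illustrative values): flat constants are plain identifiers that can be inlined and folded.
export const LOG_LEVEL_VERBOSE = 1;
export const LOG_LEVEL_INFO = 10;
export const LOG_LEVEL_NOTICE = 100;

// Call sites change accordingly: Logger(ex, LOG_LEVEL.VERBOSE) -> Logger(ex, LOG_LEVEL_VERBOSE)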

View File

@ -1,5 +1,5 @@
import { Notice, normalizePath, type PluginManifest } from "./deps";
import { type EntryDoc, type LoadedEntry, LOG_LEVEL, type InternalFileEntry, type FilePathWithPrefix, type FilePath } from "./lib/src/types";
import { type EntryDoc, type LoadedEntry, type InternalFileEntry, type FilePathWithPrefix, type FilePath, LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "./lib/src/types";
import { type InternalFileInfo, ICHeader, ICHeaderEnd } from "./types";
import { Parallels, delay, isDocContentSame } from "./lib/src/utils";
import { Logger } from "./lib/src/logger";
@ -44,7 +44,7 @@ export class HiddenFileSync extends LiveSyncCommands {
Logger("Synchronizing hidden files done");
} catch (ex) {
Logger("Synchronizing hidden files failed");
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
}
@ -161,7 +161,7 @@ export class HiddenFileSync extends LiveSyncCommands {
const commonBase = revFrom._revs_info.filter(e => e.status == "available" && Number(e.rev.split("-")[0]) < conflictedRevNo).first()?.rev ?? "";
const result = await this.plugin.mergeObject(path, commonBase, doc._rev, conflictedRev);
if (result) {
Logger(`Object merge:${path}`, LOG_LEVEL.INFO);
Logger(`Object merge:${path}`, LOG_LEVEL_INFO);
const filename = stripAllPrefixes(path);
const isExists = await this.app.vault.adapter.exists(filename);
if (!isExists) {
@ -174,7 +174,7 @@ export class HiddenFileSync extends LiveSyncCommands {
await this.localDatabase.removeRaw(id, revB);
return this.resolveConflictOnInternalFile(path);
} else {
Logger(`Object merge is not applicable.`, LOG_LEVEL.VERBOSE);
Logger(`Object merge is not applicable.`, LOG_LEVEL_VERBOSE);
}
const docAMerge = await this.localDatabase.getDBEntry(path, { rev: revA });
@ -203,7 +203,7 @@ export class HiddenFileSync extends LiveSyncCommands {
return this.resolveConflictOnInternalFile(path);
} catch (ex) {
Logger(`Failed to resolve conflict (Hidden): ${path}`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
}
@ -211,7 +211,7 @@ export class HiddenFileSync extends LiveSyncCommands {
//TODO: Tidy up. Even though it is experimental feature, So dirty...
async syncInternalFilesAndDatabase(direction: "push" | "pull" | "safe" | "pullForce" | "pushForce", showMessage: boolean, files: InternalFileInfo[] | false = false, targetFiles: string[] | false = false) {
await this.resolveConflictOnInternalFiles();
const logLevel = showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO;
const logLevel = showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO;
Logger("Scanning hidden files.", logLevel, "sync_internal");
const ignorePatterns = this.settings.syncInternalFilesIgnorePatterns
.replace(/\n| /g, "")
@ -273,6 +273,9 @@ export class HiddenFileSync extends LiveSyncCommands {
if (!filename) continue;
if (ignorePatterns.some(e => filename.match(e)))
continue;
if (!await this.plugin.isIgnoredByIgnoreFiles(filename)) {
continue
}
const fileOnStorage = filename in filesMap ? filesMap[filename] : undefined;
const fileOnDatabase = filename in filesOnDBMap ? filesOnDBMap[filename] : undefined;
@ -355,12 +358,12 @@ export class HiddenFileSync extends LiveSyncCommands {
a.appendChild(a.createEl("a", null, (anchor) => {
anchor.text = "HERE";
anchor.addEventListener("click", async () => {
Logger(`Unloading plugin: ${updatePluginName}`, LOG_LEVEL.NOTICE, "plugin-reload-" + updatePluginId);
Logger(`Unloading plugin: ${updatePluginName}`, LOG_LEVEL_NOTICE, "plugin-reload-" + updatePluginId);
// @ts-ignore
await this.app.plugins.unloadPlugin(updatePluginId);
// @ts-ignore
await this.app.plugins.loadPlugin(updatePluginId);
Logger(`Plugin reloaded: ${updatePluginName}`, LOG_LEVEL.NOTICE, "plugin-reload-" + updatePluginId);
Logger(`Plugin reloaded: ${updatePluginName}`, LOG_LEVEL_NOTICE, "plugin-reload-" + updatePluginId);
});
}));
@ -391,7 +394,7 @@ export class HiddenFileSync extends LiveSyncCommands {
}
} catch (ex) {
Logger("Error on checking plugin status.");
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
@ -431,6 +434,9 @@ export class HiddenFileSync extends LiveSyncCommands {
}
async storeInternalFileToDatabase(file: InternalFileInfo, forceWrite = false) {
if (!await this.plugin.isIgnoredByIgnoreFiles(file.path)) {
return
}
const id = await this.path2id(file.path, ICHeader);
const prefixedFileName = addPrefix(file.path, ICHeader);
const contentBin = await this.app.vault.adapter.readBinary(file.path);
@ -439,7 +445,7 @@ export class HiddenFileSync extends LiveSyncCommands {
content = await arrayBufferToBase64(contentBin);
} catch (ex) {
Logger(`The file ${file.path} could not be encoded`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
const mtime = file.mtime;
@ -462,7 +468,7 @@ export class HiddenFileSync extends LiveSyncCommands {
};
} else {
if (isDocContentSame(old.data, content) && !forceWrite) {
// Logger(`STORAGE --> DB:${file.path}: (hidden) Not changed`, LOG_LEVEL.VERBOSE);
// Logger(`STORAGE --> DB:${file.path}: (hidden) Not changed`, LOG_LEVEL_VERBOSE);
return;
}
saveData =
@ -482,7 +488,7 @@ export class HiddenFileSync extends LiveSyncCommands {
return ret;
} catch (ex) {
Logger(`STORAGE --> DB:${file.path}: (hidden) Failed`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
});
@ -492,6 +498,9 @@ export class HiddenFileSync extends LiveSyncCommands {
const id = await this.path2id(filename, ICHeader);
const prefixedFileName = addPrefix(filename, ICHeader);
const mtime = new Date().getTime();
if (!await this.plugin.isIgnoredByIgnoreFiles(filename)) {
return
}
await runWithLock("file-" + prefixedFileName, false, async () => {
try {
const old = await this.localDatabase.getDBEntryMeta(prefixedFileName, null, true) as InternalFileEntry | false;
@ -526,7 +535,7 @@ export class HiddenFileSync extends LiveSyncCommands {
Logger(`STORAGE -x> DB:${filename}: (hidden) Done`);
} catch (ex) {
Logger(`STORAGE -x> DB:${filename}: (hidden) Failed`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
});
@ -535,7 +544,9 @@ export class HiddenFileSync extends LiveSyncCommands {
async extractInternalFileFromDatabase(filename: FilePath, force = false) {
const isExists = await this.app.vault.adapter.exists(filename);
const prefixedFileName = addPrefix(filename, ICHeader);
if (!await this.plugin.isIgnoredByIgnoreFiles(filename)) {
return;
}
return await runWithLock("file-" + prefixedFileName, false, async () => {
try {
// Check conflicted status
@ -545,7 +556,7 @@ export class HiddenFileSync extends LiveSyncCommands {
throw new Error(`File not found on database.:${filename}`);
// Prevent overwrite for Prevent overwriting while some conflicted revision exists.
if (fileOnDB?._conflicts?.length) {
Logger(`Hidden file ${filename} has conflicted revisions, to keep in safe, writing to storage has been prevented`, LOG_LEVEL.INFO);
Logger(`Hidden file ${filename} has conflicted revisions, to keep in safe, writing to storage has been prevented`, LOG_LEVEL_INFO);
return;
}
const deleted = "deleted" in fileOnDB ? fileOnDB.deleted : false;
@ -559,8 +570,8 @@ export class HiddenFileSync extends LiveSyncCommands {
//@ts-ignore internalAPI
await app.vault.adapter.reconcileInternalFile(filename);
} catch (ex) {
Logger("Failed to call internal API(reconcileInternalFile)", LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger("Failed to call internal API(reconcileInternalFile)", LOG_LEVEL_VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
return true;
@ -572,8 +583,8 @@ export class HiddenFileSync extends LiveSyncCommands {
//@ts-ignore internalAPI
await app.vault.adapter.reconcileInternalFile(filename);
} catch (ex) {
Logger("Failed to call internal API(reconcileInternalFile)", LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger("Failed to call internal API(reconcileInternalFile)", LOG_LEVEL_VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
Logger(`STORAGE <-- DB:${filename}: written (hidden,new${force ? ", force" : ""})`);
return true;
@ -581,7 +592,7 @@ export class HiddenFileSync extends LiveSyncCommands {
const contentBin = await this.app.vault.adapter.readBinary(filename);
const content = await arrayBufferToBase64(contentBin);
if (content == fileOnDB.data && !force) {
// Logger(`STORAGE <-- DB:${filename}: skipped (hidden) Not changed`, LOG_LEVEL.VERBOSE);
// Logger(`STORAGE <-- DB:${filename}: skipped (hidden) Not changed`, LOG_LEVEL_VERBOSE);
return true;
}
await this.app.vault.adapter.writeBinary(filename, base64ToArrayBuffer(fileOnDB.data), { mtime: fileOnDB.mtime, ctime: fileOnDB.ctime });
@ -589,8 +600,8 @@ export class HiddenFileSync extends LiveSyncCommands {
//@ts-ignore internalAPI
await app.vault.adapter.reconcileInternalFile(filename);
} catch (ex) {
Logger("Failed to call internal API(reconcileInternalFile)", LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger("Failed to call internal API(reconcileInternalFile)", LOG_LEVEL_VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
Logger(`STORAGE <-- DB:${filename}: written (hidden, overwrite${force ? ", force" : ""})`);
return true;
@ -598,7 +609,7 @@ export class HiddenFileSync extends LiveSyncCommands {
}
} catch (ex) {
Logger(`STORAGE <-- DB:${filename}: written (hidden, overwrite${force ? ", force" : ""}) Failed`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
});
@ -608,7 +619,7 @@ export class HiddenFileSync extends LiveSyncCommands {
showJSONMergeDialogAndMerge(docA: LoadedEntry, docB: LoadedEntry): Promise<boolean> {
return runWithLock("conflict:merge-data", false, () => new Promise((res) => {
Logger("Opening data-merging dialog", LOG_LEVEL.VERBOSE);
Logger("Opening data-merging dialog", LOG_LEVEL_VERBOSE);
const docs = [docA, docB];
const path = stripAllPrefixes(docA.path);
const modal = new JsonResolveModal(this.app, path, [docA, docB], async (keep, result) => {
@ -644,8 +655,8 @@ export class HiddenFileSync extends LiveSyncCommands {
//@ts-ignore internalAPI
await app.vault.adapter.reconcileInternalFile(filename);
} catch (ex) {
Logger("Failed to call internal API(reconcileInternalFile)", LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger("Failed to call internal API(reconcileInternalFile)", LOG_LEVEL_VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
Logger(`STORAGE <-- DB:${filename}: written (hidden,merged)`);
}
@ -656,7 +667,7 @@ export class HiddenFileSync extends LiveSyncCommands {
res(true);
} catch (ex) {
Logger("Could not merge conflicted json");
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
res(false);
}
});
@ -680,6 +691,9 @@ export class HiddenFileSync extends LiveSyncCommands {
const result: InternalFileInfo[] = [];
for (const f of files) {
const w = await f;
if (!await this.plugin.isIgnoredByIgnoreFiles(w.path)) {
continue
}
result.push({
...w,
...w.stat
@ -698,12 +712,18 @@ export class HiddenFileSync extends LiveSyncCommands {
) {
const w = await this.app.vault.adapter.list(path);
let files = [
const filesSrc = [
...w.files
.filter((e) => !ignoreList.some((ee) => e.endsWith(ee)))
.filter((e) => !filter || filter.some((ee) => e.match(ee)))
.filter((e) => !ignoreFilter || ignoreFilter.every((ee) => !e.match(ee))),
];
let files = [] as string[];
for (const file of filesSrc) {
if (!await this.plugin.isIgnoredByIgnoreFiles(file)) {
files.push(file);
}
}
L1: for (const v of w.folders) {
for (const ignore of ignoreList) {
@ -714,6 +734,9 @@ export class HiddenFileSync extends LiveSyncCommands {
if (ignoreFilter && ignoreFilter.some(e => v.match(e))) {
continue L1;
}
if (!await this.plugin.isIgnoredByIgnoreFiles(v)) {
continue L1;
}
files = files.concat(await this.getFiles(v, ignoreList, filter, ignoreFilter));
}
return files;

View File

@ -1,6 +1,6 @@
import { normalizePath, type PluginManifest } from "./deps";
import type { DocumentID, EntryDoc, FilePathWithPrefix, LoadedEntry } from "./lib/src/types";
import { LOG_LEVEL } from "./lib/src/types";
import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "./lib/src/types";
import { type PluginDataEntry, PERIODIC_PLUGIN_SWEEP, type PluginList, type DevicePluginList, PSCHeader, PSCHeaderEnd } from "./types";
import { getDocData, isDocContentSame } from "./lib/src/utils";
import { Logger } from "./lib/src/logger";
@ -79,7 +79,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
Logger("Scanning plugins done");
} catch (ex) {
Logger("Scanning plugins failed");
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
@ -148,7 +148,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
});
NewNotice(fragment, 10000);
} else {
Logger("Everything is up to date.", LOG_LEVEL.NOTICE);
Logger("Everything is up to date.", LOG_LEVEL_NOTICE);
}
}
@ -165,9 +165,9 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
specificPlugin = manifests.find(e => e.dir.endsWith("/" + specificPluginPath))?.id ?? "";
}
await runWithLock("sweepplugin", true, async () => {
const logLevel = showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO;
const logLevel = showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO;
if (!this.deviceAndVaultName) {
Logger("You have to set your device name.", LOG_LEVEL.NOTICE);
Logger("You have to set your device name.", LOG_LEVEL_NOTICE);
return;
}
Logger("Scanning plugins", logLevel);
@ -176,7 +176,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
endkey: `ps:${this.deviceAndVaultName}-${specificPlugin}\u{10ffff}`,
include_docs: true,
});
// Logger("OLD DOCS.", LOG_LEVEL.VERBOSE);
// Logger("OLD DOCS.", LOG_LEVEL_VERBOSE);
// sweep current plugin.
const procs = manifests.map(async (m) => {
const pluginDataEntryID = `ps:${this.deviceAndVaultName}-${m.id}` as DocumentID;
@ -184,7 +184,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
if (specificPlugin && m.id != specificPlugin) {
return;
}
Logger(`Reading plugin:${m.name}(${m.id})`, LOG_LEVEL.VERBOSE);
Logger(`Reading plugin:${m.name}(${m.id})`, LOG_LEVEL_VERBOSE);
const path = normalizePath(m.dir) + "/";
const adapter = this.app.vault.adapter;
const files = ["manifest.json", "main.js", "styles.css", "data.json"];
@ -222,7 +222,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
datatype: "plain",
type: "plain"
};
Logger(`check diff:${m.name}(${m.id})`, LOG_LEVEL.VERBOSE);
Logger(`check diff:${m.name}(${m.id})`, LOG_LEVEL_VERBOSE);
await runWithLock("plugin-" + m.id, false, async () => {
const old = await this.localDatabase.getDBEntry(p._id as string as FilePathWithPrefix /* This also should be explained */, null, false, false);
if (old !== false) {
@ -237,7 +237,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
Logger(`Plugin saved:${m.name}`, logLevel);
});
} catch (ex) {
Logger(`Plugin save failed:${m.name}`, LOG_LEVEL.NOTICE);
Logger(`Plugin save failed:${m.name}`, LOG_LEVEL_NOTICE);
} finally {
oldDocs.rows = oldDocs.rows.filter((e) => e.id != pluginDataEntryID);
}
@ -259,7 +259,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
}
return e.doc;
});
Logger(`Deleting old plugin:(${delDocs.length})`, LOG_LEVEL.VERBOSE);
Logger(`Deleting old plugin:(${delDocs.length})`, LOG_LEVEL_VERBOSE);
await this.localDatabase.bulkDocsRaw(delDocs);
Logger(`Scan plugin done.`, logLevel);
});
@ -274,15 +274,15 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
if (stat) {
// @ts-ignore
await this.app.plugins.unloadPlugin(plugin.manifest.id);
Logger(`Unload plugin:${plugin.manifest.id}`, LOG_LEVEL.NOTICE);
Logger(`Unload plugin:${plugin.manifest.id}`, LOG_LEVEL_NOTICE);
}
if (plugin.dataJson)
await adapter.write(pluginTargetFolderPath + "data.json", plugin.dataJson);
Logger("wrote:" + pluginTargetFolderPath + "data.json", LOG_LEVEL.NOTICE);
Logger("wrote:" + pluginTargetFolderPath + "data.json", LOG_LEVEL_NOTICE);
if (stat) {
// @ts-ignore
await this.app.plugins.loadPlugin(plugin.manifest.id);
Logger(`Load plugin:${plugin.manifest.id}`, LOG_LEVEL.NOTICE);
Logger(`Load plugin:${plugin.manifest.id}`, LOG_LEVEL_NOTICE);
}
});
}
@ -294,7 +294,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
if (stat) {
// @ts-ignore
await this.app.plugins.unloadPlugin(plugin.manifest.id);
Logger(`Unload plugin:${plugin.manifest.id}`, LOG_LEVEL.NOTICE);
Logger(`Unload plugin:${plugin.manifest.id}`, LOG_LEVEL_NOTICE);
}
const pluginTargetFolderPath = normalizePath(plugin.manifest.dir) + "/";
@ -309,7 +309,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
if (stat) {
// @ts-ignore
await this.app.plugins.loadPlugin(plugin.manifest.id);
Logger(`Load plugin:${plugin.manifest.id}`, LOG_LEVEL.NOTICE);
Logger(`Load plugin:${plugin.manifest.id}`, LOG_LEVEL_NOTICE);
}
});
}

View File

@ -1,4 +1,4 @@
import { type EntryDoc, type ObsidianLiveSyncSettings, LOG_LEVEL, DEFAULT_SETTINGS } from "./lib/src/types";
import { type EntryDoc, type ObsidianLiveSyncSettings, DEFAULT_SETTINGS, LOG_LEVEL_NOTICE } from "./lib/src/types";
import { configURIBase } from "./types";
import { Logger } from "./lib/src/logger";
import { PouchDB } from "./lib/src/pouchdb-browser.js";
@ -55,7 +55,7 @@ export class SetupLiveSync extends LiveSyncCommands {
const encryptedSetting = encodeURIComponent(await encrypt(JSON.stringify(setting), encryptingPassphrase, false));
const uri = `${configURIBase}${encryptedSetting}`;
await navigator.clipboard.writeText(uri);
Logger("Setup URI copied to clipboard", LOG_LEVEL.NOTICE);
Logger("Setup URI copied to clipboard", LOG_LEVEL_NOTICE);
}
async command_copySetupURIFull() {
const encryptingPassphrase = await askString(this.app, "Encrypt your settings", "The passphrase to encrypt the setup URI", "", true);
@ -65,14 +65,14 @@ export class SetupLiveSync extends LiveSyncCommands {
const encryptedSetting = encodeURIComponent(await encrypt(JSON.stringify(setting), encryptingPassphrase, false));
const uri = `${configURIBase}${encryptedSetting}`;
await navigator.clipboard.writeText(uri);
Logger("Setup URI copied to clipboard", LOG_LEVEL.NOTICE);
Logger("Setup URI copied to clipboard", LOG_LEVEL_NOTICE);
}
async command_openSetupURI() {
const setupURI = await askString(this.app, "Easy setup", "Set up URI", `${configURIBase}aaaaa`);
if (setupURI === false)
return;
if (!setupURI.startsWith(`${configURIBase}`)) {
Logger("Set up URI looks wrong.", LOG_LEVEL.NOTICE);
Logger("Set up URI looks wrong.", LOG_LEVEL_NOTICE);
return;
}
const config = decodeURIComponent(setupURI.substring(configURIBase.length));
@ -103,6 +103,11 @@ export class SetupLiveSync extends LiveSyncCommands {
const setupManually = "Leave everything to me";
newSettingW.syncInternalFiles = false;
newSettingW.usePluginSync = false;
// Migrate completely obsoleted configuration.
if (!newSettingW.useIndexedDBAdapter) {
newSettingW.useIndexedDBAdapter = true;
}
const setupType = await askSelectString(this.app, "How would you like to set it up?", [setupAsNew, setupAgain, setupJustImport, setupManually]);
if (setupType == setupJustImport) {
this.plugin.settings = newSettingW;
@ -135,13 +140,13 @@ export class SetupLiveSync extends LiveSyncCommands {
await this.plugin.replicate(true);
await this.plugin.markRemoteUnlocked();
}
Logger("Configuration loaded.", LOG_LEVEL.NOTICE);
Logger("Configuration loaded.", LOG_LEVEL_NOTICE);
return;
}
if (keepLocalDB == "no" && keepRemoteDB == "no") {
const reset = await askYesNo(this.app, "Drop everything?");
if (reset != "yes") {
Logger("Cancelled", LOG_LEVEL.NOTICE);
Logger("Cancelled", LOG_LEVEL_NOTICE);
this.plugin.settings = oldConf;
return;
}
@ -176,17 +181,17 @@ export class SetupLiveSync extends LiveSyncCommands {
}
}
Logger("Configuration loaded.", LOG_LEVEL.NOTICE);
Logger("Configuration loaded.", LOG_LEVEL_NOTICE);
} else {
Logger("Cancelled.", LOG_LEVEL.NOTICE);
Logger("Cancelled.", LOG_LEVEL_NOTICE);
}
} catch (ex) {
Logger("Couldn't parse or decrypt configuration uri.", LOG_LEVEL.NOTICE);
Logger("Couldn't parse or decrypt configuration uri.", LOG_LEVEL_NOTICE);
}
}
suspendExtraSync() {
Logger("Hidden files and plugin synchronization have been temporarily disabled. Please enable them after the fetching, if you need them.", LOG_LEVEL.NOTICE)
Logger("Hidden files and plugin synchronization have been temporarily disabled. Please enable them after the fetching, if you need them.", LOG_LEVEL_NOTICE)
this.plugin.settings.syncInternalFiles = false;
this.plugin.settings.usePluginSync = false;
this.plugin.settings.autoSweepPlugins = false;
@ -231,7 +236,7 @@ Of course, we are able to disable these features.`
return;
}
if (mode != "CUSTOMIZE") {
Logger("Gathering files for enabling Hidden File Sync", LOG_LEVEL.NOTICE);
Logger("Gathering files for enabling Hidden File Sync", LOG_LEVEL_NOTICE);
if (mode == "FETCH") {
await this.plugin.addOnHiddenFileSync.syncInternalFilesAndDatabase("pullForce", true);
} else if (mode == "OVERWRITE") {
@ -241,7 +246,7 @@ Of course, we are able to disable these features.`
}
this.plugin.settings.syncInternalFiles = true;
await this.plugin.saveSettings();
Logger(`Done! Restarting the app is strongly recommended!`, LOG_LEVEL.NOTICE);
Logger(`Done! Restarting the app is strongly recommended!`, LOG_LEVEL_NOTICE);
} else if (mode == "CUSTOMIZE") {
if (!this.plugin.deviceAndVaultName) {
let name = await askString(this.app, "Device name", "Please set this device name", `desktop`);
@ -287,14 +292,14 @@ Of course, we are able to disable these features.`
}
async suspendReflectingDatabase() {
if (this.plugin.settings.doNotSuspendOnFetching) return;
Logger(`Suspending reflection: Database and storage changes will not be reflected in each other until completely finished the fetching.`, LOG_LEVEL.NOTICE);
Logger(`Suspending reflection: Database and storage changes will not be reflected in each other until completely finished the fetching.`, LOG_LEVEL_NOTICE);
this.plugin.settings.suspendParseReplicationResult = true;
this.plugin.settings.suspendFileWatching = true;
await this.plugin.saveSettings();
}
async resumeReflectingDatabase() {
if (this.plugin.settings.doNotSuspendOnFetching) return;
Logger(`Database and storage reflection has been resumed!`, LOG_LEVEL.NOTICE);
Logger(`Database and storage reflection has been resumed!`, LOG_LEVEL_NOTICE);
this.plugin.settings.suspendParseReplicationResult = false;
this.plugin.settings.suspendFileWatching = false;
await this.plugin.saveSettings();
@ -320,14 +325,14 @@ Of course, we are able to disable these features.`
}
async fetchRemoteChunks() {
if (!this.plugin.settings.doNotSuspendOnFetching && this.plugin.settings.readChunksOnline) {
Logger(`Fetching chunks`, LOG_LEVEL.NOTICE);
Logger(`Fetching chunks`, LOG_LEVEL_NOTICE);
const remoteDB = await this.plugin.getReplicator().connectRemoteCouchDBWithSetting(this.settings, this.plugin.getIsMobile(), true);
if (typeof remoteDB == "string") {
Logger(remoteDB, LOG_LEVEL.NOTICE);
Logger(remoteDB, LOG_LEVEL_NOTICE);
} else {
await fetchAllUsedChunks(this.localDatabase.localDatabase, remoteDB.db);
}
Logger(`Fetching chunks done`, LOG_LEVEL.NOTICE);
Logger(`Fetching chunks done`, LOG_LEVEL_NOTICE);
}
}
async fetchLocal() {
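
The small migration hunk earlier in this file (forcing useIndexedDBAdapter to true) is the "IndexedDB adapter enabled while importing a configuration" item from the commit message: settings imported from a setup URI are moved off the obsolete adapter configuration before they are applied. A condensed sketch of where that sits in the import flow (names taken from the diff above, heavily simplified; the decrypt call is an assumption mirroring the encrypt call shown earlier):

// Condensed, simplified sketch of the setup-URI import path touched by this commit.
async function importSettingsFromSetupURI(encryptedConfig: string, passphrase: string) {
    const newSettingW = JSON.parse(await decrypt(encryptedConfig, passphrase, false)) as ObsidianLiveSyncSettings;
    newSettingW.syncInternalFiles = false;
    newSettingW.usePluginSync = false;
    // Migrate completely obsoleted configuration: imported settings always use the IndexedDB adapter.
    if (!newSettingW.useIndexedDBAdapter) {
        newSettingW.useIndexedDBAdapter = true;
    }
    // ...then the user chooses: set up as new, set up again, just import, or configure manually.
}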

View File

@ -1,6 +1,6 @@
import { App, Modal } from "./deps";
import { DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT } from "diff-match-patch";
import { diff_result } from "./lib/src/types";
import { type diff_result } from "./lib/src/types";
import { escapeStringToHTML } from "./lib/src/strbin";
export class ConflictResolveModal extends Modal {

View File

@ -2,7 +2,7 @@ import { TFile, Modal, App, DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, diff_match_pat
import { getPathFromTFile, isValidPath } from "./utils";
import { base64ToArrayBuffer, base64ToString, escapeStringToHTML } from "./lib/src/strbin";
import ObsidianLiveSyncPlugin from "./main";
import { type DocumentID, type FilePathWithPrefix, type LoadedEntry, LOG_LEVEL } from "./lib/src/types";
import { type DocumentID, type FilePathWithPrefix, type LoadedEntry, LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "./lib/src/types";
import { Logger } from "./lib/src/logger";
import { isErrorOfMissingDoc } from "./lib/src/utils_couchdb";
import { getDocData } from "./lib/src/utils";
@ -60,7 +60,7 @@ export class DocumentHistoryModal extends Modal {
this.contentView.setText(`History of this file was not recorded.`);
} else {
this.contentView.setText(`Error occurred.`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
}
@ -178,7 +178,7 @@ export class DocumentHistoryModal extends Modal {
e.addClass("mod-cta");
e.addEventListener("click", async () => {
await navigator.clipboard.writeText(this.currentText);
Logger(`Old content copied to clipboard`, LOG_LEVEL.NOTICE);
Logger(`Old content copied to clipboard`, LOG_LEVEL_NOTICE);
});
});
async function focusFile(path: string) {
@ -189,7 +189,7 @@ export class DocumentHistoryModal extends Modal {
const leaf = app.workspace.getLeaf(false);
await leaf.openFile(targetFile);
} else {
Logger("The file could not view on the editor", LOG_LEVEL.NOTICE)
Logger("The file could not view on the editor", LOG_LEVEL_NOTICE)
}
}
buttons.createEl("button", { text: "Back to this revision" }, (e) => {
@ -198,7 +198,7 @@ export class DocumentHistoryModal extends Modal {
// const pathToWrite = this.plugin.id2path(this.id, true);
const pathToWrite = stripPrefix(this.file);
if (!isValidPath(pathToWrite)) {
Logger("Path is not valid to write content.", LOG_LEVEL.INFO);
Logger("Path is not valid to write content.", LOG_LEVEL_INFO);
}
if (this.currentDoc?.datatype == "plain") {
await this.app.vault.adapter.write(pathToWrite, getDocData(this.currentDoc.data));
@ -210,7 +210,7 @@ export class DocumentHistoryModal extends Modal {
this.close();
} else {
Logger(`Could not parse entry`, LOG_LEVEL.NOTICE);
Logger(`Could not parse entry`, LOG_LEVEL_NOTICE);
}
});
});

View File

@ -32,6 +32,7 @@ export class GlobalHistoryView extends ItemView {
return "Vault history";
}
// eslint-disable-next-line require-await
async onOpen() {
this.component = new GlobalHistoryComponent({
target: this.contentEl,
@ -41,6 +42,7 @@ export class GlobalHistoryView extends ItemView {
});
}
// eslint-disable-next-line require-await
async onClose() {
this.component.$destroy();
}

View File

@ -1,5 +1,5 @@
import { App, Modal } from "./deps";
import { FilePath, LoadedEntry } from "./lib/src/types";
import { type FilePath, type LoadedEntry } from "./lib/src/types";
import JsonResolvePane from "./JsonResolvePane.svelte";
export class JsonResolveModal extends Modal {
@ -41,7 +41,7 @@ export class JsonResolveModal extends Modal {
nameA: this.nameA,
nameB: this.nameB,
defaultSelect: this.defaultSelect,
callback: (keepRev, mergedStr) => this.UICallback(keepRev, mergedStr),
callback: (keepRev: string, mergedStr: string) => this.UICallback(keepRev, mergedStr),
},
});
}

View File

@ -1,4 +1,4 @@
import { deleteDB, IDBPDatabase, openDB } from "idb";
import { deleteDB, type IDBPDatabase, openDB } from "idb";
export interface KeyValueDatabase {
get<T>(key: string): Promise<T>;
set<T>(key: string, value: T): Promise<IDBValidKey>;

View File

@ -1,4 +1,4 @@
import { AnyEntry, DocumentID, EntryDoc, EntryHasPath, FilePath, FilePathWithPrefix } from "./lib/src/types";
import { type AnyEntry, type DocumentID, type EntryDoc, type EntryHasPath, type FilePath, type FilePathWithPrefix } from "./lib/src/types";
import { PouchDB } from "./lib/src/pouchdb-browser.js";
import type ObsidianLiveSyncPlugin from "./main";

View File

@ -5,7 +5,7 @@ import {
import LogPaneComponent from "./LogPane.svelte";
import type ObsidianLiveSyncPlugin from "./main";
export const VIEW_TYPE_LOG = "log-log";
// Show notes as like scroll.
//Log view
export class LogPaneView extends ItemView {
component: LogPaneComponent;
@ -32,6 +32,7 @@ export class LogPaneView extends ItemView {
return "Self-hosted LiveSync Log";
}
// eslint-disable-next-line require-await
async onOpen() {
this.component = new LogPaneComponent({
target: this.contentEl,
@ -40,6 +41,7 @@ export class LogPaneView extends ItemView {
});
}
// eslint-disable-next-line require-await
async onClose() {
this.component.$destroy();
}

View File

@ -1,5 +1,5 @@
import { App, PluginSettingTab, Setting, sanitizeHTMLToDom, TextAreaComponent, MarkdownRenderer, stringifyYaml } from "./deps";
import { DEFAULT_SETTINGS, LOG_LEVEL, type ObsidianLiveSyncSettings, type ConfigPassphraseStore, type RemoteDBSettings, type FilePathWithPrefix, type HashAlgorithm, type DocumentID } from "./lib/src/types";
import { DEFAULT_SETTINGS, type ObsidianLiveSyncSettings, type ConfigPassphraseStore, type RemoteDBSettings, type FilePathWithPrefix, type HashAlgorithm, type DocumentID, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "./lib/src/types";
import { delay } from "./lib/src/utils";
import { Semaphore } from "./lib/src/semaphore";
import { versionNumberString2Number } from "./lib/src/strbin";
@ -21,10 +21,10 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
async testConnection(): Promise<void> {
const db = await this.plugin.replicator.connectRemoteCouchDBWithSetting(this.plugin.settings, this.plugin.isMobile, true);
if (typeof db === "string") {
this.plugin.addLog(`could not connect to ${this.plugin.settings.couchDB_URI} : ${this.plugin.settings.couchDB_DBNAME} \n(${db})`, LOG_LEVEL.NOTICE);
this.plugin.addLog(`could not connect to ${this.plugin.settings.couchDB_URI} : ${this.plugin.settings.couchDB_DBNAME} \n(${db})`, LOG_LEVEL_NOTICE);
return;
}
this.plugin.addLog(`Connected to ${db.info.db_name}`, LOG_LEVEL.NOTICE);
this.plugin.addLog(`Connected to ${db.info.db_name}`, LOG_LEVEL_NOTICE);
}
display(): void {
const { containerEl } = this;
@ -111,7 +111,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
}
MarkdownRenderer.renderMarkdown(updateInformation, informationDivEl, "/", this.plugin);
MarkdownRenderer.render(this.plugin.app, updateInformation, informationDivEl, "/", this.plugin);
addScreenElement("100", containerInformationEl);
@ -133,12 +133,13 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
new Setting(setupWizardEl)
.setName("Discard the existing configuration and set up")
.addButton((text) => {
// eslint-disable-next-line require-await
text.setButtonText("Next").onClick(async () => {
if (JSON.stringify(this.plugin.settings) != JSON.stringify(DEFAULT_SETTINGS)) {
this.plugin.replicator.closeReplication();
this.plugin.settings = { ...DEFAULT_SETTINGS };
this.plugin.saveSettings();
Logger("Configuration has been flushed, please open it again", LOG_LEVEL.NOTICE)
Logger("Configuration has been flushed, please open it again", LOG_LEVEL_NOTICE)
// @ts-ignore
this.plugin.app.setting.close()
} else {
@ -302,7 +303,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
const checkConfig = async () => {
try {
if (isCloudantURI(this.plugin.settings.couchDB_URI)) {
Logger("This feature cannot be used with IBM Cloudant.", LOG_LEVEL.NOTICE);
Logger("This feature cannot be used with IBM Cloudant.", LOG_LEVEL_NOTICE);
return;
}
@ -334,11 +335,11 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
const res = await requestToCouchDB(this.plugin.settings.couchDB_URI, this.plugin.settings.couchDB_USER, this.plugin.settings.couchDB_PASSWORD, undefined, key, value);
console.dir(res);
if (res.status == 200) {
Logger(`${title} successfully updated`, LOG_LEVEL.NOTICE);
Logger(`${title} successfully updated`, LOG_LEVEL_NOTICE);
checkResultDiv.removeChild(x);
checkConfig();
} else {
Logger(`${title} failed`, LOG_LEVEL.NOTICE);
Logger(`${title} failed`, LOG_LEVEL_NOTICE);
Logger(res.text);
}
});
@ -451,7 +452,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
addResult("--Done--", ["ob-btn-config-head"]);
addResult("If you have some trouble with Connection-check even though all Config-check has been passed, Please check your reverse proxy's configuration.", ["ob-btn-config-info"]);
} catch (ex) {
Logger(`Checking configuration failed`, LOG_LEVEL.NOTICE);
Logger(`Checking configuration failed`, LOG_LEVEL_NOTICE);
Logger(ex);
}
};
@ -609,25 +610,25 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
console.dir(settingForCheck);
const db = await this.plugin.replicator.connectRemoteCouchDBWithSetting(settingForCheck, this.plugin.isMobile, true);
if (typeof db === "string") {
Logger("Could not connect to the database.", LOG_LEVEL.NOTICE);
Logger("Could not connect to the database.", LOG_LEVEL_NOTICE);
return false;
} else {
if (await checkSyncInfo(db.db)) {
// Logger("Database connected", LOG_LEVEL.NOTICE);
// Logger("Database connected", LOG_LEVEL_NOTICE);
return true;
} else {
Logger("Failed to read remote database", LOG_LEVEL.NOTICE);
Logger("Failed to read remote database", LOG_LEVEL_NOTICE);
return false;
}
}
};
const applyEncryption = async (sendToServer: boolean) => {
if (encrypt && passphrase == "") {
Logger("If you enable encryption, you have to set the passphrase", LOG_LEVEL.NOTICE);
Logger("If you enable encryption, you have to set the passphrase", LOG_LEVEL_NOTICE);
return;
}
if (encrypt && !(await testCrypt())) {
Logger("WARNING! Your device would not support encryption.", LOG_LEVEL.NOTICE);
Logger("WARNING! Your device would not support encryption.", LOG_LEVEL_NOTICE);
return;
}
if (!(await checkWorkingPassphrase()) && !sendToServer) {
@ -654,11 +655,11 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
const rebuildDB = async (method: "localOnly" | "remoteOnly" | "rebuildBothByThisDevice") => {
if (encrypt && passphrase == "") {
Logger("If you enable encryption, you have to set the passphrase", LOG_LEVEL.NOTICE);
Logger("If you enable encryption, you have to set the passphrase", LOG_LEVEL_NOTICE);
return;
}
if (encrypt && !(await testCrypt())) {
Logger("WARNING! Your device would not support encryption.", LOG_LEVEL.NOTICE);
Logger("WARNING! Your device would not support encryption.", LOG_LEVEL_NOTICE);
return;
}
if (!encrypt) {
@ -670,7 +671,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
this.plugin.settings.passphrase = passphrase;
this.plugin.settings.useDynamicIterationCount = useDynamicIterationCount;
this.plugin.settings.usePathObfuscation = usePathObfuscation;
Logger("All synchronization have been temporarily disabled. Please enable them after the fetching, if you need them.", LOG_LEVEL.NOTICE)
Logger("All synchronization have been temporarily disabled. Please enable them after the fetching, if you need them.", LOG_LEVEL_NOTICE)
await this.plugin.saveSettings();
updateE2EControls();
applyDisplayEnabled();
@ -882,7 +883,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
.setCta()
.onClick(async () => {
if (currentPreset == "") {
Logger("Select any preset.", LOG_LEVEL.NOTICE);
Logger("Select any preset.", LOG_LEVEL_NOTICE);
return;
}
const presetAllDisabled = {
@ -913,15 +914,15 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
...this.plugin.settings,
...presetLiveSync
}
Logger("Synchronization setting configured as LiveSync.", LOG_LEVEL.NOTICE);
Logger("Synchronization setting configured as LiveSync.", LOG_LEVEL_NOTICE);
} else if (currentPreset == "PERIODIC") {
this.plugin.settings = {
...this.plugin.settings,
...presetPeriodic
}
Logger("Synchronization setting configured as Periodic sync with batch database update.", LOG_LEVEL.NOTICE);
Logger("Synchronization setting configured as Periodic sync with batch database update.", LOG_LEVEL_NOTICE);
} else {
Logger("All synchronization disabled.", LOG_LEVEL.NOTICE);
Logger("All synchronization disabled.", LOG_LEVEL_NOTICE);
this.plugin.settings = {
...this.plugin.settings,
...presetAllDisabled
@ -943,7 +944,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
}
await this.plugin.replicate(true);
Logger("All done! Please set up subsequent devices with 'Copy setup URI' and 'Open setup URI'.", LOG_LEVEL.NOTICE);
Logger("All done! Please set up subsequent devices with 'Copy setup URI' and 'Open setup URI'.", LOG_LEVEL_NOTICE);
// @ts-ignore
this.plugin.app.commands.executeCommandById("obsidian-livesync:livesync-copysetupuri")
}
@ -1330,7 +1331,38 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
return text;
}
);
new Setting(containerSyncSettingEl)
.setName("(Beta) Use ignore files")
.setDesc("If this is set, changes to local files which are matched by the ignore files will be skipped. Remote changes are determined using local ignore files.")
.setClass("wizardHidden")
.addToggle((toggle) => {
toggle
.setValue(this.plugin.settings.useIgnoreFiles)
.onChange(async (value) => {
this.plugin.settings.useIgnoreFiles = value;
await this.plugin.saveSettings();
this.display();
})
return toggle;
}
);
if (this.plugin.settings.useIgnoreFiles) {
new Setting(containerSyncSettingEl)
.setName("Ignore files")
.setDesc("We can use multiple ignore files, e.g.) `.gitignore, .dockerignore`")
.setClass("wizardHidden")
.addTextArea((text) => {
text
.setValue(this.plugin.settings.ignoreFiles)
.setPlaceholder(".gitignore, .dockerignore")
.onChange(async (value) => {
this.plugin.settings.ignoreFiles = value;
await this.plugin.saveSettings();
})
return text;
}
);
}
containerSyncSettingEl.createEl("h4", {
text: sanitizeHTMLToDom(`Advanced settings`),
}).addClass("wizardHidden");
@ -1468,7 +1500,7 @@ ${stringifyYaml(responseConfig)}
${stringifyYaml(pluginConfig)}`;
console.log(msgConfig);
await navigator.clipboard.writeText(msgConfig);
Logger(`Information has been copied to clipboard`, LOG_LEVEL.NOTICE);
Logger(`Information has been copied to clipboard`, LOG_LEVEL_NOTICE);
})
);
@ -1521,11 +1553,11 @@ ${stringifyYaml(pluginConfig)}`;
Logger(`UPDATE DATABASE ${file.path}`);
await this.plugin.updateIntoDB(file, false, null, true);
i++;
Logger(`${i}/${files.length}\n${file.path}`, LOG_LEVEL.NOTICE, "verify");
Logger(`${i}/${files.length}\n${file.path}`, LOG_LEVEL_NOTICE, "verify");
} catch (ex) {
i++;
Logger(`Error while verifyAndRepair`, LOG_LEVEL.NOTICE);
Logger(`Error while verifyAndRepair`, LOG_LEVEL_NOTICE);
Logger(ex);
} finally {
releaser();
@ -1533,7 +1565,7 @@ ${stringifyYaml(pluginConfig)}`;
}
)(e));
await Promise.all(processes);
Logger("done", LOG_LEVEL.NOTICE, "verify");
Logger("done", LOG_LEVEL_NOTICE, "verify");
})
);
new Setting(containerHatchEl)
@ -1573,35 +1605,35 @@ ${stringifyYaml(pluginConfig)}`;
}
const ret = await this.plugin.localDatabase.putRaw(newDoc, { force: true });
if (ret.ok) {
Logger(`${docName} has been converted as conflicted document`, LOG_LEVEL.NOTICE);
Logger(`${docName} has been converted as conflicted document`, LOG_LEVEL_NOTICE);
doc._deleted = true;
if ((await this.plugin.localDatabase.putRaw(doc)).ok) {
Logger(`Old ${docName} has been deleted`, LOG_LEVEL.NOTICE);
Logger(`Old ${docName} has been deleted`, LOG_LEVEL_NOTICE);
}
await this.plugin.showIfConflicted(docName as FilePathWithPrefix);
} else {
Logger(`Converting ${docName} Failed!`, LOG_LEVEL.NOTICE);
Logger(ret, LOG_LEVEL.VERBOSE);
Logger(`Converting ${docName} Failed!`, LOG_LEVEL_NOTICE);
Logger(ret, LOG_LEVEL_VERBOSE);
}
} catch (ex) {
if (ex?.status == 404) {
// We can perform this safely
if ((await this.plugin.localDatabase.putRaw(newDoc)).ok) {
Logger(`${docName} has been converted`, LOG_LEVEL.NOTICE);
Logger(`${docName} has been converted`, LOG_LEVEL_NOTICE);
doc._deleted = true;
if ((await this.plugin.localDatabase.putRaw(doc)).ok) {
Logger(`Old ${docName} has been deleted`, LOG_LEVEL.NOTICE);
Logger(`Old ${docName} has been deleted`, LOG_LEVEL_NOTICE);
}
}
} else {
Logger(`Something went wrong on converting ${docName}`, LOG_LEVEL.NOTICE);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(`Something went wrong on converting ${docName}`, LOG_LEVEL_NOTICE);
Logger(ex, LOG_LEVEL_VERBOSE);
// Something wrong.
}
}
}
}
Logger(`Converting finished`, LOG_LEVEL.NOTICE);
Logger(`Converting finished`, LOG_LEVEL_NOTICE);
}));
new Setting(containerHatchEl)
.setName("Suspend file watching")
@ -1727,12 +1759,12 @@ ${stringifyYaml(pluginConfig)}`;
button.setButtonText("Change")
.onClick(async () => {
if (this.plugin.settings.additionalSuffixOfDatabaseName == newDatabaseName) {
Logger("Suffix was not changed.", LOG_LEVEL.NOTICE);
Logger("Suffix was not changed.", LOG_LEVEL_NOTICE);
return;
}
this.plugin.settings.additionalSuffixOfDatabaseName = newDatabaseName;
await this.plugin.saveSettings();
Logger("Suffix has been changed. Reopening database...", LOG_LEVEL.NOTICE);
Logger("Suffix has been changed. Reopening database...", LOG_LEVEL_NOTICE);
await this.plugin.initializeDatabase();
})
})
@ -1786,11 +1818,11 @@ ${stringifyYaml(pluginConfig)}`;
vaultName.setDisabled(this.plugin.settings.usePluginSync);
// vaultName.setTooltip(this.plugin.settings.autoSweepPlugins || this.plugin.settings.autoSweepPluginsPeriodic ? "You could not change when you enabling auto scan." : "");
};
updateDisabledOfDeviceAndVaultName
updateDisabledOfDeviceAndVaultName();
new Setting(containerPluginSettings).setName("Enable customization sync").addToggle((toggle) =>
toggle.setValue(this.plugin.settings.usePluginSync).onChange(async (value) => {
if (value && this.plugin.deviceAndVaultName.trim() == "") {
Logger("We have to configure `Device name` to use this feature.", LOG_LEVEL.NOTICE);
Logger("We have to configure `Device name` to use this feature.", LOG_LEVEL_NOTICE);
toggle.setValue(false);
return false;
}
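
The new "(Beta) Use ignore files" toggle and "Ignore files" text area above store a comma-separated list of ignore-file names (e.g. .gitignore, .dockerignore). A hypothetical sketch of turning that setting into individual file names before each one is read (the real parsing code is not part of this diff):

// Hypothetical: split the comma-separated "Ignore files" setting into trimmed, non-empty names.
const ignoreFileNames = this.plugin.settings.ignoreFiles
    .split(",")
    .map((name) => name.trim())
    .filter((name) => name.length > 0);
// e.g. ".gitignore, .dockerignore" -> [".gitignore", ".dockerignore"]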

View File

@ -2,7 +2,7 @@
import type { PluginDataExDisplay } from "./CmdConfigSync";
import { Logger } from "./lib/src/logger";
import { versionNumberString2Number } from "./lib/src/strbin";
import { type FilePath, LOG_LEVEL } from "./lib/src/types";
import { type FilePath, LOG_LEVEL_NOTICE } from "./lib/src/types";
import { getDocData } from "./lib/src/utils";
import type ObsidianLiveSyncPlugin from "./main";
import { askString, scheduleTask } from "./utils";
@ -229,7 +229,7 @@
const duplicateTermName = await askString(plugin.app, "Duplicate", "device name", "");
if (duplicateTermName) {
if (duplicateTermName.contains("/")) {
Logger(`We can not use "/" to the device name`, LOG_LEVEL.NOTICE);
Logger(`We can not use "/" to the device name`, LOG_LEVEL_NOTICE);
return;
}
const key = `${plugin.app.vault.configDir}/${local.files[0].filename}`;

@ -1,8 +1,8 @@
import { Plugin_2, TAbstractFile, TFile, TFolder } from "./deps";
import { Plugin, TAbstractFile, TFile, TFolder } from "./deps";
import { isPlainText, shouldBeIgnored } from "./lib/src/path";
import { getGlobalStore } from "./lib/src/store";
import { FilePath, ObsidianLiveSyncSettings } from "./lib/src/types";
import { FileEventItem, FileEventType, FileInfo, InternalFileInfo, queueItem } from "./types";
import { type FilePath, type ObsidianLiveSyncSettings } from "./lib/src/types";
import { type FileEventItem, type FileEventType, type FileInfo, type InternalFileInfo, type queueItem } from "./types";
import { recentlyTouched } from "./utils";
@ -12,12 +12,13 @@ export abstract class StorageEventManager {
abstract getQueueLength(): number;
}
type LiveSyncForStorageEventManager = Plugin_2 &
type LiveSyncForStorageEventManager = Plugin &
{
settings: ObsidianLiveSyncSettings
ignoreFiles: string[],
} & {
isTargetFile: (file: string | TAbstractFile) => boolean,
procFileEvent: (applyBatch?: boolean) => Promise<boolean>
isTargetFile: (file: string | TAbstractFile) => Promise<boolean>,
procFileEvent: (applyBatch?: boolean) => Promise<boolean>,
};
@ -35,12 +36,12 @@ export class StorageEventManagerObsidian extends StorageEventManager {
this.watchVaultDelete = this.watchVaultDelete.bind(this);
this.watchVaultRename = this.watchVaultRename.bind(this);
this.watchVaultRawEvents = this.watchVaultRawEvents.bind(this);
plugin.registerEvent(app.vault.on("modify", this.watchVaultChange));
plugin.registerEvent(app.vault.on("delete", this.watchVaultDelete));
plugin.registerEvent(app.vault.on("rename", this.watchVaultRename));
plugin.registerEvent(app.vault.on("create", this.watchVaultCreate));
plugin.registerEvent(plugin.app.vault.on("modify", this.watchVaultChange));
plugin.registerEvent(plugin.app.vault.on("delete", this.watchVaultDelete));
plugin.registerEvent(plugin.app.vault.on("rename", this.watchVaultRename));
plugin.registerEvent(plugin.app.vault.on("create", this.watchVaultCreate));
//@ts-ignore : Internal API
plugin.registerEvent(app.vault.on("raw", this.watchVaultRawEvents));
plugin.registerEvent(plugin.app.vault.on("raw", this.watchVaultRawEvents));
}
watchVaultCreate(file: TAbstractFile, ctx?: any) {
@ -64,9 +65,18 @@ export class StorageEventManagerObsidian extends StorageEventManager {
}
// Watch raw events (Internal API)
watchVaultRawEvents(path: FilePath) {
if (this.plugin.settings.useIgnoreFiles && this.plugin.ignoreFiles.some(e => path.endsWith(e.trim()))) {
// If it is one of the ignore files, refresh the cached entry.
this.plugin.isTargetFile(path).then(() => this._watchVaultRawEvents(path));
} else {
this._watchVaultRawEvents(path);
}
}
_watchVaultRawEvents(path: FilePath) {
if (!this.plugin.settings.syncInternalFiles && !this.plugin.settings.usePluginSync) return;
if (!this.plugin.settings.watchInternalFileChanges) return;
if (!path.startsWith(app.vault.configDir)) return;
if (!path.startsWith(this.plugin.app.vault.configDir)) return;
const ignorePatterns = this.plugin.settings.syncInternalFilesIgnorePatterns
.replace(/\n| /g, "")
.split(",").filter(e => e).map(e => new RegExp(e, "i"));
@ -77,7 +87,6 @@ export class StorageEventManagerObsidian extends StorageEventManager {
file: { path, mtime: 0, ctime: 0, size: 0 }
}], null);
}
// Cache the file content and wait until it can be processed.
async appendWatchEvent(params: { type: FileEventType, file: TAbstractFile | InternalFileInfo, oldPath?: string }[], ctx?: any) {
let forcePerform = false;
@ -90,7 +99,7 @@ export class StorageEventManagerObsidian extends StorageEventManager {
const file = param.file;
const oldPath = param.oldPath;
if (file instanceof TFolder) continue;
if (!this.plugin.isTargetFile(file.path)) continue;
if (!await this.plugin.isTargetFile(file.path)) continue;
if (this.plugin.settings.suspendFileWatching) continue;
let cache: null | string | ArrayBuffer;
@ -100,11 +109,11 @@ export class StorageEventManagerObsidian extends StorageEventManager {
continue;
}
if (!isPlainText(file.name)) {
cache = await app.vault.readBinary(file);
cache = await this.plugin.app.vault.readBinary(file);
} else {
// cache = await this.app.vault.read(file);
cache = await app.vault.cachedRead(file);
if (!cache) cache = await app.vault.read(file);
cache = await this.plugin.app.vault.cachedRead(file);
if (!cache) cache = await this.plugin.app.vault.read(file);
}
}
if (type == "DELETE" || type == "RENAME") {

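The raw-event handler above refreshes the cached ignore file before re-evaluating the path, so an edit to an ignore file takes effect for the events that follow it. A simplified, standalone sketch of that pattern, assuming nothing about the plugin's internals beyond what is shown here (the names below are illustrative, not the plugin's API):

type IgnoreChecker = (path: string) => Promise<boolean>;

function makeRawEventHandler(
    ignoreFiles: string[],                              // e.g. [".gitignore"]
    refreshIgnoreFile: (path: string) => Promise<void>, // re-read and re-cache the ignore file
    isTarget: IgnoreChecker,                            // async check that consults the cached rules
    process: (path: string) => void
) {
    return async (path: string) => {
        if (ignoreFiles.some((e) => path.endsWith(e.trim()))) {
            // The ignore file itself changed: refresh its cached content first,
            // so the isTarget() call below already sees the new rules.
            await refreshIgnoreFile(path);
        }
        if (await isTarget(path)) process(path);
    };
}
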
@ -1,7 +1,7 @@
import { type FilePath } from "./lib/src/types";
export {
addIcon, App, debounce, Editor, FuzzySuggestModal, MarkdownRenderer, MarkdownView, Modal, Notice, Platform, Plugin, PluginSettingTab, Plugin_2, requestUrl, sanitizeHTMLToDom, Setting, stringifyYaml, TAbstractFile, TextAreaComponent, TFile, TFolder,
addIcon, App, debounce, Editor, FuzzySuggestModal, MarkdownRenderer, MarkdownView, Modal, Notice, Platform, Plugin, PluginSettingTab, requestUrl, sanitizeHTMLToDom, Setting, stringifyYaml, TAbstractFile, TextAreaComponent, TFile, TFolder,
parseYaml, ItemView, WorkspaceLeaf
} from "obsidian";
export type { DataWriteOptions, PluginManifest, RequestUrlParam, RequestUrlResponse } from "obsidian";

@ -183,8 +183,9 @@ export class MessageBox extends Modal {
})
contentEl.createEl("h1", { text: this.title });
const div = contentEl.createDiv();
MarkdownRenderer.renderMarkdown(this.contentMd, div, "/", this.plugin);
MarkdownRenderer.render(this.plugin.app, this.contentMd, div, "/", this.plugin);
const buttonSetting = new Setting(contentEl);
buttonSetting.controlEl.style.flexWrap = "wrap";
for (const button of this.buttons) {
buttonSetting.addButton((btn) => {
btn

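Two small changes in this dialog: the deprecated MarkdownRenderer.renderMarkdown call is replaced by MarkdownRenderer.render, and the button row is allowed to wrap so that every button stays reachable on narrow screens. A minimal sketch of the wrapping part, assuming a plain Obsidian Modal (the button labels are placeholders):

import { App, Modal, Setting } from "obsidian";

export class ExampleDialog extends Modal {
    constructor(app: App) {
        super(app);
    }
    onOpen() {
        const buttons = new Setting(this.contentEl);
        // Letting the control element wrap keeps every button visible on narrow devices.
        buttons.controlEl.style.flexWrap = "wrap";
        for (const label of ["OK", "Cancel", "Remind me later"]) {
            buttons.addButton((btn) => btn.setButtonText(label).onClick(() => this.close()));
        }
    }
}
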
@ -1 +1 @@
Subproject commit 1a5cac6d6539ec06ed8b544d67e7baa2260d306d
Subproject commit 6efd115e0e72cfdb775ae452a3ee1eb4798eed77

@ -2,7 +2,7 @@ const isDebug = false;
import { type Diff, DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, diff_match_patch } from "./deps";
import { debounce, Notice, Plugin, TFile, addIcon, TFolder, normalizePath, TAbstractFile, Editor, MarkdownView, type RequestUrlParam, type RequestUrlResponse, requestUrl } from "./deps";
import { type EntryDoc, type LoadedEntry, type ObsidianLiveSyncSettings, type diff_check_result, type diff_result_leaf, type EntryBody, LOG_LEVEL, VER, DEFAULT_SETTINGS, type diff_result, FLAGMD_REDFLAG, SYNCINFO_ID, SALT_OF_PASSPHRASE, type ConfigPassphraseStore, type CouchDBConnection, FLAGMD_REDFLAG2, FLAGMD_REDFLAG3, PREFIXMD_LOGFILE, type DatabaseConnectingStatus, type EntryHasPath, type DocumentID, type FilePathWithPrefix, type FilePath, type AnyEntry } from "./lib/src/types";
import { type EntryDoc, type LoadedEntry, type ObsidianLiveSyncSettings, type diff_check_result, type diff_result_leaf, type EntryBody, LOG_LEVEL, VER, DEFAULT_SETTINGS, type diff_result, FLAGMD_REDFLAG, SYNCINFO_ID, SALT_OF_PASSPHRASE, type ConfigPassphraseStore, type CouchDBConnection, FLAGMD_REDFLAG2, FLAGMD_REDFLAG3, PREFIXMD_LOGFILE, type DatabaseConnectingStatus, type EntryHasPath, type DocumentID, type FilePathWithPrefix, type FilePath, type AnyEntry, LOG_LEVEL_DEBUG, LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_URGENT, LOG_LEVEL_VERBOSE } from "./lib/src/types";
import { type InternalFileInfo, type queueItem, type CacheData, type FileEventItem, FileWatchEventQueueMax } from "./types";
import { arrayToChunkedArray, getDocData, isDocContentSame } from "./lib/src/utils";
import { Logger, setGlobalLogFunction } from "./lib/src/logger";
@ -17,7 +17,7 @@ import { getGlobalStore, ObservableStore, observeStores } from "./lib/src/store"
import { lockStore, logMessageStore, logStore, type LogEntry } from "./lib/src/stores";
import { setNoticeClass } from "./lib/src/wrapper";
import { base64ToString, versionNumberString2Number, base64ToArrayBuffer, arrayBufferToBase64 } from "./lib/src/strbin";
import { addPrefix, isPlainText, shouldBeIgnored, stripAllPrefixes } from "./lib/src/path";
import { addPrefix, isAcceptedAll, isPlainText, shouldBeIgnored, stripAllPrefixes } from "./lib/src/path";
import { isLockAcquired, runWithLock } from "./lib/src/lock";
import { Semaphore } from "./lib/src/semaphore";
import { StorageEventManager, StorageEventManagerObsidian } from "./StorageEventManager";
@ -32,6 +32,7 @@ import { confirmWithMessage } from "./dialogs";
import { GlobalHistoryView, VIEW_TYPE_GLOBAL_HISTORY } from "./GlobalHistoryView";
import { LogPaneView, VIEW_TYPE_LOG } from "./LogPaneView";
import { mapAllTasksWithConcurrencyLimit, processAllTasksWithConcurrencyLimit } from "./lib/src/task";
import { LRUCache } from "./lib/src/LRUCache";
setNoticeClass(Notice);
@ -124,7 +125,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
// over 10MB
if (isCloudantURI(uri)) {
this.last_successful_post = false;
Logger("This request should fail on IBM Cloudant.", LOG_LEVEL.VERBOSE);
Logger("This request should fail on IBM Cloudant.", LOG_LEVEL_VERBOSE);
throw new Error("This request should fail on IBM Cloudant.");
}
}
@ -156,7 +157,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
} else {
this.last_successful_post = true;
}
Logger(`HTTP:${method}${size} to:${localURL} -> ${r.status}`, LOG_LEVEL.DEBUG);
Logger(`HTTP:${method}${size} to:${localURL} -> ${r.status}`, LOG_LEVEL_DEBUG);
return new Response(r.arrayBuffer, {
headers: r.headers,
@ -164,7 +165,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
statusText: `${r.status}`,
});
} catch (ex) {
Logger(`HTTP:${method}${size} to:${localURL} -> failed`, LOG_LEVEL.VERBOSE);
Logger(`HTTP:${method}${size} to:${localURL} -> failed`, LOG_LEVEL_VERBOSE);
// limit only in bulk_docs.
if (url.toString().indexOf("_bulk_docs") !== -1) {
this.last_successful_post = false;
@ -183,10 +184,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin
} else {
this.last_successful_post = true;
}
Logger(`HTTP:${method}${size} to:${localURL} -> ${response.status}`, LOG_LEVEL.DEBUG);
Logger(`HTTP:${method}${size} to:${localURL} -> ${response.status}`, LOG_LEVEL_DEBUG);
return response;
} catch (ex) {
Logger(`HTTP:${method}${size} to:${localURL} -> failed`, LOG_LEVEL.VERBOSE);
Logger(`HTTP:${method}${size} to:${localURL} -> failed`, LOG_LEVEL_VERBOSE);
// limit only in bulk_docs.
if (url.toString().indexOf("_bulk_docs") !== -1) {
this.last_successful_post = false;
@ -213,7 +214,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if (ex.name == "TypeError" && ex.message == "Failed to fetch") {
msg += "\n**Note** This error caused by many reasons. The only sure thing is you didn't touch the server.\nTo check details, open inspector.";
}
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return msg;
}
}
@ -345,7 +346,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
notes.sort((a, b) => b.mtime - a.mtime);
const notesList = notes.map(e => e.dispPath);
if (notesList.length == 0) {
Logger("There are no conflicted documents", LOG_LEVEL.NOTICE);
Logger("There are no conflicted documents", LOG_LEVEL_NOTICE);
return false;
}
const target = await askSelectString(this.app, "File to view History", notesList);
@ -397,7 +398,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
async onLayoutReady() {
this.registerFileWatchEvents();
if (!this.localDatabase.isReady) {
Logger(`Something went wrong! The local database is not ready`, LOG_LEVEL.NOTICE);
Logger(`Something went wrong! The local database is not ready`, LOG_LEVEL_NOTICE);
return;
}
@ -409,7 +410,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
this.settings.suspendFileWatching = true;
await this.saveSettings();
if (this.isRedFlag2Raised()) {
Logger(`${FLAGMD_REDFLAG2} has been detected! Self-hosted LiveSync suspends all sync and rebuild everything.`, LOG_LEVEL.NOTICE);
Logger(`${FLAGMD_REDFLAG2} has been detected! Self-hosted LiveSync suspends all sync and rebuild everything.`, LOG_LEVEL_NOTICE);
await this.addOnSetup.rebuildEverything();
await this.deleteRedFlag2();
if (await askYesNo(this.app, "Do you want to disable Suspend file watching and restart obsidian now?") == "yes") {
@ -419,7 +420,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
this.app.commands.executeCommandById("app:reload")
}
} else if (this.isRedFlag3Raised()) {
Logger(`${FLAGMD_REDFLAG3} has been detected! Self-hosted LiveSync will discard the local database and fetch everything from the remote once again.`, LOG_LEVEL.NOTICE);
Logger(`${FLAGMD_REDFLAG3} has been detected! Self-hosted LiveSync will discard the local database and fetch everything from the remote once again.`, LOG_LEVEL_NOTICE);
await this.addOnSetup.fetchLocal();
await this.deleteRedFlag3();
if (this.settings.suspendFileWatching) {
@ -434,15 +435,15 @@ export default class ObsidianLiveSyncPlugin extends Plugin
this.settings.writeLogToTheFile = true;
await this.openDatabase();
const warningMessage = "The red flag is raised! The whole initialize steps are skipped, and any file changes are not captured.";
Logger(warningMessage, LOG_LEVEL.NOTICE);
Logger(warningMessage, LOG_LEVEL_NOTICE);
this.setStatusBarText(warningMessage);
}
} else {
if (this.settings.suspendFileWatching) {
Logger("'Suspend file watching' turned on. Are you sure this is what you intended? Every modification on the vault will be ignored.", LOG_LEVEL.NOTICE);
Logger("'Suspend file watching' turned on. Are you sure this is what you intended? Every modification on the vault will be ignored.", LOG_LEVEL_NOTICE);
}
if (this.settings.suspendParseReplicationResult) {
Logger("'Suspend database reflecting' turned on. Are you sure this is what you intended? Every replicated change will be postponed until disabling this option.", LOG_LEVEL.NOTICE);
Logger("'Suspend database reflecting' turned on. Are you sure this is what you intended? Every replicated change will be postponed until disabling this option.", LOG_LEVEL_NOTICE);
}
const isInitialized = await this.initializeDatabase(false, false);
if (!isInitialized) {
@ -457,8 +458,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
this.scanStat();
} catch (ex) {
Logger("Error while loading Self-hosted LiveSync", LOG_LEVEL.NOTICE);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger("Error while loading Self-hosted LiveSync", LOG_LEVEL_NOTICE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
@ -467,20 +468,20 @@ export default class ObsidianLiveSyncPlugin extends Plugin
*/
async scanStat() {
const notes: { path: string, mtime: number }[] = [];
Logger(`Additional safety scan..`, LOG_LEVEL.VERBOSE);
Logger(`Additional safety scan..`, LOG_LEVEL_VERBOSE);
for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
if (!("_conflicts" in doc)) continue;
notes.push({ path: this.getPath(doc), mtime: doc.mtime });
}
if (notes.length > 0) {
Logger(`Some files have been left conflicted! Please resolve them by "Pick a file to resolve conflict". The list is written in the log.`, LOG_LEVEL.NOTICE);
Logger(`Some files have been left conflicted! Please resolve them by "Pick a file to resolve conflict". The list is written in the log.`, LOG_LEVEL_NOTICE);
for (const note of notes) {
Logger(`Conflicted: ${note.path}`);
}
} else {
Logger(`There are no conflicted files`, LOG_LEVEL.VERBOSE);
Logger(`There are no conflicted files`, LOG_LEVEL_VERBOSE);
}
Logger(`Additional safety scan done`, LOG_LEVEL.VERBOSE);
Logger(`Additional safety scan done`, LOG_LEVEL_VERBOSE);
}
async onload() {
@ -501,7 +502,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const lastVersion = ~~(versionNumberString2Number(manifestVersion) / 1000);
if (lastVersion > this.settings.lastReadUpdates) {
Logger("Self-hosted LiveSync has undergone a major upgrade. Please open the setting dialog, and check the information pane.", LOG_LEVEL.NOTICE);
Logger("Self-hosted LiveSync has undergone a major upgrade. Please open the setting dialog, and check the information pane.", LOG_LEVEL_NOTICE);
}
//@ts-ignore
if (this.app.isMobile) {
@ -589,10 +590,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin
callback: async () => {
if (this.settings.liveSync) {
this.settings.liveSync = false;
Logger("LiveSync Disabled.", LOG_LEVEL.NOTICE);
Logger("LiveSync Disabled.", LOG_LEVEL_NOTICE);
} else {
this.settings.liveSync = true;
Logger("LiveSync Enabled.", LOG_LEVEL.NOTICE);
Logger("LiveSync Enabled.", LOG_LEVEL_NOTICE);
}
await this.realizeSettingSyncMode();
this.saveSettings();
@ -604,10 +605,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin
callback: async () => {
if (this.suspended) {
this.suspended = false;
Logger("Self-hosted LiveSync resumed", LOG_LEVEL.NOTICE);
Logger("Self-hosted LiveSync resumed", LOG_LEVEL_NOTICE);
} else {
this.suspended = true;
Logger("Self-hosted LiveSync suspended", LOG_LEVEL.NOTICE);
Logger("Self-hosted LiveSync suspended", LOG_LEVEL_NOTICE);
}
await this.realizeSettingSyncMode();
this.saveSettings();
@ -772,7 +773,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const passphrase = await this.getPassphrase(settings);
if (passphrase === false) {
Logger("Could not determine passphrase to save data.json! You probably make the configuration sure again!", LOG_LEVEL.URGENT);
Logger("Could not determine passphrase to save data.json! You probably make the configuration sure again!", LOG_LEVEL_URGENT);
return "";
}
const dec = await encrypt(src, passphrase + SALT_OF_PASSPHRASE, false);
@ -788,7 +789,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData()) as ObsidianLiveSyncSettings;
const passphrase = await this.getPassphrase(settings);
if (passphrase === false) {
Logger("Could not determine passphrase for reading data.json! DO NOT synchronize with the remote before making sure your configuration is!", LOG_LEVEL.URGENT);
Logger("Could not determine passphrase for reading data.json! DO NOT synchronize with the remote before making sure your configuration is!", LOG_LEVEL_URGENT);
} else {
if (settings.encryptedCouchDBConnection) {
const keys = ["couchDB_URI", "couchDB_USER", "couchDB_PASSWORD", "couchDB_DBNAME"] as (keyof CouchDBConnection)[];
@ -800,7 +801,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
}
} else {
Logger("Could not decrypt passphrase for reading data.json! DO NOT synchronize with the remote before making sure your configuration is!", LOG_LEVEL.URGENT);
Logger("Could not decrypt passphrase for reading data.json! DO NOT synchronize with the remote before making sure your configuration is!", LOG_LEVEL_URGENT);
for (const key of keys) {
settings[key] = "";
}
@ -812,7 +813,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if (decrypted) {
settings.passphrase = decrypted;
} else {
Logger("Could not decrypt passphrase for reading data.json! DO NOT synchronize with the remote before making sure your configuration is!", LOG_LEVEL.URGENT);
Logger("Could not decrypt passphrase for reading data.json! DO NOT synchronize with the remote before making sure your configuration is!", LOG_LEVEL_URGENT);
settings.passphrase = "";
}
}
@ -840,10 +841,11 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
}
if (isCloudantURI(this.settings.couchDB_URI) && this.settings.customChunkSize != 0) {
Logger("Configuration verification founds problems with your configuration. This has been fixed automatically. But you may already have data that cannot be synchronised. If this is the case, please rebuild everything.", LOG_LEVEL.NOTICE)
Logger("Configuration verification founds problems with your configuration. This has been fixed automatically. But you may already have data that cannot be synchronised. If this is the case, please rebuild everything.", LOG_LEVEL_NOTICE)
this.settings.customChunkSize = 0;
}
this.deviceAndVaultName = localStorage.getItem(lsKey) || "";
this.ignoreFiles = this.settings.ignoreFiles.split(",").map(e => e.trim());
}
triggerRealizeSettingSyncMode() {
@ -854,9 +856,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const lsKey = "obsidian-live-sync-vaultanddevicename-" + this.getVaultName();
localStorage.setItem(lsKey, this.deviceAndVaultName || "");
const settings = { ...this.settings };
if (this.usedPassphrase == "" && !await this.getPassphrase(settings)) {
Logger("Could not determine passphrase for saving data.json! Our data.json have insecure items!", LOG_LEVEL.NOTICE);
Logger("Could not determine passphrase for saving data.json! Our data.json have insecure items!", LOG_LEVEL_NOTICE);
} else {
if (settings.couchDB_PASSWORD != "" || settings.couchDB_URI != "" || settings.couchDB_USER != "" || settings.couchDB_DBNAME) {
const connectionSetting: CouchDBConnection = {
@ -878,6 +881,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
await this.saveData(settings);
this.localDatabase.settings = this.settings;
this.ignoreFiles = this.settings.ignoreFiles.split(",").map(e => e.trim());
this.triggerRealizeSettingSyncMode();
}
@ -967,12 +971,12 @@ export default class ObsidianLiveSyncPlugin extends Plugin
} else {
const targetFile = this.app.vault.getAbstractFileByPath(file.path);
if (!(targetFile instanceof TFile)) {
Logger(`Target file was not found: ${file.path}`, LOG_LEVEL.INFO);
Logger(`Target file was not found: ${file.path}`, LOG_LEVEL_INFO);
continue;
}
//TODO: check from cache time.
if (file.mtime == last) {
Logger(`File has been already scanned on ${queue.type}, skip: ${file.path}`, LOG_LEVEL.VERBOSE);
Logger(`File has been already scanned on ${queue.type}, skip: ${file.path}`, LOG_LEVEL_VERBOSE);
continue;
}
@ -981,7 +985,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const keyD1 = `file-last-proc-DELETED-${file.path}`;
await this.kvDB.set(keyD1, mtime);
if (!await this.updateIntoDB(targetFile, false, cache)) {
Logger(`DB -> STORAGE: failed, cancel the relative operations: ${targetFile.path}`, LOG_LEVEL.INFO);
Logger(`DB -> STORAGE: failed, cancel the relative operations: ${targetFile.path}`, LOG_LEVEL_INFO);
// cancel running queues and remove one of the atomic operations
this.vaultManager.cancelRelativeEvent(queue);
continue;
@ -1038,7 +1042,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
async watchVaultRenameAsync(file: TFile, oldFile: any, cache?: CacheData) {
Logger(`${oldFile} renamed to ${file.path}`, LOG_LEVEL.VERBOSE);
Logger(`${oldFile} renamed to ${file.path}`, LOG_LEVEL_VERBOSE);
if (file instanceof TFile) {
try {
// Logger(`RENAMING.. ${file.path} into db`);
@ -1046,7 +1050,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
// Logger(`deleted ${oldFile} from db`);
await this.deleteFromDBbyPath(oldFile);
} else {
Logger(`Could not save new file: ${file.path} `, LOG_LEVEL.NOTICE);
Logger(`Could not save new file: ${file.path} `, LOG_LEVEL_NOTICE);
}
} catch (ex) {
Logger(ex);
@ -1059,14 +1063,14 @@ export default class ObsidianLiveSyncPlugin extends Plugin
lastLog = "";
// eslint-disable-next-line require-await
async addLog(message: any, level: LOG_LEVEL = LOG_LEVEL.INFO, key = "") {
if (level == LOG_LEVEL.DEBUG && !isDebug) {
async addLog(message: any, level: LOG_LEVEL = LOG_LEVEL_INFO, key = "") {
if (level == LOG_LEVEL_DEBUG && !isDebug) {
return;
}
if (level < LOG_LEVEL.INFO && this.settings && this.settings.lessInformationInLog) {
if (level < LOG_LEVEL_INFO && this.settings && this.settings.lessInformationInLog) {
return;
}
if (this.settings && !this.settings.showVerboseLog && level == LOG_LEVEL.VERBOSE) {
if (this.settings && !this.settings.showVerboseLog && level == LOG_LEVEL_VERBOSE) {
return;
}
const vaultName = this.getVaultName();
@ -1092,7 +1096,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
logMessageStore.apply(e => [...e, newMessage].slice(-100));
this.setStatusBarText(null, messageContent);
if (level >= LOG_LEVEL.NOTICE) {
if (level >= LOG_LEVEL_NOTICE) {
if (!key) key = messageContent;
if (key in this.notifies) {
// @ts-ignore
@ -1159,13 +1163,13 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if (shouldBeIgnored(path)) {
return;
}
if (!this.isTargetFile(path)) return;
if (!await this.isTargetFile(path)) return;
if (docEntry._deleted || docEntry.deleted) {
// This occurs not only when files are deleted, but also when conflicts are resolved.
// We have to check no other revisions are left.
const lastDocs = await this.localDatabase.getDBEntry(path);
if (path != file.path) {
Logger(`delete skipped: ${file.path} :Not exactly matched`, LOG_LEVEL.VERBOSE);
Logger(`delete skipped: ${file.path} :Not exactly matched`, LOG_LEVEL_VERBOSE);
}
if (lastDocs === false) {
await this.deleteVaultItem(file);
@ -1173,7 +1177,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
// it perhaps deletes some revisions.
// maybe we have to reload this
await this.pullFile(path, null, true);
Logger(`delete skipped:${file.path}`, LOG_LEVEL.VERBOSE);
Logger(`delete skipped:${file.path}`, LOG_LEVEL_VERBOSE);
}
return;
}
@ -1184,12 +1188,12 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if (doc === false) return;
const msg = `DB -> STORAGE (${mode}${force ? ",force" : ""},${doc.datatype}) `;
if (doc.datatype != "newnote" && doc.datatype != "plain") {
Logger(msg + "ERROR, Invalid datatype: " + path + "(" + doc.datatype + ")", LOG_LEVEL.NOTICE);
Logger(msg + "ERROR, Invalid datatype: " + path + "(" + doc.datatype + ")", LOG_LEVEL_NOTICE);
return;
}
if (!force && localMtime >= docMtime) return;
if (!isValidPath(path)) {
Logger(msg + "ERROR, invalid path: " + path, LOG_LEVEL.NOTICE);
Logger(msg + "ERROR, invalid path: " + path, LOG_LEVEL_NOTICE);
return;
}
const writeData = doc.datatype == "newnote" ? base64ToArrayBuffer(doc.data) : getDocData(doc.data);
@ -1207,14 +1211,14 @@ export default class ObsidianLiveSyncPlugin extends Plugin
this.app.vault.trigger(mode, outFile);
} catch (ex) {
Logger(msg + "ERROR, Could not write: " + path, LOG_LEVEL.NOTICE);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(msg + "ERROR, Could not write: " + path, LOG_LEVEL_NOTICE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
async deleteVaultItem(file: TFile | TFolder) {
if (file instanceof TFile) {
if (!this.isTargetFile(file)) return;
if (!await this.isTargetFile(file)) return;
}
const dir = file.parent;
if (this.settings.trashInsteadDelete) {
@ -1258,7 +1262,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
try {
const releaser = await semaphore.acquire(1);
runWithLock(`dbchanged-${path}`, false, async () => {
Logger(`Applying ${path} (${entry._id}: ${entry._rev}) change...`, LOG_LEVEL.VERBOSE);
Logger(`Applying ${path} (${entry._id}: ${entry._rev}) change...`, LOG_LEVEL_VERBOSE);
await this.handleDBChangedAsync(entry);
Logger(`Applied ${path} (${entry._id}:${entry._rev}) change...`);
}).finally(() => { releaser(); });
@ -1304,7 +1308,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
await this.doc2storage(doc, file);
} else {
if (!queueConflictCheck()) {
Logger(`${this.getPath(change)} is conflicted, write to the storage has been pended.`, LOG_LEVEL.NOTICE);
Logger(`${this.getPath(change)} is conflicted, write to the storage has been pended.`, LOG_LEVEL_NOTICE);
}
}
}
@ -1350,10 +1354,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin
} else if (isValidPath(this.getPath(queue.entry))) {
this.handleDBChanged(queue.entry);
} else {
Logger(`Skipped: ${queue.entry._id}`, LOG_LEVEL.VERBOSE);
Logger(`Skipped: ${queue.entry._id}`, LOG_LEVEL_VERBOSE);
}
} else if (now > queue.timeout) {
if (!queue.warned) Logger(`Timed out: ${queue.entry._id} could not collect ${queue.missingChildren.length} chunks. plugin keeps watching, but you have to check the file after the replication.`, LOG_LEVEL.NOTICE);
if (!queue.warned) Logger(`Timed out: ${queue.entry._id} could not collect ${queue.missingChildren.length} chunks. plugin keeps watching, but you have to check the file after the replication.`, LOG_LEVEL_NOTICE);
queue.warned = true;
continue;
}
@ -1385,7 +1389,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
}
async parseIncomingDoc(doc: PouchDB.Core.ExistingDocument<EntryBody>) {
const path = this.getPath(doc);
if (!this.isTargetFile(path)) return;
if (!await this.isTargetFile(path)) return;
const skipOldFile = this.settings.skipOlderFilesOnSync && false; // patched temporarily.
// Do not handle internal files if the feature has not been enabled.
if (isInternalMetadata(doc._id) && !this.settings.syncInternalFiles) return;
@ -1409,7 +1413,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const docMtime = ~~(doc.mtime / 1000);
//TODO: some margin required.
if (localMtime >= docMtime) {
Logger(`${path} (${doc._id}, ${doc._rev}) Skipped, older than storage.`, LOG_LEVEL.VERBOSE);
Logger(`${path} (${doc._id}, ${doc._rev}) Skipped, older than storage.`, LOG_LEVEL_VERBOSE);
return;
}
}
@ -1425,7 +1429,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if ((!this.settings.readChunksOnline) && "children" in doc) {
const c = await this.localDatabase.collectChunksWithCache(doc.children as DocumentID[]);
const missing = c.filter((e) => e.chunk === false).map((e) => e.id);
if (missing.length > 0) Logger(`${path} (${doc._id}, ${doc._rev}) Queued (waiting ${missing.length} items)`, LOG_LEVEL.VERBOSE);
if (missing.length > 0) Logger(`${path} (${doc._id}, ${doc._rev}) Queued (waiting ${missing.length} items)`, LOG_LEVEL_VERBOSE);
newQueue.missingChildren = missing;
this.queuedFiles.push(newQueue);
} else {
@ -1467,7 +1471,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
missingChildren: [] as string[],
timeout: 0,
};
Logger(`Processing scheduled: ${change.path}`, LOG_LEVEL.INFO);
Logger(`Processing scheduled: ${change.path}`, LOG_LEVEL_INFO);
this.queuedFiles.push(newQueue);
this.saveQueuedFiles();
continue;
@ -1479,7 +1483,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if (change.type == "versioninfo") {
if (change.version > VER) {
this.replicator.closeReplication();
Logger(`Remote database updated to incompatible version. update your self-hosted-livesync plugin.`, LOG_LEVEL.NOTICE);
Logger(`Remote database updated to incompatible version. update your self-hosted-livesync plugin.`, LOG_LEVEL_NOTICE);
}
}
}
@ -1619,11 +1623,11 @@ export default class ObsidianLiveSyncPlugin extends Plugin
async replicate(showMessage?: boolean) {
if (!this.isReady) return;
if (isLockAcquired("cleanup")) {
Logger("Database cleaning up is in process. replication has been cancelled", LOG_LEVEL.NOTICE);
Logger("Database cleaning up is in process. replication has been cancelled", LOG_LEVEL_NOTICE);
return;
}
if (this.settings.versionUpFlash != "") {
Logger("Open settings and check message, please. replication has been cancelled.", LOG_LEVEL.NOTICE);
Logger("Open settings and check message, please. replication has been cancelled.", LOG_LEVEL_NOTICE);
return;
}
await this.applyBatchChange();
@ -1632,8 +1636,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const ret = await this.replicator.openReplication(this.settings, false, showMessage);
if (!ret) {
if (this.replicator.remoteLockedAndDeviceNotAccepted) {
if (this.replicator.remoteCleaned) {
Logger(`The remote database has been cleaned.`, showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO);
if (this.replicator.remoteCleaned && this.settings.useIndexedDBAdapter) {
Logger(`The remote database has been cleaned.`, showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO);
await runWithLock("cleanup", true, async () => {
const count = await purgeUnreferencedChunks(this.localDatabase.localDatabase, true);
const message = `The remote database has been cleaned up.
@ -1651,7 +1655,7 @@ Even if you choose to clean up, you will see this option again if you exit Obsid
if (ret == CHOICE_CLEAN) {
const remoteDB = await this.getReplicator().connectRemoteCouchDBWithSetting(this.settings, this.getIsMobile(), true);
if (typeof remoteDB == "string") {
Logger(remoteDB, LOG_LEVEL.NOTICE);
Logger(remoteDB, LOG_LEVEL_NOTICE);
return false;
}
@ -1663,9 +1667,9 @@ Even if you choose to clean up, you will see this option again if you exit Obsid
await purgeUnreferencedChunks(this.localDatabase.localDatabase, false);
this.localDatabase.hashCaches.clear();
await this.getReplicator().markRemoteResolved(this.settings);
Logger("The local database has been cleaned up.", showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO)
Logger("The local database has been cleaned up.", showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO)
} else {
Logger("Replication has been cancelled. Please try it again.", showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO)
Logger("Replication has been cancelled. Please try it again.", showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO)
}
}
@ -1733,28 +1737,36 @@ Or if you are sure know what had been happened, we can unlock the database from
// synchronize all files between database and storage.
let initialScan = false;
if (showingNotice) {
Logger("Initializing", LOG_LEVEL.NOTICE, "syncAll");
Logger("Initializing", LOG_LEVEL_NOTICE, "syncAll");
}
Logger("Initialize and checking database files");
Logger("Checking deleted files");
await this.collectDeletedFiles();
Logger("Collecting local files on the storage", LOG_LEVEL.VERBOSE);
const filesStorage = this.app.vault.getFiles().filter(e => this.isTargetFile(e));
Logger("Collecting local files on the storage", LOG_LEVEL_VERBOSE);
const filesStorageSrc = this.app.vault.getFiles();
const filesStorage = [] as typeof filesStorageSrc;
for (const f of filesStorageSrc) {
if (await this.isTargetFile(f.path)) {
filesStorage.push(f);
}
}
const filesStorageName = filesStorage.map((e) => e.path);
Logger("Collecting local files on the DB", LOG_LEVEL.VERBOSE);
Logger("Collecting local files on the DB", LOG_LEVEL_VERBOSE);
const filesDatabase = [] as FilePathWithPrefix[]
let count = 0;
for await (const doc of this.localDatabase.findAllNormalDocs()) {
count++;
if (count % 25 == 0) Logger(`Collecting local files on the DB: ${count}`, showingNotice ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO, "syncAll");
if (count % 25 == 0) Logger(`Collecting local files on the DB: ${count}`, showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO, "syncAll");
const path = getPath(doc);
if (isValidPath(path) && this.isTargetFile(path)) {
if (isValidPath(path) && await this.isTargetFile(path)) {
filesDatabase.push(path);
}
}
Logger("Opening the key-value database", LOG_LEVEL.VERBOSE);
Logger("Opening the key-value database", LOG_LEVEL_VERBOSE);
const isInitialized = await (this.kvDB.get<boolean>("initialized")) || false;
// Make chunk bigger if it is the initial scan. There must be non-active docs.
if (filesDatabase.length == 0 && !isInitialized) {
@ -1778,8 +1790,8 @@ Or if you are sure know what had been happened, we can unlock the database from
await callback(e);
return true;
} catch (ex) {
Logger(`Error while ${procedureName}`, LOG_LEVEL.NOTICE);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(`Error while ${procedureName}`, LOG_LEVEL_NOTICE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
@ -1808,7 +1820,7 @@ Or if you are sure know what had been happened, we can unlock the database from
await this.pullFile(e, filesStorage, false, null, false);
Logger(`Check or pull from db:${e} OK`);
} else if (w) {
Logger(`Deletion history skipped: ${e}`, LOG_LEVEL.VERBOSE);
Logger(`Deletion history skipped: ${e}`, LOG_LEVEL_VERBOSE);
} else {
Logger(`entry not found: ${e}`);
}
@ -1839,7 +1851,7 @@ Or if you are sure know what had been happened, we can unlock the database from
await (this.kvDB.set("initialized", true))
}
if (showingNotice) {
Logger("Initialize done!", LOG_LEVEL.NOTICE, "syncAll");
Logger("Initialize done!", LOG_LEVEL_NOTICE, "syncAll");
}
}
@ -1990,7 +2002,7 @@ Or if you are sure know what had been happened, we can unlock the database from
// except insertion, the line should not be different.
if (rightItem[1] != leftItem[1]) {
//TODO: SHOULD BE PANIC.
Logger(`MERGING PANIC:${leftItem[0]},${leftItem[1]} == ${rightItem[0]},${rightItem[1]}`, LOG_LEVEL.VERBOSE);
Logger(`MERGING PANIC:${leftItem[0]},${leftItem[1]} == ${rightItem[0]},${rightItem[1]}`, LOG_LEVEL_VERBOSE);
autoMerge = false;
break LOOP_MERGE;
}
@ -2014,12 +2026,12 @@ Or if you are sure know what had been happened, we can unlock the database from
break LOOP_MERGE;
}
}
Logger(`Weird condition:${leftItem[0]},${leftItem[1]} == ${rightItem[0]},${rightItem[1]}`, LOG_LEVEL.VERBOSE);
Logger(`Weird condition:${leftItem[0]},${leftItem[1]} == ${rightItem[0]},${rightItem[1]}`, LOG_LEVEL_VERBOSE);
// here is the exception
break LOOP_MERGE;
} while (leftIdx < diffLeft.length || rightIdx < diffRight.length);
if (autoMerge) {
Logger(`Sensibly merge available`, LOG_LEVEL.VERBOSE);
Logger(`Sensibly merge available`, LOG_LEVEL_VERBOSE);
return merged;
} else {
return false;
@ -2071,7 +2083,7 @@ Or if you are sure know what had been happened, we can unlock the database from
return JSON.stringify(newObj.data);
} catch (ex) {
Logger("Could not merge object");
Logger(ex, LOG_LEVEL.VERBOSE)
Logger(ex, LOG_LEVEL_VERBOSE)
return false;
}
}
@ -2101,18 +2113,18 @@ Or if you are sure know what had been happened, we can unlock the database from
if (result) {
p = result.filter(e => e[0] != DIFF_DELETE).map((e) => e[1]).join("");
// can be merged.
Logger(`Sensible merge:${path}`, LOG_LEVEL.INFO);
Logger(`Sensible merge:${path}`, LOG_LEVEL_INFO);
} else {
Logger(`Sensible merge is not applicable.`, LOG_LEVEL.VERBOSE);
Logger(`Sensible merge is not applicable.`, LOG_LEVEL_VERBOSE);
}
} else if (isObjectMargeApplicable(path)) {
// can be merged.
const result = await this.mergeObject(path, commonBase, test._rev, conflictedRev);
if (result) {
Logger(`Object merge:${path}`, LOG_LEVEL.INFO);
Logger(`Object merge:${path}`, LOG_LEVEL_INFO);
p = result;
} else {
Logger(`Object merge is not applicable.`, LOG_LEVEL.VERBOSE);
Logger(`Object merge is not applicable.`, LOG_LEVEL_VERBOSE);
}
}
@ -2129,7 +2141,7 @@ Or if you are sure know what had been happened, we can unlock the database from
await this.updateIntoDB(newFile);
}
await this.pullFile(path);
Logger(`Automatically merged (sensible) :${path}`, LOG_LEVEL.INFO);
Logger(`Automatically merged (sensible) :${path}`, LOG_LEVEL_INFO);
return true;
}
}
@ -2139,14 +2151,14 @@ Or if you are sure know what had been happened, we can unlock the database from
const rightLeaf = await this.getConflictedDoc(path, conflicts[0]);
if (leftLeaf == false) {
// what's going on..
Logger(`could not get current revisions:${path}`, LOG_LEVEL.NOTICE);
Logger(`could not get current revisions:${path}`, LOG_LEVEL_NOTICE);
return false;
}
if (rightLeaf == false) {
// Conflicted item could not load, delete this.
await this.localDatabase.deleteDBEntry(path, { rev: conflicts[0] });
await this.pullFile(path, null, true);
Logger(`could not get old revisions, automatically used newer one:${path}`, LOG_LEVEL.NOTICE);
Logger(`could not get old revisions, automatically used newer one:${path}`, LOG_LEVEL_NOTICE);
return true;
}
// first, check for same contents and deletion status.
@ -2169,7 +2181,7 @@ Or if you are sure know what had been happened, we can unlock the database from
}
await this.localDatabase.deleteDBEntry(path, { rev: loser.rev });
await this.pullFile(path, null, true);
Logger(`Automatically merged (newerFileResolve) :${path}`, LOG_LEVEL.NOTICE);
Logger(`Automatically merged (newerFileResolve) :${path}`, LOG_LEVEL_NOTICE);
return true;
}
// make diff.
@ -2187,15 +2199,15 @@ Or if you are sure know what had been happened, we can unlock the database from
showMergeDialog(filename: FilePathWithPrefix, conflictCheckResult: diff_result): Promise<boolean> {
return runWithLock("resolve-conflict:" + filename, false, () =>
new Promise((res, rej) => {
Logger("open conflict dialog", LOG_LEVEL.VERBOSE);
Logger("open conflict dialog", LOG_LEVEL_VERBOSE);
new ConflictResolveModal(this.app, filename, conflictCheckResult, async (selected) => {
const testDoc = await this.localDatabase.getDBEntry(filename, { conflicts: true }, false, false, true);
if (testDoc === false) {
Logger("Missing file..", LOG_LEVEL.VERBOSE);
Logger("Missing file..", LOG_LEVEL_VERBOSE);
return res(true);
}
if (!testDoc._conflicts) {
Logger("Nothing have to do with this conflict", LOG_LEVEL.VERBOSE);
Logger("Nothing have to do with this conflict", LOG_LEVEL_VERBOSE);
return res(true);
}
const toDelete = selected;
@ -2288,7 +2300,7 @@ Or if you are sure know what had been happened, we can unlock the database from
async pullFile(filename: FilePathWithPrefix, fileList?: TFile[], force?: boolean, rev?: string, waitForReady = true) {
const targetFile = getAbstractFileByPath(stripAllPrefixes(filename));
if (!this.isTargetFile(filename)) return;
if (!await this.isTargetFile(filename)) return;
if (targetFile == null) {
//have to create;
const doc = await this.localDatabase.getDBEntry(filename, rev ? { rev: rev } : null, false, waitForReady);
@ -2331,7 +2343,7 @@ Or if you are sure know what had been happened, we can unlock the database from
const dK = `${file.path}-diff`;
const isLastDiff = dK in caches ? caches[dK] : { storageMtime: 0, docMtime: 0 };
if (isLastDiff.docMtime == docMtime && isLastDiff.storageMtime == storageMtime) {
// Logger("STORAGE .. DB :" + file.path, LOG_LEVEL.VERBOSE);
// Logger("STORAGE .. DB :" + file.path, LOG_LEVEL_VERBOSE);
caches[dK] = { storageMtime, docMtime };
return caches;
}
@ -2355,14 +2367,14 @@ Or if you are sure know what had been happened, we can unlock the database from
caches[dK] = { storageMtime, docMtime };
return caches;
}
Logger("STORAGE == DB :" + file.path + "", LOG_LEVEL.VERBOSE);
Logger("STORAGE == DB :" + file.path + "", LOG_LEVEL_VERBOSE);
caches[dK] = { storageMtime, docMtime };
return caches;
}
async updateIntoDB(file: TFile, initialScan?: boolean, cache?: CacheData, force?: boolean) {
if (!this.isTargetFile(file)) return true;
if (!await this.isTargetFile(file)) return true;
if (shouldBeIgnored(file.path)) {
return true;
}
@ -2370,14 +2382,14 @@ Or if you are sure know what had been happened, we can unlock the database from
let datatype: "plain" | "newnote" = "newnote";
if (!cache) {
if (!isPlainText(file.name)) {
Logger(`Reading : ${file.path}`, LOG_LEVEL.VERBOSE);
Logger(`Reading : ${file.path}`, LOG_LEVEL_VERBOSE);
const contentBin = await this.app.vault.readBinary(file);
Logger(`Processing: ${file.path}`, LOG_LEVEL.VERBOSE);
Logger(`Processing: ${file.path}`, LOG_LEVEL_VERBOSE);
try {
content = await arrayBufferToBase64(contentBin);
} catch (ex) {
Logger(`The file ${file.path} could not be encoded`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
datatype = "newnote";
@ -2387,12 +2399,12 @@ Or if you are sure know what had been happened, we can unlock the database from
}
} else {
if (cache instanceof ArrayBuffer) {
Logger(`Processing: ${file.path}`, LOG_LEVEL.VERBOSE);
Logger(`Processing: ${file.path}`, LOG_LEVEL_VERBOSE);
try {
content = await arrayBufferToBase64(cache);
} catch (ex) {
Logger(`The file ${file.path} could not be encoded`);
Logger(ex, LOG_LEVEL.VERBOSE);
Logger(ex, LOG_LEVEL_VERBOSE);
return false;
}
datatype = "newnote"
@ -2427,15 +2439,15 @@ Or if you are sure know what had been happened, we can unlock the database from
const newData = { data: d.data, deleted: d._deleted || d.deleted };
if (oldData.deleted != newData.deleted) return false;
if (!isDocContentSame(old.data, newData.data)) return false;
Logger(msg + "Skipped (not changed) " + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL.VERBOSE);
Logger(msg + "Skipped (not changed) " + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL_VERBOSE);
return true;
// d._rev = old._rev;
}
} catch (ex) {
if (force) {
Logger(msg + "Error, Could not check the diff for the old one." + (force ? "force writing." : "") + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL.VERBOSE);
Logger(msg + "Error, Could not check the diff for the old one." + (force ? "force writing." : "") + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL_VERBOSE);
} else {
Logger(msg + "Error, Could not check the diff for the old one." + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL.VERBOSE);
Logger(msg + "Error, Could not check the diff for the old one." + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL_VERBOSE);
}
return !force;
}
@ -2453,7 +2465,7 @@ Or if you are sure know what had been happened, we can unlock the database from
}
async deleteFromDB(file: TFile) {
if (!this.isTargetFile(file)) return;
if (!await this.isTargetFile(file)) return;
const fullPath = getPathFromTFile(file);
Logger(`deleteDB By path:${fullPath}`);
await this.deleteFromDBbyPath(fullPath);
@ -2536,15 +2548,49 @@ Or if you are sure know what had been happened, we can unlock the database from
return true;
}
isTargetFile(file: string | TAbstractFile) {
if (file instanceof TFile) {
return this.localDatabase.isTargetFile(file.path);
} else if (typeof file == "string") {
return this.localDatabase.isTargetFile(file);
ignoreFileCache = new LRUCache<string, string[] | false>(300, 250000, true);
ignoreFiles = [] as string[]
async readIgnoreFile(path: string) {
try {
const file = await this.app.vault.adapter.read(path);
const gitignore = file.split(/\r?\n/g);
this.ignoreFileCache.set(path, gitignore);
return gitignore;
} catch (ex) {
this.ignoreFileCache.set(path, false);
return false;
}
}
async getIgnoreFile(path: string) {
if (this.ignoreFileCache.has(path)) {
return this.ignoreFileCache.get(path);
} else {
return await this.readIgnoreFile(path);
}
}
async isIgnoredByIgnoreFiles(file: string | TAbstractFile) {
if (!this.settings.useIgnoreFiles) {
return true;
}
const filepath = file instanceof TFile ? file.path : file as string;
if (this.ignoreFileCache.has(filepath)) {
// Renew
await this.readIgnoreFile(filepath);
}
if (!await isAcceptedAll(stripAllPrefixes(filepath as FilePathWithPrefix), this.ignoreFiles, (filename) => this.getIgnoreFile(filename))) {
return false;
}
return true;
}
async isTargetFile(file: string | TAbstractFile) {
const filepath = file instanceof TFile ? file.path : file as string;
if (this.settings.useIgnoreFiles && !await this.isIgnoredByIgnoreFiles(file)) {
return false;
}
return this.localDatabase.isTargetFile(filepath);
}
async dryRunGC() {
await runWithLock("cleanup", true, async () => {
const remoteDBConn = await this.getReplicator().connectRemoteCouchDBWithSetting(this.settings, this.isMobile)

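The new ignore-files handling above makes isTargetFile() asynchronous: a path must first pass the .gitignore-style check (each ignore file is read once and kept in an LRUCache) before the existing database-side check runs. Because an async predicate cannot be passed to Array.prototype.filter, callers switch to explicit loops, as in the initial-scan code earlier in this file. A usage sketch; collectTargetFiles is a hypothetical helper, not part of the plugin:

import { TFile } from "obsidian";
import type ObsidianLiveSyncPlugin from "./main";

export async function collectTargetFiles(plugin: ObsidianLiveSyncPlugin): Promise<TFile[]> {
    const out: TFile[] = [];
    for (const file of plugin.app.vault.getFiles()) {
        // Array.filter cannot await an async predicate, hence the explicit loop.
        if (await plugin.isTargetFile(file.path)) out.push(file);
    }
    return out;
}
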
@ -1,5 +1,5 @@
import { PluginManifest, TFile } from "./deps";
import { DatabaseEntry, EntryBody, FilePath } from "./lib/src/types";
import { type PluginManifest, TFile } from "./deps";
import { type DatabaseEntry, type EntryBody, type FilePath } from "./lib/src/types";
export interface PluginDataEntry extends DatabaseEntry {
deviceVaultName: string;

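The import changes here (and elsewhere in this commit) mark type-only names with the inline `type` modifier, which lets single-file transpilers such as esbuild erase them without whole-program type information. A small illustrative example; describeFile is hypothetical:

import { type PluginManifest, TFile } from "obsidian";

// Only TFile reaches the emitted JavaScript; PluginManifest is erased at compile time.
export function describeFile(file: TFile, manifest: PluginManifest): string {
    return `${manifest.id}: ${file.path}`;
}
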
@ -1,8 +1,8 @@
import { type DataWriteOptions, normalizePath, TFile, Platform, TAbstractFile, App, Plugin_2, type RequestUrlParam, requestUrl } from "./deps";
import { type DataWriteOptions, normalizePath, TFile, Platform, TAbstractFile, App, Plugin, type RequestUrlParam, requestUrl } from "./deps";
import { path2id_base, id2path_base, isValidFilenameInLinux, isValidFilenameInDarwin, isValidFilenameInWidows, isValidFilenameInAndroid, stripAllPrefixes } from "./lib/src/path";
import { Logger } from "./lib/src/logger";
import { type AnyEntry, type DocumentID, type EntryHasPath, type FilePath, type FilePathWithPrefix, LOG_LEVEL } from "./lib/src/types";
import { LOG_LEVEL_VERBOSE, type AnyEntry, type DocumentID, type EntryHasPath, type FilePath, type FilePathWithPrefix } from "./lib/src/types";
import { CHeader, ICHeader, ICHeaderLength, PSCHeader } from "./types";
import { InputStringDialog, PopoverSelectString } from "./dialogs";
import ObsidianLiveSyncPlugin from "./main";
@ -327,7 +327,7 @@ export function isValidPath(filename: string) {
if (Platform.isAndroidApp) return isValidFilenameInAndroid(filename);
if (Platform.isIosApp) return isValidFilenameInDarwin(filename);
//Fallback
Logger("Could not determine platform for checking filename", LOG_LEVEL.VERBOSE);
Logger("Could not determine platform for checking filename", LOG_LEVEL_VERBOSE);
return isValidFilenameInWidows(filename);
}
@ -415,8 +415,8 @@ export const askString = (app: App, title: string, key: string, placeholder: str
export class PeriodicProcessor {
_process: () => Promise<any>;
_timer?: number;
_plugin: Plugin_2;
constructor(plugin: Plugin_2, process: () => Promise<any>) {
_plugin: Plugin;
constructor(plugin: Plugin, process: () => Promise<any>) {
this._plugin = plugin;
this._process = process;
}