mirror of
https://github.com/vrtmrz/obsidian-livesync.git
synced 2024-12-12 09:04:06 +02:00
Implemented:
- The target-selecting filter.
- Configurable chunk size.
- Reading chunks online.
Fixed:
- Typos
This commit is contained in:
parent
e7f4d8c9c2
commit
9d0ffd1848
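The target-selecting filter added by this commit is driven by two new settings, syncOnlyRegEx and syncIgnoreRegEx. As a rough standalone sketch (the settings interface below is a simplified stand-in for the plugin's real settings type, not code from this commit), the check behaves like this:

// Simplified stand-in for the plugin settings used by the filter (not the full settings type).
interface TargetFilterSettings {
    syncOnlyRegEx: string;   // only paths matching this are synchronised; empty = no restriction
    syncIgnoreRegEx: string; // paths matching this are skipped; empty = ignore nothing
}

// Mirrors LocalPouchDB.isTargetFile from the diff below: ids containing ":" are internal documents
// and always pass; otherwise the "only" pattern is applied first, then the "ignore" pattern.
function isTargetFile(path: string, settings: TargetFilterSettings): boolean {
    if (path.includes(":")) return true;
    if (settings.syncOnlyRegEx) {
        const syncOnly = new RegExp(settings.syncOnlyRegEx);
        if (!path.match(syncOnly)) return false;
    }
    if (settings.syncIgnoreRegEx) {
        const syncIgnore = new RegExp(settings.syncIgnoreRegEx);
        if (path.match(syncIgnore)) return false;
    }
    return true;
}

// Example: restrict synchronisation to notes and skip PDFs.
const exampleSettings: TargetFilterSettings = { syncOnlyRegEx: "\\.md$|\\.txt$", syncIgnoreRegEx: "\\.pdf$" };
console.log(isTargetFile("daily/2022-08-10.md", exampleSettings));     // true
console.log(isTargetFile("attachments/manual.pdf", exampleSettings));  // false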
@@ -24,9 +24,9 @@ import {
} from "./lib/src/types";
import { RemoteDBSettings } from "./lib/src/types";
import { resolveWithIgnoreKnownError, runWithLock, shouldSplitAsPlainText, splitPieces2, enableEncryption } from "./lib/src/utils";
import { path2id } from "./utils";
import { id2path, path2id } from "./utils";
import { Logger } from "./lib/src/logger";
import { checkRemoteVersion, connectRemoteCouchDBWithSetting, getLastPostFailedBySize } from "./utils_couchdb";
import { checkRemoteVersion, connectRemoteCouchDBWithSetting, getLastPostFailedBySize, putDesignDocuments } from "./utils_couchdb";
import { KeyValueDatabase, OpenKeyValueDatabase } from "./KeyValueDB";
import { LRUCache } from "./lib/src/LRUCache";
@@ -72,6 +72,7 @@ export class LocalPouchDB {
chunkVersion = -1;
maxChunkVersion = -1;
minChunkVersion = -1;
needScanning = false;

cancelHandler<T extends PouchDB.Core.Changes<EntryDoc> | PouchDB.Replication.Sync<EntryDoc> | PouchDB.Replication.Replication<EntryDoc>>(handler: T): T {
if (handler != null) {
@@ -160,6 +161,7 @@ export class LocalPouchDB {
this.localDatabase.removeAllListeners();
});
this.nodeid = nodeinfo.nodeid;
await putDesignDocuments(this.localDatabase);

// Traceing the leaf id
const changes = this.localDatabase
@@ -299,6 +301,10 @@ export class LocalPouchDB {
}

async getDBEntryMeta(path: string, opt?: PouchDB.Core.GetOptions, includeDeleted = false): Promise<false | LoadedEntry> {
// safety valve
if (!this.isTargetFile(path)) {
return false;
}
const id = path2id(path);
try {
let obj: EntryDocResponse = null;
@@ -348,6 +354,10 @@ export class LocalPouchDB {
return false;
}
async getDBEntry(path: string, opt?: PouchDB.Core.GetOptions, dump = false, waitForReady = true, includeDeleted = false): Promise<false | LoadedEntry> {
// safety valve
if (!this.isTargetFile(path)) {
return false;
}
const id = path2id(path);
try {
let obj: EntryDocResponse = null;
@@ -392,26 +402,51 @@ export class LocalPouchDB {
// simple note
}
if (obj.type == "newnote" || obj.type == "plain") {
// search childrens
// search children
try {
if (dump) {
Logger(`Enhanced doc`);
Logger(obj);
}
let childrens: string[];
try {
childrens = await Promise.all(obj.children.map((e) => this.getDBLeaf(e, waitForReady)));
if (dump) {
Logger(`Chunks:`);
Logger(childrens);
let children: string[] = [];

if (this.settings.readChunksOnline) {
const items = await this.fetchLeafFromRemote(obj.children);
if (items) {
for (const v of items) {
if (v.doc && v.doc.type == "leaf") {
children.push(v.doc.data);
} else {
if (!opt) {
Logger(`Chunks of ${obj._id} are not valid.`, LOG_LEVEL.NOTICE);
this.needScanning = true;
this.corruptedEntries[obj._id] = obj;
}
return false;
}
}
} else {
if (opt) {
Logger(`Could not retrieve chunks of ${obj._id}. we have to `, LOG_LEVEL.NOTICE);
this.needScanning = true;
}
return false;
}
} else {
try {
children = await Promise.all(obj.children.map((e) => this.getDBLeaf(e, waitForReady)));
if (dump) {
Logger(`Chunks:`);
Logger(children);
}
} catch (ex) {
Logger(`Something went wrong on reading chunks of ${obj._id} from database, see verbose info for detail.`, LOG_LEVEL.NOTICE);
Logger(ex, LOG_LEVEL.VERBOSE);
this.corruptedEntries[obj._id] = obj;
return false;
}
} catch (ex) {
Logger(`Something went wrong on reading chunks of ${obj._id} from database, see verbose info for detail.`, LOG_LEVEL.NOTICE);
Logger(ex, LOG_LEVEL.VERBOSE);
this.corruptedEntries[obj._id] = obj;
return false;
}
const data = childrens.join("");
const data = children.join("");
const doc: LoadedEntry & PouchDB.Core.IdMeta & PouchDB.Core.GetMeta = {
data: data,
_id: obj._id,
@@ -452,6 +487,10 @@ export class LocalPouchDB {
return false;
}
async deleteDBEntry(path: string, opt?: PouchDB.Core.GetOptions): Promise<boolean> {
// safety valve
if (!this.isTargetFile(path)) {
return false;
}
const id = path2id(path);

try {
@@ -521,7 +560,7 @@ export class LocalPouchDB {
for (const v of result.rows) {
// let doc = v.doc;
if (v.id.startsWith(prefix) || v.id.startsWith("/" + prefix)) {
delDocs.push(v.id);
if (this.isTargetFile(id2path(v.id))) delDocs.push(v.id);
// console.log("!" + v.id);
} else {
if (!v.id.startsWith("h:")) {
@@ -566,12 +605,17 @@ export class LocalPouchDB {
return true;
}
async putDBEntry(note: LoadedEntry, saveAsBigChunk?: boolean) {
//safety valve
if (!this.isTargetFile(id2path(note._id))) {
return;
}

// let leftData = note.data;
const savenNotes = [];
let processed = 0;
let made = 0;
let skiped = 0;
let pieceSize = MAX_DOC_SIZE_BIN;
let pieceSize = MAX_DOC_SIZE_BIN * Math.max(this.settings.customChunkSize, 1);
let plainSplit = false;
let cacheUsed = 0;
const userpasswordHash = this.h32Raw(new TextEncoder().encode(this.settings.passphrase));
@@ -727,7 +771,7 @@ export class LocalPouchDB {
}
});
} else {
Logger(`note coud not saved:${note._id}`);
Logger(`note could not saved:${note._id}`);
}
}

@@ -779,6 +823,7 @@ export class LocalPouchDB {
}

if (!skipCheck) {
await putDesignDocuments(dbret.db);
if (!(await checkRemoteVersion(dbret.db, this.migrate.bind(this), VER))) {
Logger("Remote database is newer or corrupted, make sure to latest version of self-hosted-livesync installed", LOG_LEVEL.NOTICE);
return false;
@@ -850,6 +895,10 @@ export class LocalPouchDB {
batches_limit: setting.batches_limit,
batch_size: setting.batch_size,
};
if (setting.readChunksOnline) {
syncOptionBase.push = { filter: 'replicate/push' };
syncOptionBase.pull = { filter: 'replicate/pull' };
}
const syncOption: PouchDB.Replication.SyncOptions = keepAlive ? { live: true, retry: true, heartbeat: 30000, ...syncOptionBase } : { ...syncOptionBase };

return { db: dbret.db, info: dbret.info, syncOptionBase, syncOption };
@@ -902,6 +951,8 @@ export class LocalPouchDB {
this.syncStatus = "ERRORED";
this.syncHandler = this.cancelHandler(this.syncHandler);
this.updateInfo();
Logger("Replication error", LOG_LEVEL.NOTICE, "sync");
Logger(e);
}
replicationPaused() {
this.syncStatus = "PAUSED";
@@ -962,7 +1013,7 @@ export class LocalPouchDB {
}
});
} else if (syncmode == "pullOnly") {
this.syncHandler = this.localDatabase.replicate.from(db, { checkpoint: "target", ...syncOptionBase });
this.syncHandler = this.localDatabase.replicate.from(db, { checkpoint: "target", ...syncOptionBase, ...(this.settings.readChunksOnline ? { filter: "replicate/pull" } : {}) });
this.syncHandler
.on("change", async (e) => {
await this.replicationChangeDetected({ direction: "pull", change: e }, showResult, docSentOnStart, docArrivedOnStart, callback);
@@ -982,7 +1033,7 @@ export class LocalPouchDB {
}
});
} else if (syncmode == "pushOnly") {
this.syncHandler = this.localDatabase.replicate.to(db, { checkpoint: "target", ...syncOptionBase });
this.syncHandler = this.localDatabase.replicate.to(db, { checkpoint: "target", ...syncOptionBase, ...(this.settings.readChunksOnline ? { filter: "replicate/push" } : {}) });
this.syncHandler.on("change", async (e) => {
await this.replicationChangeDetected({ direction: "push", change: e }, showResult, docSentOnStart, docArrivedOnStart, callback);
if (retrying) {
@@ -1293,4 +1344,29 @@ export class LocalPouchDB {
if (this.minChunkVersion > 0 && this.minChunkVersion > ver) return false;
return true;
}

isTargetFile(file: string) {
if (file.includes(":")) return true;
if (this.settings.syncOnlyRegEx) {
const syncOnly = new RegExp(this.settings.syncOnlyRegEx);
if (!file.match(syncOnly)) return false;
}
if (this.settings.syncIgnoreRegEx) {
const syncIgnore = new RegExp(this.settings.syncIgnoreRegEx);
if (file.match(syncIgnore)) return false;
}
return true;
}
async fetchLeafFromRemote(ids: string[], showResult = false) {
const ret = await connectRemoteCouchDBWithSetting(this.settings, this.isMobile);
if (typeof (ret) === "string") {

Logger(`Could not connect to server.${ret} `, showResult ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO, "fetch");
return;
}

const leafs = await ret.db.allDocs({ keys: ids, include_docs: true });
return leafs.rows;
}

}
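The "Read chunks online" option wired up above lets getDBEntry assemble a note from chunks fetched straight from the remote CouchDB instead of waiting for them to replicate locally. A condensed sketch of that path follows; it assumes the ambient PouchDB typings the plugin already uses and a leaf document shape of { type: "leaf", data: string }, both inferred from this diff rather than quoted from it.

// Assumed chunk ("leaf") document shape, taken from how the diff checks v.doc.type and v.doc.data.
type LeafDoc = { type: "leaf"; data: string };

// Condensed equivalent of fetchLeafFromRemote plus the readChunksOnline branch of getDBEntry:
// fetch the referenced chunk ids directly from the remote database and stitch the note together.
async function readChunksOnline(remote: PouchDB.Database<LeafDoc>, childIds: string[]): Promise<string | false> {
    const result = await remote.allDocs({ keys: childIds, include_docs: true });
    const pieces: string[] = [];
    for (const row of result.rows) {
        const doc = (row as { doc?: LeafDoc }).doc;
        if (doc && doc.type == "leaf") {
            pieces.push(doc.data);
        } else {
            // A missing or non-leaf row means the note cannot be assembled; the plugin marks the
            // entry as corrupted and sets needScanning so the file is retried when back online.
            return false;
        }
    }
    return pieces.join("");
}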
@@ -115,12 +115,12 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
};
const applyDisplayEnabled = () => {
if (isAnySyncEnabled()) {
dbsettings.forEach((e) => {
dbSettings.forEach((e) => {
e.setDisabled(true).setTooltip("Could not change this while any synchronization options are enabled.");
});
syncWarn.removeClass("sls-hidden");
} else {
dbsettings.forEach((e) => {
dbSettings.forEach((e) => {
e.setDisabled(false).setTooltip("");
});
syncWarn.addClass("sls-hidden");
@@ -149,8 +149,8 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
}
};

const dbsettings: Setting[] = [];
dbsettings.push(
const dbSettings: Setting[] = [];
dbSettings.push(
new Setting(containerRemoteDatabaseEl).setName("URI").addText((text) =>
text
.setPlaceholder("https://........")
@@ -652,7 +652,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {

new Setting(containerGeneralSettingsEl)
.setName("Do not show low-priority Log")
.setDesc("Reduce log infomations")
.setDesc("Reduce log information")
.addToggle((toggle) =>
toggle.setValue(this.plugin.settings.lessInformationInLog).onChange(async (value) => {
this.plugin.settings.lessInformationInLog = value;
@@ -661,7 +661,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
);
new Setting(containerGeneralSettingsEl)
.setName("Verbose Log")
.setDesc("Show verbose log ")
.setDesc("Show verbose log")
.addToggle((toggle) =>
toggle.setValue(this.plugin.settings.showVerboseLog).onChange(async (value) => {
this.plugin.settings.showVerboseLog = value;
@@ -810,15 +810,6 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
})
);

// new Setting(containerSyncSettingEl)
// .setName("Skip old files on sync")
// .setDesc("Skip old incoming if incoming changes older than storage.")
// .addToggle((toggle) =>
// toggle.setValue(this.plugin.settings.skipOlderFilesOnSync).onChange(async (value) => {
// this.plugin.settings.skipOlderFilesOnSync = value;
// await this.plugin.saveSettings();
// })
// );
new Setting(containerSyncSettingEl)
.setName("Check conflict only on opened files")
.setDesc("Do not check conflict for replication")
@@ -829,9 +820,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
})
);

containerSyncSettingEl.createEl("h3", {
text: sanitizeHTMLToDom(`Experimental`),
});

new Setting(containerSyncSettingEl)
.setName("Sync hidden files")
.addToggle((toggle) =>
@@ -926,6 +915,86 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
})
)

containerSyncSettingEl.createEl("h3", {
text: sanitizeHTMLToDom(`Experimental`),
});
new Setting(containerSyncSettingEl)
.setName("Regular expression to ignore files")
.setDesc("If this is set, any changes to local and remote files that match this will be skipped.")
.addTextArea((text) => {
text
.setValue(this.plugin.settings.syncIgnoreRegEx)
.setPlaceholder("\\.pdf$")
.onChange(async (value) => {
let isValidRegExp = false;
try {
new RegExp(value);
isValidRegExp = true;
} catch (_) {
// NO OP.
}
if (isValidRegExp || value.trim() == "") {
this.plugin.settings.syncIgnoreRegEx = value;
await this.plugin.saveSettings();
}
})
return text;
}
);
new Setting(containerSyncSettingEl)
.setName("Regular expression for restricting synchronization targets")
.setDesc("If this is set, changes to local and remote files that only match this will be processed.")
.addTextArea((text) => {
text
.setValue(this.plugin.settings.syncOnlyRegEx)
.setPlaceholder("\\.md$|\\.txt")
.onChange(async (value) => {
let isValidRegExp = false;
try {
new RegExp(value);
isValidRegExp = true;
} catch (_) {
// NO OP.
}
if (isValidRegExp || value.trim() == "") {
this.plugin.settings.syncOnlyRegEx = value;
await this.plugin.saveSettings();
}
})
return text;
}
);


new Setting(containerSyncSettingEl)
.setName("Chunk size")
.setDesc("Customize chunk size for binary files (0.1MBytes). This cannot be increased when using IBM Cloudant.")
.addText((text) => {
text.setPlaceholder("")
.setValue(this.plugin.settings.customChunkSize + "")
.onChange(async (value) => {
let v = Number(value);
if (isNaN(v) || v < 100) {
v = 100;
}
this.plugin.settings.customChunkSize = v;
await this.plugin.saveSettings();
});
text.inputEl.setAttribute("type", "number");
});
new Setting(containerSyncSettingEl)
.setName("Read chunks online.")
.setDesc("If this option is enabled, LiveSync reads chunks online directly instead of replicating them locally. Increasing Custom chunk size is recommended.")
.addToggle((toggle) => {
toggle
.setValue(this.plugin.settings.readChunksOnline)
.onChange(async (value) => {
this.plugin.settings.readChunksOnline = value;
await this.plugin.saveSettings();
})
return toggle;
}
);
containerSyncSettingEl.createEl("h3", {
text: sanitizeHTMLToDom(`Advanced settings`),
});
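The new "Chunk size" setting above multiplies the binary piece size used by putDBEntry in the first file of this diff (pieceSize = MAX_DOC_SIZE_BIN * Math.max(customChunkSize, 1)). A small sketch of that arithmetic follows; the 100 KiB constant is an assumption based on the "(0.1MBytes)" wording of the setting, not a value shown in this commit.

// Assumption: MAX_DOC_SIZE_BIN is roughly 0.1 MB, inferred from the setting description; the real
// constant lives in the plugin's shared library and is not shown in this diff.
const MAX_DOC_SIZE_BIN = 100 * 1024;

// Mirrors the changed line in putDBEntry: the configured multiplier scales the piece size,
// and anything below 1 falls back to a single unit.
function effectivePieceSize(customChunkSize: number): number {
    return MAX_DOC_SIZE_BIN * Math.max(customChunkSize, 1);
}

console.log(effectivePieceSize(0));   // 102400 bytes, the unscaled piece size
console.log(effectivePieceSize(100)); // 10240000 bytes (the settings UI above clamps typed values to at least 100)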
src/lib (submodule, 2 changes)
@@ -1 +1 @@
Subproject commit a49a096a6a6d93185bb0a590b3e84e6d7c5431d0
Subproject commit 3ca623780c1c26b153fd663e997b53e32c4de021
src/main.ts (165 changes)
@@ -48,7 +48,7 @@ const ICHeaderLength = ICHeader.length;
* @param str ID
* @returns
*/
function isInteralChunk(str: string): boolean {
function isInternalChunk(str: string): boolean {
return str.startsWith(ICHeader);
}
function id2filenameInternalChunk(str: string): string {
@@ -185,7 +185,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
const doc = row.doc;
nextKey = `${row.id}\u{10ffff}`;
if (!("_conflicts" in doc)) continue;
if (isInteralChunk(row.id)) continue;
if (isInternalChunk(row.id)) continue;
if (doc._deleted) continue;
if ("deleted" in doc && doc.deleted) continue;
if (doc.type == "newnote" || doc.type == "plain") {
@@ -206,7 +206,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
const target = await askSelectString(this.app, "File to view History", notesList);
if (target) {
if (isInteralChunk(target)) {
if (isInternalChunk(target)) {
//NOP
} else {
await this.showIfConflicted(this.app.vault.getAbstractFileByPath(target) as TFile);
@@ -286,7 +286,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
this.watchVaultDelete = this.watchVaultDelete.bind(this);
this.watchVaultRename = this.watchVaultRename.bind(this);
this.watchWorkspaceOpen = debounce(this.watchWorkspaceOpen.bind(this), 1000, false);
this.watchWindowVisiblity = debounce(this.watchWindowVisiblity.bind(this), 1000, false);
this.watchWindowVisibility = debounce(this.watchWindowVisibility.bind(this), 1000, false);
this.watchOnline = debounce(this.watchOnline.bind(this), 500, false);

this.parseReplicationResult = this.parseReplicationResult.bind(this);

@@ -320,8 +321,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
if (this.settings.suspendFileWatching) {
Logger("'Suspend file watching' turned on. Are you sure this is what you intended? Every modification on the vault will be ignored.", LOG_LEVEL.NOTICE);
}
const isInitalized = await this.initializeDatabase();
if (!isInitalized) {
const isInitialized = await this.initializeDatabase();
if (!isInitialized) {
//TODO:stop all sync.
return false;
}
@@ -361,19 +362,19 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
const config = decodeURIComponent(setupURI.substring(configURIBase.length));
console.dir(config)
await setupwizard(config);
await setupWizard(config);
},
});
const setupwizard = async (confString: string) => {
const setupWizard = async (confString: string) => {
try {
const oldConf = JSON.parse(JSON.stringify(this.settings));
const encryptingPassphrase = await askString(this.app, "Passphrase", "Passphrase for your settings", "");
if (encryptingPassphrase === false) return;
const newconf = await JSON.parse(await decrypt(confString, encryptingPassphrase));
if (newconf) {
const newConf = await JSON.parse(await decrypt(confString, encryptingPassphrase));
if (newConf) {
const result = await askYesNo(this.app, "Importing LiveSync's conf, OK?");
if (result == "yes") {
const newSettingW = Object.assign({}, this.settings, newconf);
const newSettingW = Object.assign({}, this.settings, newConf);
// stopping once.
this.localDatabase.closeReplication();
this.settings.suspendFileWatching = true;
@@ -437,7 +438,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
};
this.registerObsidianProtocolHandler("setuplivesync", async (conf: any) => {
await setupwizard(conf.settings);
await setupWizard(conf.settings);
});
this.addCommand({
id: "livesync-replicate",
@@ -448,7 +449,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
});
this.addCommand({
id: "livesync-dump",
name: "Dump informations of this doc ",
name: "Dump information of this doc ",
editorCallback: (editor: Editor, view: MarkdownView) => {
this.localDatabase.getDBEntry(view.file.path, {}, true, false);
},
@@ -504,6 +505,13 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
this.showHistory(view.file);
},
});
this.addCommand({
id: "livesync-scan-files",
name: "Scan storage and database again",
callback: async () => {
await this.syncAllFiles(true)
}
})

this.triggerRealizeSettingSyncMode = debounce(this.triggerRealizeSettingSyncMode.bind(this), 1000);
this.triggerCheckPluginUpdate = debounce(this.triggerCheckPluginUpdate.bind(this), 3000);
@@ -534,14 +542,14 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
});
this.addCommand({
id: "livesync-conflictcheck",
name: "Pick a file to resolive conflict",
name: "Pick a file to resolve conflict",
callback: () => {
this.pickFileForResolve();
},
})
this.addCommand({
id: "livesync-runbatch",
name: "Run pending batch processes",
name: "Run pended batch processes",
callback: async () => {
await this.applyBatchChange();
},
@@ -585,7 +593,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
clearAllPeriodic();
clearAllTriggers();
window.removeEventListener("visibilitychange", this.watchWindowVisiblity);
window.removeEventListener("visibilitychange", this.watchWindowVisibility);
window.removeEventListener("online", this.watchOnline)
Logger("unloading plugin");
}

@@ -666,14 +675,26 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
this.registerEvent(this.app.vault.on("rename", this.watchVaultRename));
this.registerEvent(this.app.vault.on("create", this.watchVaultCreate));
this.registerEvent(this.app.workspace.on("file-open", this.watchWorkspaceOpen));
window.addEventListener("visibilitychange", this.watchWindowVisiblity);
window.addEventListener("visibilitychange", this.watchWindowVisibility);
window.addEventListener("online", this.watchOnline);
}

watchWindowVisiblity() {
this.watchWindowVisiblityAsync();

watchOnline() {
this.watchOnlineAsync();
}
async watchOnlineAsync() {
// If some files were failed to retrieve, scan files again.
if (navigator.onLine && this.localDatabase.needScanning) {
this.localDatabase.needScanning = false;
await this.syncAllFiles();
}
}
watchWindowVisibility() {
this.watchWindowVisibilityAsync();
}

async watchWindowVisiblityAsync() {
async watchWindowVisibilityAsync() {
if (this.settings.suspendFileWatching) return;
// if (this.suspended) return;
const isHidden = document.hidden;
@@ -718,6 +739,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}

watchVaultCreate(file: TFile, ...args: any[]) {
if (!this.isTargetFile(file)) return;
if (this.settings.suspendFileWatching) return;
if (recentlyTouched(file)) {
return;
@@ -726,6 +748,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}

watchVaultChange(file: TAbstractFile, ...args: any[]) {
if (!this.isTargetFile(file)) return;
if (!(file instanceof TFile)) {
return;
}
@@ -799,6 +822,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}

watchVaultDelete(file: TAbstractFile) {
if (!this.isTargetFile(file)) return;
// When save is delayed, it should be cancelled.
this.batchFileChange = this.batchFileChange.filter((e) => e != file.path);
if (this.settings.suspendFileWatching) return;
@@ -830,6 +854,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}

watchVaultRename(file: TAbstractFile, oldFile: any) {
if (!this.isTargetFile(file)) return;
if (this.settings.suspendFileWatching) return;
this.watchVaultRenameAsync(file, oldFile).then(() => { });
}
@@ -899,32 +924,32 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
if (this.settings && !this.settings.showVerboseLog && level == LOG_LEVEL.VERBOSE) {
return;
}
const valutName = this.getVaultName();
const vaultName = this.getVaultName();
const timestamp = new Date().toLocaleString();
const messagecontent = typeof message == "string" ? message : message instanceof Error ? `${message.name}:${message.message}` : JSON.stringify(message, null, 2);
const newmessage = timestamp + "->" + messagecontent;
const messageContent = typeof message == "string" ? message : message instanceof Error ? `${message.name}:${message.message}` : JSON.stringify(message, null, 2);
const newMessage = timestamp + "->" + messageContent;

this.logMessage = [].concat(this.logMessage).concat([newmessage]).slice(-100);
console.log(valutName + ":" + newmessage);
this.setStatusBarText(null, messagecontent.substring(0, 30));
this.logMessage = [].concat(this.logMessage).concat([newMessage]).slice(-100);
console.log(vaultName + ":" + newMessage);
this.setStatusBarText(null, messageContent.substring(0, 30));
// if (message instanceof Error) {
// console.trace(message);
// }

if (level >= LOG_LEVEL.NOTICE) {
if (!key) key = messagecontent;
if (!key) key = messageContent;
if (key in this.notifies) {
// @ts-ignore
const isShown = this.notifies[key].notice.noticeEl?.isShown()
if (!isShown) {
this.notifies[key].notice = new Notice(messagecontent, 0);
this.notifies[key].notice = new Notice(messageContent, 0);
}
clearTimeout(this.notifies[key].timer);
if (key == messagecontent) {
if (key == messageContent) {
this.notifies[key].count++;
this.notifies[key].notice.setMessage(`(${this.notifies[key].count}):${messagecontent}`);
this.notifies[key].notice.setMessage(`(${this.notifies[key].count}):${messageContent}`);
} else {
this.notifies[key].notice.setMessage(`${messagecontent}`);
this.notifies[key].notice.setMessage(`${messageContent}`);
}

this.notifies[key].timer = setTimeout(() => {
@@ -937,7 +962,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
}, 5000);
} else {
const notify = new Notice(messagecontent, 0);
const notify = new Notice(messageContent, 0);
this.notifies[key] = {
count: 0,
notice: notify,
@@ -951,8 +976,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
if (this.addLogHook != null) this.addLogHook();
}

async ensureDirectory(fullpath: string) {
const pathElements = fullpath.split("/");
async ensureDirectory(fullPath: string) {
const pathElements = fullPath.split("/");
pathElements.pop();
let c = "";
for (const v of pathElements) {
@@ -962,7 +987,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
} catch (ex) {
// basically skip exceptions.
if (ex.message && ex.message == "Folder already exists.") {
// especialy this message is.
// especially this message is.
} else {
Logger("Folder Create Error");
Logger(ex);
@@ -977,6 +1002,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
if (shouldBeIgnored(pathSrc)) {
return;
}
if (!this.isTargetFile(pathSrc)) return;

const doc = await this.localDatabase.getDBEntry(pathSrc, { rev: docEntry._rev });
if (doc === false) return;
const msg = `DB -> STORAGE (create${force ? ",force" : ""},${doc.datatype}) `;
@@ -990,14 +1017,14 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
await this.ensureDirectory(path);
try {
const newfile = await this.app.vault.createBinary(normalizePath(path), bin, {
const newFile = await this.app.vault.createBinary(normalizePath(path), bin, {
ctime: doc.ctime,
mtime: doc.mtime,
});
this.batchFileChange = this.batchFileChange.filter((e) => e != newfile.path);
this.batchFileChange = this.batchFileChange.filter((e) => e != newFile.path);
Logger(msg + path);
touch(newfile);
this.app.vault.trigger("create", newfile);
touch(newFile);
this.app.vault.trigger("create", newFile);
} catch (ex) {
Logger(msg + "ERROR, Could not write: " + path, LOG_LEVEL.NOTICE);
Logger(ex, LOG_LEVEL.VERBOSE);
@@ -1010,14 +1037,14 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
await this.ensureDirectory(path);
try {
const newfile = await this.app.vault.create(normalizePath(path), doc.data, {
const newFile = await this.app.vault.create(normalizePath(path), doc.data, {
ctime: doc.ctime,
mtime: doc.mtime,
});
this.batchFileChange = this.batchFileChange.filter((e) => e != newfile.path);
this.batchFileChange = this.batchFileChange.filter((e) => e != newFile.path);
Logger(msg + path);
touch(newfile);
this.app.vault.trigger("create", newfile);
touch(newFile);
this.app.vault.trigger("create", newFile);
} catch (ex) {
Logger(msg + "ERROR, Could not parse: " + path + "(" + doc.datatype + ")", LOG_LEVEL.NOTICE);
Logger(ex, LOG_LEVEL.VERBOSE);
@@ -1028,6 +1055,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}

async deleteVaultItem(file: TFile | TFolder) {
if (!this.isTargetFile(file)) return;
const dir = file.parent;
if (this.settings.trashInsteadDelete) {
await this.app.vault.trash(file, false);
@@ -1049,6 +1077,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
if (shouldBeIgnored(pathSrc)) {
return;
}
if (!this.isTargetFile(pathSrc)) return;
if (docEntry._deleted || docEntry.deleted) {
// This occurs not only when files are deleted, but also when conflicts are resolved.
// We have to check no other revisions are left.
@@ -1137,7 +1166,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
await runWithLock("dbchanged", false, async () => {
const w = [...this.queuedEntries];
this.queuedEntries = [];
Logger(`Applyng ${w.length} files`);
Logger(`Applying ${w.length} files`);
for (const entry of w) {
Logger(`Applying ${entry._id} (${entry._rev}) change...`, LOG_LEVEL.VERBOSE);
await this.handleDBChangedAsync(entry);
@@ -1220,7 +1249,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
const now = new Date().getTime();
if (queue.missingChildren.length == 0) {
queue.done = true;
if (isInteralChunk(queue.entry._id)) {
if (isInternalChunk(queue.entry._id)) {
//system file
const filename = id2path(id2filenameInternalChunk(queue.entry._id));
// await this.syncInternalFilesAndDatabase("pull", false, false, [filename])
@@ -1260,8 +1289,9 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
if (isNewFileCompleted) this.procQueuedFiles();
}
async parseIncomingDoc(doc: PouchDB.Core.ExistingDocument<EntryBody>) {
if (!this.isTargetFile(id2path(doc._id))) return;
const skipOldFile = this.settings.skipOlderFilesOnSync && false; //patched temporary.
if ((!isInteralChunk(doc._id)) && skipOldFile) {
if ((!isInternalChunk(doc._id)) && skipOldFile) {
const info = this.app.vault.getAbstractFileByPath(id2path(doc._id));

if (info && info instanceof TFile) {
@@ -1280,9 +1310,11 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
missingChildren: [] as string[],
timeout: now + this.chunkWaitTimeout,
};
if ("children" in doc) {
// If `Read chunks online` is enabled, retrieve chunks from the remote CouchDB directly.
if ((!this.settings.readChunksOnline) && "children" in doc) {
const c = await this.localDatabase.localDatabase.allDocs({ keys: doc.children, include_docs: false });
const missing = c.rows.filter((e) => "error" in e).map((e) => e.key);
// fetch from remote
if (missing.length > 0) Logger(`${doc._id}(${doc._rev}) Queued (waiting ${missing.length} items)`, LOG_LEVEL.VERBOSE);
newQueue.missingChildren = missing;
this.queuedFiles.push(newQueue);
@@ -1561,10 +1593,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
Logger("Initializing", LOG_LEVEL.NOTICE, "syncAll");
}

const filesStorage = this.app.vault.getFiles();
const filesStorage = this.app.vault.getFiles().filter(e => this.isTargetFile(e));
const filesStorageName = filesStorage.map((e) => e.path);
const wf = await this.localDatabase.localDatabase.allDocs();
const filesDatabase = wf.rows.filter((e) => !isChunk(e.id) && !isPluginChunk(e.id) && e.id != "obsydian_livesync_version").filter(e => isValidPath(e.id)).map((e) => id2path(e.id));
const filesDatabase = wf.rows.filter((e) => !isChunk(e.id) && !isPluginChunk(e.id) && e.id != "obsydian_livesync_version").filter(e => isValidPath(e.id)).map((e) => id2path(e.id)).filter(e => this.isTargetFile(e));
const isInitialized = await (this.localDatabase.kvDB.get<boolean>("initialized")) || false;
// Make chunk bigger if it is the initial scan. There must be non-active docs.
if (filesDatabase.length == 0 && !isInitialized) {
@@ -1693,7 +1725,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
if (ex.code && ex.code == "ENOENT") {
//NO OP.
} else {
Logger(`error while delete filder:${folder.path}`, LOG_LEVEL.NOTICE);
Logger(`error while delete folder:${folder.path}`, LOG_LEVEL.NOTICE);
Logger(ex);
}
}
@@ -1896,6 +1928,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {

async pullFile(filename: string, fileList?: TFile[], force?: boolean, rev?: string, waitForReady = true) {
const targetFile = this.app.vault.getAbstractFileByPath(id2path(filename));
if (!this.isTargetFile(targetFile)) return;
if (targetFile == null) {
//have to create;
const doc = await this.localDatabase.getDBEntry(filename, rev ? { rev: rev } : null, false, waitForReady);
@@ -1971,6 +2004,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}

async updateIntoDB(file: TFile, initialScan?: boolean) {
if (!this.isTargetFile(file)) return;
if (shouldBeIgnored(file.path)) {
return;
}
@@ -2025,6 +2059,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}

async deleteFromDB(file: TFile) {
if (!this.isTargetFile(file)) return;
const fullpath = file.path;
Logger(`deleteDB By path:${fullpath}`);
await this.deleteFromDBbyPath(fullpath);
@@ -2294,7 +2329,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
return result;
}

async storeInternaFileToDatabase(file: InternalFileInfo, forceWrite = false) {
async storeInternalFileToDatabase(file: InternalFileInfo, forceWrite = false) {
const id = filename2idInternalChunk(path2id(file.path));
const contentBin = await this.app.vault.adapter.readBinary(file.path);
const content = await arrayBufferToBase64(contentBin);
@@ -2336,7 +2371,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
});
}

async deleteInternaFileOnDatabase(filename: string, forceWrite = false) {
async deleteInternalFileOnDatabase(filename: string, forceWrite = false) {
const id = filename2idInternalChunk(path2id(filename));
const mtime = new Date().getTime();
await runWithLock("file-" + id, false, async () => {
@@ -2392,7 +2427,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
c += "/";
}
}
async extractInternaFileFromDatabase(filename: string, force = false) {
async extractInternalFileFromDatabase(filename: string, force = false) {
const isExists = await this.app.vault.adapter.exists(filename);
const id = filename2idInternalChunk(path2id(filename));

@@ -2455,7 +2490,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
for (const row of docs.rows) {
const doc = row.doc;
if (!("_conflicts" in doc)) continue;
if (isInteralChunk(row.id)) {
if (isInternalChunk(row.id)) {
await this.resolveConflictOnInternalFile(row.id);
}
}
@@ -2555,11 +2590,11 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
const nw = compareMTime(fileOnStorage.mtime, fileOnDatabase.mtime);
if (nw > 0) {
await this.storeInternaFileToDatabase(fileOnStorage);
await this.storeInternalFileToDatabase(fileOnStorage);
}
if (nw < 0) {
// skip if not extraction performed.
if (!await this.extractInternaFileFromDatabase(filename)) return;
if (!await this.extractInternalFileFromDatabase(filename)) return;
}
// If process successfly updated or file contents are same, update cache.
cache.docMtime = fileOnDatabase.mtime;
@@ -2567,22 +2602,21 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
caches[filename] = cache;
countUpdatedFolder(filename);
} else if (!fileOnStorage && fileOnDatabase) {
console.log("pushpull")
if (direction == "push") {
if (fileOnDatabase.deleted) return;
await this.deleteInternaFileOnDatabase(filename);
await this.deleteInternalFileOnDatabase(filename);
} else if (direction == "pull") {
if (await this.extractInternaFileFromDatabase(filename)) {
if (await this.extractInternalFileFromDatabase(filename)) {
countUpdatedFolder(filename);
}
} else if (direction == "safe") {
if (fileOnDatabase.deleted) return
if (await this.extractInternaFileFromDatabase(filename)) {
if (await this.extractInternalFileFromDatabase(filename)) {
countUpdatedFolder(filename);
}
}
} else if (fileOnStorage && !fileOnDatabase) {
await this.storeInternaFileToDatabase(fileOnStorage);
await this.storeInternalFileToDatabase(fileOnStorage);
} else {
throw new Error("Invalid state on hidden file sync");
// Something corrupted?
@@ -2691,4 +2725,13 @@ export default class ObsidianLiveSyncPlugin extends Plugin {

Logger(`Hidden files scanned: ${filesChanged} files had been modified`, logLevel, "sync_internal");
}

isTargetFile(file: string | TAbstractFile) {
if (file instanceof TFile) {
return this.localDatabase.isTargetFile(file.path);
} else if (typeof file == "string") {
return this.localDatabase.isTargetFile(file);
}
}

}
@@ -40,8 +40,8 @@ export const connectRemoteCouchDBWithSetting = (settings: RemoteDBSettings, isMo

const connectRemoteCouchDB = async (uri: string, auth: { username: string; password: string }, disableRequestURI: boolean, passphrase: string | boolean): Promise<string | { db: PouchDB.Database<EntryDoc>; info: PouchDB.Core.DatabaseInfo }> => {
if (!isValidRemoteCouchDBURI(uri)) return "Remote URI is not valid";
if (uri.toLowerCase() != uri) return "Remote URI and database name cound not contain capital letters.";
if (uri.indexOf(" ") !== -1) return "Remote URI and database name cound not contain spaces.";
if (uri.toLowerCase() != uri) return "Remote URI and database name could not contain capital letters.";
if (uri.indexOf(" ") !== -1) return "Remote URI and database name could not contain spaces.";
let authHeader = "";
if (auth.username && auth.password) {
const utf8str = String.fromCharCode.apply(null, new TextEncoder().encode(`${auth.username}:${auth.password}`));
@@ -225,3 +225,55 @@ export const checkSyncInfo = async (db: PouchDB.Database): Promise<boolean> => {
}
}
};


export async function putDesignDocuments(db: PouchDB.Database) {
type DesignDoc = {
_id: string;
_rev: string;
ver: number;
filters: {
default: string,
push: string,
pull: string,
};
}
const design: DesignDoc = {
"_id": "_design/replicate",
"_rev": undefined as string | undefined,
"ver": 2,
"filters": {
"default": function (doc: any, req: any) {
return !("remote" in doc && doc.remote);
}.toString(),
"push": function (doc: any, req: any) {
return true;
}.toString(),
"pull": function (doc: any, req: any) {
return !(doc.type && doc.type == "leaf")
}.toString(),
}
}

// We can use the filter on replication : filter: 'replicate/default',

try {
const w = await db.get<DesignDoc>(design._id);
if (w.ver < design.ver) {
design._rev = w._rev;
//@ts-ignore
await db.put(design);
return true;
}
} catch (ex) {
if (ex.status && ex.status == 404) {
delete design._rev;
//@ts-ignore
await db.put(design);
return true;
} else {
Logger("Could not make design documents", LOG_LEVEL.INFO);
}
}
return false;
}
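putDesignDocuments installs a _design/replicate document whose filters are referenced by name during replication, as the comment above and the syncOptionBase change in the first file of this diff show. A minimal sketch of that wiring follows; the function name, database names, and URL are placeholders, not values from this commit.

import PouchDB from "pouchdb";

// Sketch of how the installed filters are referenced by replication. Based on the design document
// above and the push/pull filter assignment earlier in this diff.
function replicateWithFilters(local: PouchDB.Database, remote: PouchDB.Database, readChunksOnline: boolean) {
    const options: PouchDB.Replication.SyncOptions = { live: true, retry: true };
    if (readChunksOnline) {
        // "replicate/pull" drops leaf (chunk) documents on the way down so they stay remote and are
        // fetched on demand; "replicate/push" currently passes everything through.
        options.pull = { filter: "replicate/pull" };
        options.push = { filter: "replicate/push" };
    }
    return local.sync(remote, options);
}

export function startSyncExample() {
    const local = new PouchDB("livesync-local");
    const remote = new PouchDB("https://couch.example.com/obsidian-notes");
    return replicateWithFilters(local, remote, true);
}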