mirror of https://github.com/vrtmrz/obsidian-livesync.git synced 2024-12-03 08:45:34 +02:00

- Refined:
  - Task scheduling logic has been rewritten.
    - This likely fixes many bugs and instances of fragile behaviour.
- Fixed:
  - Remote chunk fetching now keeps the request interval between requests (see the sketch below).
- New feature:
  - The status shown in the editor can now be displayed as icons only.
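A rough, self-contained sketch of the interval-keeping idea mentioned above; the function and parameter names are illustrative and not the plugin's actual API:

// Hypothetical sketch: fetch remote chunks sequentially while keeping a minimum
// interval between requests, instead of issuing them as fast as possible.
async function fetchWithInterval<T>(
    ids: string[],
    fetchOne: (id: string) => Promise<T>,
    intervalMs = 100
): Promise<T[]> {
    const results: T[] = [];
    for (const id of ids) {
        const started = Date.now();
        results.push(await fetchOne(id));
        const elapsed = Date.now() - started;
        if (elapsed < intervalMs) {
            // Wait out the rest of the interval before the next request.
            await new Promise((resolve) => setTimeout(resolve, intervalMs - elapsed));
        }
    }
    return results;
}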
This commit is contained in:
vorotamoroz 2024-01-12 09:36:49 +00:00
parent c2491fdfad
commit 7f422d58f2
11 changed files with 545 additions and 650 deletions

View File

@ -7,14 +7,16 @@ import { ICXHeader, PERIODIC_PLUGIN_SWEEP, } from "./types";
import { createTextBlob, delay, getDocData } from "./lib/src/utils";
import { Logger } from "./lib/src/logger";
import { WrappedNotice } from "./lib/src/wrapper";
import { readString, crc32CKHash, decodeBinary, arrayBufferToBase64 } from "./lib/src/strbin";
import { readString, decodeBinary, arrayBufferToBase64, sha1 } from "./lib/src/strbin";
import { serialized } from "./lib/src/lock";
import { LiveSyncCommands } from "./LiveSyncCommands";
import { stripAllPrefixes } from "./lib/src/path";
import { PeriodicProcessor, askYesNo, disposeMemoObject, memoIfNotExist, memoObject, retrieveMemoObject, scheduleTask } from "./utils";
import { PluginDialogModal } from "./dialogs";
import { JsonResolveModal } from "./JsonResolveModal";
import { pipeGeneratorToGenerator, processAllGeneratorTasksWithConcurrencyLimit } from './lib/src/task';
import { QueueProcessor } from './lib/src/processor';
import { pluginScanningCount } from './lib/src/stores';
import type ObsidianLiveSyncPlugin from './main';
const d = "\u200b";
const d2 = "\n";
@ -162,6 +164,16 @@ export type PluginDataEx = {
mtime: number,
};
export class ConfigSync extends LiveSyncCommands {
constructor(plugin: ObsidianLiveSyncPlugin) {
super(plugin);
pluginScanningCount.onChanged((e) => {
const total = e.value;
pluginIsEnumerating.set(total != 0);
if (total == 0) {
Logger(`Processing configurations done`, LOG_LEVEL_INFO, "get-plugins");
}
})
}
confirmPopup: WrappedNotice = null;
get kvDB() {
return this.plugin.kvDB;
@ -270,7 +282,7 @@ export class ConfigSync extends LiveSyncCommands {
for (const file of data.files) {
const work = { ...file };
const tempStr = getDocData(work.data);
work.data = [crc32CKHash(tempStr)];
work.data = [await sha1(tempStr)];
xFiles.push(work);
}
return ({
@ -302,65 +314,65 @@ export class ConfigSync extends LiveSyncCommands {
this.plugin.saveSettingData();
}
}
pluginScanProcessor = new QueueProcessor(async (v: AnyEntry[]) => {
const plugin = v[0];
const path = plugin.path || this.getPath(plugin);
const oldEntry = (this.pluginList.find(e => e.documentPath == path));
if (oldEntry && oldEntry.mtime == plugin.mtime) return;
try {
const pluginData = await this.loadPluginData(path);
if (pluginData) {
return [pluginData];
}
// Failed to load
return;
} catch (ex) {
Logger(`Something happened at enumerating customization :${path}`, LOG_LEVEL_NOTICE);
Logger(ex, LOG_LEVEL_VERBOSE);
}
return;
}, { suspended: true, batchSize: 1, concurrentLimit: 5, delay: 300, yieldThreshold: 10 }).pipeTo(
new QueueProcessor(
(pluginDataList) => {
let newList = [...this.pluginList];
for (const item of pluginDataList) {
newList = newList.filter(x => x.documentPath != item.documentPath);
newList.push(item)
}
this.pluginList = newList;
pluginList.set(newList);
return;
}
, { suspended: true, batchSize: 1000, concurrentLimit: 10, delay: 200, yieldThreshold: 25, totalRemainingReactiveSource: pluginScanningCount })).startPipeline().root.onIdle(() => {
Logger(`All files enumerated`, LOG_LEVEL_INFO, "get-plugins");
this.createMissingConfigurationEntry();
});
async updatePluginList(showMessage: boolean, updatedDocumentPath?: FilePathWithPrefix): Promise<void> {
const logLevel = showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO;
// pluginList.set([]);
if (!this.settings.usePluginSync) {
this.pluginScanProcessor.clearQueue();
this.pluginList = [];
pluginList.set(this.pluginList)
return;
}
await Promise.resolve(); // Just to prevent warning.
scheduleTask("update-plugin-list-task", 200, async () => {
await serialized("update-plugin-list", async () => {
try {
const updatedDocumentId = updatedDocumentPath ? await this.path2id(updatedDocumentPath) : "";
const plugins = updatedDocumentPath ?
this.localDatabase.findEntries(updatedDocumentId, updatedDocumentId + "\u{10ffff}", { include_docs: true, key: updatedDocumentId, limit: 1 }) :
this.localDatabase.findEntries(ICXHeader + "", `${ICXHeader}\u{10ffff}`, { include_docs: true });
let count = 0;
pluginIsEnumerating.set(true);
for await (const v of processAllGeneratorTasksWithConcurrencyLimit(20, pipeGeneratorToGenerator(plugins, async plugin => {
const path = plugin.path || this.getPath(plugin);
if (updatedDocumentPath && updatedDocumentPath != path) {
return false;
}
const oldEntry = (this.pluginList.find(e => e.documentPath == path));
if (oldEntry && oldEntry.mtime == plugin.mtime) return false;
try {
count++;
if (count % 10 == 0) Logger(`Enumerating files... ${count}`, logLevel, "get-plugins");
Logger(`plugin-${path}`, LOG_LEVEL_VERBOSE);
return this.loadPluginData(path);
// return entries;
} catch (ex) {
//TODO
Logger(`Something happened at enumerating customization :${path}`, LOG_LEVEL_NOTICE);
console.warn(ex);
}
return false;
}))) {
if ("ok" in v) {
if (v.ok !== false) {
let newList = [...this.pluginList];
const item = v.ok;
newList = newList.filter(x => x.documentPath != item.documentPath);
newList.push(item)
if (updatedDocumentPath != "") newList = newList.filter(e => e.documentPath != updatedDocumentPath);
this.pluginList = newList;
pluginList.set(newList);
}
}
}
Logger(`All files enumerated`, logLevel, "get-plugins");
pluginIsEnumerating.set(false);
this.createMissingConfigurationEntry();
} finally {
pluginIsEnumerating.set(false);
}
});
try {
const updatedDocumentId = updatedDocumentPath ? await this.path2id(updatedDocumentPath) : "";
const plugins = updatedDocumentPath ?
this.localDatabase.findEntries(updatedDocumentId, updatedDocumentId + "\u{10ffff}", { include_docs: true, key: updatedDocumentId, limit: 1 }) :
this.localDatabase.findEntries(ICXHeader + "", `${ICXHeader}\u{10ffff}`, { include_docs: true });
for await (const v of plugins) {
const path = v.path || this.getPath(v);
if (updatedDocumentPath && updatedDocumentPath != path) continue;
this.pluginScanProcessor.enqueue(v);
}
} finally {
pluginIsEnumerating.set(false);
});
}
pluginIsEnumerating.set(false);
// return entries;
}
async compareUsingDisplayData(dataA: PluginDataExDisplay, dataB: PluginDataExDisplay) {
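The pluginScanProcessor above shows the rewritten task scheduling in miniature: two QueueProcessor stages joined with pipeTo, the first loading each entry and the second merging the results. The following stripped-down sketch restates that shape using only calls visible in this diff (the constructor options, pipeTo, startPipeline, root, onIdle, enqueue); loadOne and mergeResults are hypothetical placeholders, and the option values are illustrative:

import { QueueProcessor } from "./lib/src/processor";
import { Logger } from "./lib/src/logger";

type LoadedItem = { documentPath: string };
// Hypothetical placeholders standing in for loadPluginData and the pluginList update.
declare function loadOne(path: string): Promise<LoadedItem | undefined>;
declare function mergeResults(items: LoadedItem[]): void;

const scanProcessor = new QueueProcessor(async (paths: string[]) => {
    // Stage 1: batchSize is 1, so each call handles a single path; the returned
    // array is handed to the next stage, and returning nothing drops the entry.
    const item = await loadOne(paths[0]);
    return item ? [item] : undefined;
}, { suspended: true, batchSize: 1, concurrentLimit: 5, delay: 300, yieldThreshold: 10 })
    .pipeTo(new QueueProcessor((items: LoadedItem[]) => {
        // Stage 2: merge the loaded items into the shared list in larger batches.
        mergeResults(items);
        return;
    }, { suspended: true, batchSize: 1000, concurrentLimit: 10, delay: 200, yieldThreshold: 25 }))
    .startPipeline()
    .root
    .onIdle(() => Logger("All queued paths processed"));

// Work is fed in afterwards, as updatePluginList() does with database entries:
// scanProcessor.enqueue("plugin/config/path");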

View File

@ -1,16 +1,18 @@
import { normalizePath, type PluginManifest } from "./deps";
import { type EntryDoc, type LoadedEntry, type InternalFileEntry, type FilePathWithPrefix, type FilePath, LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE, MODE_SELECTIVE, MODE_PAUSED, type SavingEntry } from "./lib/src/types";
import { type InternalFileInfo, ICHeader, ICHeaderEnd } from "./types";
import { Parallels, createBinaryBlob, delay, isDocContentSame } from "./lib/src/utils";
import { createBinaryBlob, delay, isDocContentSame } from "./lib/src/utils";
import { Logger } from "./lib/src/logger";
import { PouchDB } from "./lib/src/pouchdb-browser.js";
import { scheduleTask, isInternalMetadata, PeriodicProcessor } from "./utils";
import { isInternalMetadata, PeriodicProcessor } from "./utils";
import { WrappedNotice } from "./lib/src/wrapper";
import { decodeBinary, encodeBinary } from "./lib/src/strbin";
import { serialized } from "./lib/src/lock";
import { JsonResolveModal } from "./JsonResolveModal";
import { LiveSyncCommands } from "./LiveSyncCommands";
import { addPrefix, stripAllPrefixes } from "./lib/src/path";
import { KeyedQueueProcessor, QueueProcessor } from "./lib/src/processor";
import { hiddenFilesEventCount, hiddenFilesProcessingCount } from "./lib/src/stores";
export class HiddenFileSync extends LiveSyncCommands {
periodicInternalFileScanProcessor: PeriodicProcessor = new PeriodicProcessor(this.plugin, async () => this.settings.syncInternalFiles && this.localDatabase.isReady && await this.syncInternalFilesAndDatabase("push", false));
@ -75,22 +77,17 @@ export class HiddenFileSync extends LiveSyncCommands {
return;
}
procInternalFiles: string[] = [];
async execInternalFile() {
await serialized("execInternal", async () => {
const w = [...this.procInternalFiles];
this.procInternalFiles = [];
Logger(`Applying hidden ${w.length} files change...`);
await this.syncInternalFilesAndDatabase("pull", false, false, w);
Logger(`Applying hidden ${w.length} files changed`);
});
}
procInternalFile(filename: string) {
this.procInternalFiles.push(filename);
scheduleTask("procInternal", 500, async () => {
await this.execInternalFile();
});
this.internalFileProcessor.enqueueWithKey(filename, filename);
}
internalFileProcessor = new KeyedQueueProcessor<string, any>(
async (filenames) => {
Logger(`START :Applying hidden ${filenames.length} files change`, LOG_LEVEL_VERBOSE);
await this.syncInternalFilesAndDatabase("pull", false, false, filenames);
Logger(`DONE :Applying hidden ${filenames.length} files change`, LOG_LEVEL_VERBOSE);
return;
}, { batchSize: 100, concurrentLimit: 1, delay: 100, yieldThreshold: 10, suspended: false, totalRemainingReactiveSource: hiddenFilesEventCount }
);
recentProcessedInternalFiles = [] as string[];
async watchVaultRawEventsAsync(path: FilePath) {
@ -278,28 +275,38 @@ export class HiddenFileSync extends LiveSyncCommands {
acc[stripAllPrefixes(this.getPath(cur))] = cur;
return acc;
}, {} as { [key: string]: InternalFileEntry; });
const para = Parallels();
for (const filename of allFileNames) {
await new QueueProcessor(async (filenames: FilePath[]) => {
const filename = filenames[0];
processed++;
if (processed % 100 == 0) {
Logger(`Hidden file: ${processed}/${fileCount}`, logLevel, "sync_internal");
}
if (!filename) continue;
if (!filename) return;
if (ignorePatterns.some(e => filename.match(e)))
continue;
return;
if (await this.plugin.isIgnoredByIgnoreFiles(filename)) {
continue;
return;
}
const fileOnStorage = filename in filesMap ? filesMap[filename] : undefined;
const fileOnDatabase = filename in filesOnDBMap ? filesOnDBMap[filename] : undefined;
const cache = filename in caches ? caches[filename] : { storageMtime: 0, docMtime: 0 };
await para.wait(5);
const proc = (async (xFileOnStorage: InternalFileInfo, xFileOnDatabase: InternalFileEntry) => {
return [{
filename,
fileOnStorage,
fileOnDatabase,
}]
}, { suspended: true, batchSize: 1, concurrentLimit: 10, delay: 0, totalRemainingReactiveSource: hiddenFilesProcessingCount })
.pipeTo(new QueueProcessor(async (params) => {
const
{
filename,
fileOnStorage: xFileOnStorage,
fileOnDatabase: xFileOnDatabase
} = params[0];
if (xFileOnStorage && xFileOnDatabase) {
const cache = filename in caches ? caches[filename] : { storageMtime: 0, docMtime: 0 };
// Both => Synchronize
if ((direction != "pullForce" && direction != "pushForce") && xFileOnDatabase.mtime == cache.docMtime && xFileOnStorage.mtime == cache.storageMtime) {
return;
@ -340,11 +347,12 @@ export class HiddenFileSync extends LiveSyncCommands {
throw new Error("Invalid state on hidden file sync");
// Something corrupted?
}
return;
}, { suspended: true, batchSize: 1, concurrentLimit: 5, delay: 0 }))
.root
.enqueueAll(allFileNames)
.startPipeline().waitForPipeline();
});
para.add(proc(fileOnStorage, fileOnDatabase))
}
await para.all();
await this.kvDB.set("diff-caches-internal", caches);
// When files have been retrieved from the database, they must be reloaded.
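Earlier, hidden-file events were buffered in procInternalFiles and debounced with scheduleTask; they now flow through the KeyedQueueProcessor shown above. A stripped-down sketch of that pattern, under the assumption that keying by filename lets repeated events for the same file coalesce; applyHiddenFileChanges is a hypothetical stand-in for syncInternalFilesAndDatabase("pull", ...):

import { KeyedQueueProcessor } from "./lib/src/processor";
import { Logger } from "./lib/src/logger";

// Hypothetical stand-in for syncInternalFilesAndDatabase("pull", false, false, filenames).
declare function applyHiddenFileChanges(filenames: string[]): Promise<void>;

const hiddenFileProcessor = new KeyedQueueProcessor<string, any>(
    async (filenames) => {
        // Batched handler: every filename queued so far is applied in one pass.
        Logger(`Applying ${filenames.length} hidden file changes`);
        await applyHiddenFileChanges(filenames);
        return;
    },
    { batchSize: 100, concurrentLimit: 1, delay: 100, yieldThreshold: 10, suspended: false }
);

// Each raw vault event simply enqueues its filename, keyed by the filename itself.
function onHiddenFileEvent(filename: string) {
    hiddenFileProcessor.enqueueWithKey(filename, filename);
}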

View File

@ -321,7 +321,6 @@ Of course, we are able to disable these features.`
this.plugin.settings.suspendFileWatching = false;
await this.plugin.syncAllFiles(true);
await this.plugin.loadQueuedFiles();
this.plugin.procQueuedFiles();
await this.plugin.saveSettings();
}

View File

@ -1,5 +1,6 @@
import { App, Modal } from "./deps";
import { logMessageStore } from "./lib/src/stores";
import type { ReactiveInstance, } from "./lib/src/reactive";
import { logMessages } from "./lib/src/stores";
import { escapeStringToHTML } from "./lib/src/strbin";
import ObsidianLiveSyncPlugin from "./main";
@ -21,14 +22,16 @@ export class LogDisplayModal extends Modal {
div.addClass("op-scrollable");
div.addClass("op-pre");
this.logEl = div;
this.unsubscribe = logMessageStore.observe((e) => {
function updateLog(logs: ReactiveInstance<string[]>) {
const e = logs.value;
let msg = "";
for (const v of e) {
msg += escapeStringToHTML(v) + "<br>";
}
this.logEl.innerHTML = msg;
})
logMessageStore.invalidate();
}
logMessages.onChanged(updateLog);
this.unsubscribe = () => logMessages.offChanged(updateLog);
}
onClose() {
const { contentEl } = this;

View File

@ -1,26 +1,27 @@
<script lang="ts">
import { onDestroy, onMount } from "svelte";
import { logMessageStore } from "./lib/src/stores";
import { logMessages } from "./lib/src/stores";
import type { ReactiveInstance } from "./lib/src/reactive";
import { Logger } from "./lib/src/logger";
let unsubscribe: () => void;
let messages = [] as string[];
let wrapRight = false;
let autoScroll = true;
let suspended = false;
function updateLog(logs: ReactiveInstance<string[]>) {
const e = logs.value;
if (!suspended) {
messages = [...e];
setTimeout(() => {
if (scroll) scroll.scrollTop = scroll.scrollHeight;
}, 10);
}
}
onMount(async () => {
unsubscribe = logMessageStore.observe((e) => {
if (!suspended) {
messages = [...e];
if (autoScroll) {
if (scroll) scroll.scrollTop = scroll.scrollHeight;
}
}
});
logMessageStore.invalidate();
setTimeout(() => {
if (scroll) scroll.scrollTop = scroll.scrollHeight;
}, 100);
logMessages.onChanged(updateLog);
Logger("Log window opened");
unsubscribe = () => logMessages.offChanged(updateLog);
});
onDestroy(() => {
if (unsubscribe) unsubscribe();
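Both log views above switch from logMessageStore.observe()/invalidate() to subscribing to the reactive logMessages source. A minimal sketch of that subscribe/unsubscribe shape; only onChanged, offChanged and .value are taken from this diff, and the console handler is illustrative:

import { logMessages } from "./lib/src/stores";
import type { ReactiveInstance } from "./lib/src/reactive";

// The handler receives the ReactiveInstance and reads the current log lines from `.value`.
function printLatest(logs: ReactiveInstance<string[]>) {
    const lines = logs.value;
    console.log(lines[lines.length - 1] ?? "");
}

logMessages.onChanged(printLatest);
// Keep a matching unsubscriber for teardown, as onClose()/onDestroy() do above.
const unsubscribe = () => logMessages.offChanged(printLatest);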

View File

@ -4,7 +4,7 @@ import { delay } from "./lib/src/utils";
import { Semaphore } from "./lib/src/semaphore";
import { versionNumberString2Number } from "./lib/src/strbin";
import { Logger } from "./lib/src/logger";
import { checkSyncInfo, isCloudantURI } from "./lib/src/utils_couchdb.js";
import { checkSyncInfo, isCloudantURI } from "./lib/src/utils_couchdb";
import { testCrypt } from "./lib/src/e2ee_v2";
import ObsidianLiveSyncPlugin from "./main";
import { askYesNo, performRebuildDB, requestToCouchDB, scheduleTask } from "./utils";
@ -746,8 +746,20 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
toggle.setValue(this.plugin.settings.showStatusOnEditor).onChange(async (value) => {
this.plugin.settings.showStatusOnEditor = value;
await this.plugin.saveSettings();
this.display();
})
);
if (this.plugin.settings.showStatusOnEditor) {
new Setting(containerGeneralSettingsEl)
.setName("Show status as icons only")
.setDesc("")
.addToggle((toggle) =>
toggle.setValue(this.plugin.settings.showOnlyIconsOnEditor).onChange(async (value) => {
this.plugin.settings.showOnlyIconsOnEditor = value;
await this.plugin.saveSettings();
})
);
}
containerGeneralSettingsEl.createEl("h4", { text: "Logging" });
new Setting(containerGeneralSettingsEl)

View File

@ -1,15 +1,13 @@
import type { SerializedFileAccess } from "./SerializedFileAccess";
import { Plugin, TAbstractFile, TFile, TFolder } from "./deps";
import { isPlainText, shouldBeIgnored } from "./lib/src/path";
import { getGlobalStore } from "./lib/src/store";
import type { KeyedQueueProcessor } from "./lib/src/processor";
import { type FilePath, type ObsidianLiveSyncSettings } from "./lib/src/types";
import { type FileEventItem, type FileEventType, type FileInfo, type InternalFileInfo, type queueItem } from "./types";
import { type FileEventItem, type FileEventType, type FileInfo, type InternalFileInfo } from "./types";
export abstract class StorageEventManager {
abstract fetchEvent(): FileEventItem | false;
abstract cancelRelativeEvent(item: FileEventItem): void;
abstract getQueueLength(): number;
abstract beginWatch(): void;
}
type LiveSyncForStorageEventManager = Plugin &
@ -19,19 +17,18 @@ type LiveSyncForStorageEventManager = Plugin &
vaultAccess: SerializedFileAccess
} & {
isTargetFile: (file: string | TAbstractFile) => Promise<boolean>,
procFileEvent: (applyBatch?: boolean) => Promise<any>,
fileEventQueue: KeyedQueueProcessor<FileEventItem, any>
};
export class StorageEventManagerObsidian extends StorageEventManager {
plugin: LiveSyncForStorageEventManager;
queuedFilesStore = getGlobalStore("queuedFiles", { queuedItems: [] as queueItem[], fileEventItems: [] as FileEventItem[] });
watchedFileEventQueue = [] as FileEventItem[];
constructor(plugin: LiveSyncForStorageEventManager) {
super();
this.plugin = plugin;
}
beginWatch() {
const plugin = this.plugin;
this.watchVaultChange = this.watchVaultChange.bind(this);
this.watchVaultCreate = this.watchVaultCreate.bind(this);
this.watchVaultDelete = this.watchVaultDelete.bind(this);
@ -43,6 +40,7 @@ export class StorageEventManagerObsidian extends StorageEventManager {
plugin.registerEvent(plugin.app.vault.on("create", this.watchVaultCreate));
//@ts-ignore : Internal API
plugin.registerEvent(plugin.app.vault.on("raw", this.watchVaultRawEvents));
plugin.fileEventQueue.startPipeline();
}
watchVaultCreate(file: TAbstractFile, ctx?: any) {
@ -90,7 +88,6 @@ export class StorageEventManagerObsidian extends StorageEventManager {
}
// Cache the file and wait until it can be processed.
async appendWatchEvent(params: { type: FileEventType, file: TAbstractFile | InternalFileInfo, oldPath?: string }[], ctx?: any) {
let forcePerform = false;
for (const param of params) {
if (shouldBeIgnored(param.file.path)) {
continue;
@ -116,33 +113,6 @@ export class StorageEventManagerObsidian extends StorageEventManager {
if (!cache) cache = await this.plugin.vaultAccess.vaultRead(file);
}
}
if (type == "DELETE" || type == "RENAME") {
forcePerform = true;
}
if (this.plugin.settings.batchSave && !this.plugin.settings.liveSync) {
// if the latest event is the same type, omit that
// a.md MODIFY <- this should be cancelled when a.md MODIFIED
// b.md MODIFY <- this should be cancelled when b.md MODIFIED
// a.md MODIFY
// a.md CREATE
// :
let i = this.watchedFileEventQueue.length;
L1:
while (i >= 0) {
i--;
if (i < 0) break L1;
if (this.watchedFileEventQueue[i].args.file.path != file.path) {
continue L1;
}
if (this.watchedFileEventQueue[i].type != type) break L1;
this.watchedFileEventQueue.remove(this.watchedFileEventQueue[i]);
//this.queuedFilesStore.set({ queuedItems: this.queuedFiles, fileEventItems: this.watchedFileEventQueue });
this.queuedFilesStore.apply((value) => ({ ...value, fileEventItems: this.watchedFileEventQueue }));
}
}
const fileInfo = file instanceof TFile ? {
ctime: file.stat.ctime,
mtime: file.stat.mtime,
@ -150,7 +120,8 @@ export class StorageEventManagerObsidian extends StorageEventManager {
path: file.path,
size: file.stat.size
} as FileInfo : file as InternalFileInfo;
this.watchedFileEventQueue.push({
this.plugin.fileEventQueue.enqueueWithKey(`file-${fileInfo.path}`, {
type,
args: {
file: fileInfo,
@ -161,21 +132,5 @@ export class StorageEventManagerObsidian extends StorageEventManager {
key: atomicKey
})
}
// this.queuedFilesStore.set({ queuedItems: this.queuedFiles, fileEventItems: this.watchedFileEventQueue });
this.queuedFilesStore.apply((value) => ({ ...value, fileEventItems: this.watchedFileEventQueue }));
this.plugin.procFileEvent(forcePerform);
}
fetchEvent(): FileEventItem | false {
if (this.watchedFileEventQueue.length == 0) return false;
const item = this.watchedFileEventQueue.shift();
this.queuedFilesStore.apply((value) => ({ ...value, fileEventItems: this.watchedFileEventQueue }));
return item;
}
cancelRelativeEvent(item: FileEventItem) {
this.watchedFileEventQueue = [...this.watchedFileEventQueue].filter(e => e.key != item.key);
this.queuedFilesStore.apply((value) => ({ ...value, fileEventItems: this.watchedFileEventQueue }));
}
getQueueLength() {
return this.watchedFileEventQueue.length;
}
}

@ -1 +1 @@
Subproject commit 724c3c364aff46c8f862c354c3c9235621286052
Subproject commit 33f7e69433c45692b73d97291a30186389fa12d2

File diff suppressed because it is too large.

View File

@ -1,4 +1,4 @@
import { normalizePath, TFile, Platform, TAbstractFile, App, Plugin, type RequestUrlParam, requestUrl } from "./deps";
import { normalizePath, Platform, TAbstractFile, App, Plugin, type RequestUrlParam, requestUrl } from "./deps";
import { path2id_base, id2path_base, isValidFilenameInLinux, isValidFilenameInDarwin, isValidFilenameInWidows, isValidFilenameInAndroid, stripAllPrefixes } from "./lib/src/path";
import { Logger } from "./lib/src/logger";
@ -8,6 +8,8 @@ import { InputStringDialog, PopoverSelectString } from "./dialogs";
import ObsidianLiveSyncPlugin from "./main";
import { writeString } from "./lib/src/strbin";
export { scheduleTask, setPeriodicTask, cancelTask, cancelAllTasks, cancelPeriodicTask, cancelAllPeriodicTask, } from "./lib/src/task";
// For backward compatibility, the path is used to determine the id.
// Only IDs that CouchDB cannot accept (those starting with an underscore) are prefixed with "/".
// The first slash is removed when the path is normalized.
@ -43,49 +45,6 @@ export function getPathFromTFile(file: TAbstractFile) {
return file.path as FilePath;
}
const tasks: { [key: string]: ReturnType<typeof setTimeout> } = {};
export function scheduleTask(key: string, timeout: number, proc: (() => Promise<any> | void), skipIfTaskExist?: boolean) {
if (skipIfTaskExist && key in tasks) {
return;
}
cancelTask(key);
tasks[key] = setTimeout(async () => {
delete tasks[key];
await proc();
}, timeout);
}
export function cancelTask(key: string) {
if (key in tasks) {
clearTimeout(tasks[key]);
delete tasks[key];
}
}
export function cancelAllTasks() {
for (const v in tasks) {
clearTimeout(tasks[v]);
delete tasks[v];
}
}
const intervals: { [key: string]: ReturnType<typeof setInterval> } = {};
export function setPeriodicTask(key: string, timeout: number, proc: (() => Promise<any> | void)) {
cancelPeriodicTask(key);
intervals[key] = setInterval(async () => {
delete intervals[key];
await proc();
}, timeout);
}
export function cancelPeriodicTask(key: string) {
if (key in intervals) {
clearInterval(intervals[key]);
delete intervals[key];
}
}
export function cancelAllPeriodicTask() {
for (const v in intervals) {
clearInterval(intervals[v]);
delete intervals[v];
}
}
const memos: { [key: string]: any } = {};
export function memoObject<T>(key: string, obj: T): T {

View File

@ -16,6 +16,7 @@
"importHelpers": false,
"alwaysStrict": true,
"allowImportingTsExtensions": true,
"noEmit": true,
"lib": [
"es2018",
"DOM",