mirror of
https://github.com/laurent22/joplin.git
synced 2025-01-11 18:24:43 +02:00
Delete generated .js files (#4717)
This commit is contained in:
parent
a0ec926ba2
commit
5b65186b4d
@ -1,135 +0,0 @@
|
||||
"use strict";
|
||||
// TypeScript-generated helper that downlevels async/await: drives a generator
// function and returns a Promise that settles with the generator's final value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap each yielded value in the Promise implementation P (native Promise by default).
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator one step at a time; a rejected awaited value is
        // thrown back into the generator so user try/catch blocks work.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const EncryptionService_1 = require("./services/EncryptionService");
|
||||
const shim_1 = require("./shim");
|
||||
const ResourceService_1 = require("./services/ResourceService");
|
||||
// Base class for sync targets (filesystem, OneDrive, Nextcloud, ...). Holds the
// database handle, target options, logger and the lazily-created synchronizer.
// Subclasses override the static metadata methods and the init*() factories.
class BaseSyncTarget {
    constructor(db, options = null) {
        this.synchronizer_ = null;
        this.initState_ = null;
        this.logger_ = null;
        this.db_ = db;
        this.options_ = options;
    }
    // Whether this target can validate its configuration before syncing.
    static supportsConfigCheck() {
        return false;
    }
    // Returns the option `name`, or `defaultValue` when no options were given
    // or the key is absent.
    option(name, defaultValue = null) {
        if (!this.options_) return defaultValue;
        return name in this.options_ ? this.options_[name] : defaultValue;
    }
    logger() {
        return this.logger_;
    }
    setLogger(v) {
        this.logger_ = v;
    }
    db() {
        return this.db_;
    }
    // If [] is returned it means all platforms are supported
    static unsupportedPlatforms() {
        return [];
    }
    async isAuthenticated() {
        return false;
    }
    authRouteName() {
        return null;
    }
    static id() {
        throw new Error('id() not implemented');
    }
    // Note: it cannot be called just "name()" because that's a reserved keyword and
    // it would throw an obscure error in React Native.
    static targetName() {
        throw new Error('targetName() not implemented');
    }
    static label() {
        throw new Error('label() not implemented');
    }
    async initSynchronizer() {
        throw new Error('initSynchronizer() not implemented');
    }
    async initFileApi() {
        throw new Error('initFileApi() not implemented');
    }
    // Lazily creates the file API via initFileApi() and caches it.
    async fileApi() {
        if (!this.fileApi_) this.fileApi_ = await this.initFileApi();
        return this.fileApi_;
    }
    // Usually each sync target should create and setup its own file API via initFileApi()
    // but for testing purposes it might be convenient to provide it here so that multiple
    // clients can share and sync to the same file api (see test-utils.js)
    setFileApi(v) {
        this.fileApi_ = v;
    }
    // Lazily creates (or waits for) the synchronizer. Only one initialisation
    // runs at a time; concurrent callers poll initState_ until it is 'ready'
    // (resolve) or 'error' (reject).
    async synchronizer() {
        if (this.synchronizer_) return this.synchronizer_;
        if (this.initState_ == 'started') {
            // Synchronizer is already being initialized, so wait here till it's done.
            return new Promise((resolve, reject) => {
                const pollId = shim_1.default.setInterval(() => {
                    if (this.initState_ == 'ready') {
                        shim_1.default.clearInterval(pollId);
                        resolve(this.synchronizer_);
                    }
                    if (this.initState_ == 'error') {
                        shim_1.default.clearInterval(pollId);
                        reject(new Error('Could not initialise synchroniser'));
                    }
                }, 1000);
            });
        }
        this.initState_ = 'started';
        try {
            this.synchronizer_ = await this.initSynchronizer();
            this.synchronizer_.setLogger(this.logger());
            this.synchronizer_.setEncryptionService(EncryptionService_1.default.instance());
            this.synchronizer_.setResourceService(ResourceService_1.default.instance());
            this.synchronizer_.dispatch = BaseSyncTarget.dispatch;
            this.initState_ = 'ready';
            return this.synchronizer_;
        }
        catch (error) {
            this.initState_ = 'error';
            throw error;
        }
    }
    // True while a sync is running on an authenticated, initialised target.
    async syncStarted() {
        if (!this.synchronizer_) return false;
        if (!(await this.isAuthenticated())) return false;
        const sync = await this.synchronizer();
        return sync.state() != 'idle';
    }
}
|
||||
// The class is the module's default export.
exports.default = BaseSyncTarget;
// Redux-style dispatch hook; the application replaces this no-op with the real
// dispatcher, and it is wired into each synchronizer in synchronizer().
BaseSyncTarget.dispatch = () => { };
//# sourceMappingURL=BaseSyncTarget.js.map
|
@ -1,398 +0,0 @@
|
||||
"use strict";
|
||||
// TypeScript-generated helper that downlevels async/await: drives a generator
// function and returns a Promise that settles with the generator's final value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap each yielded value in the Promise implementation P (native Promise by default).
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator one step at a time; a rejected awaited value is
        // thrown back into the generator so user try/catch blocks work.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const Logger_1 = require("./Logger");
|
||||
const time_1 = require("./time");
|
||||
const shim_1 = require("./shim");
|
||||
const Mutex = require('async-mutex').Mutex;
|
||||
// Thin wrapper around a platform SQLite driver. Adds query logging, automatic
// retry on transient SQLite errors (SQLITE_IOERR / SQLITE_BUSY), mutex-guarded
// batch transactions, and static helpers that build INSERT/UPDATE statements
// from plain objects.
//
// Fix in this revision: "Unkonwn fieldType" error message typo -> "Unknown fieldType".
class Database {
    constructor(driver) {
        this.debugMode_ = false;
        this.sqlQueryLogEnabled_ = false;
        this.logger_ = new Logger_1.default();
        // Query prefixes (lowercased match) that logQuery() should skip.
        this.logExcludedQueryTypes_ = [];
        // Serialises transactionExecBatch() - SQLite allows one transaction at a time.
        this.batchTransactionMutex_ = new Mutex();
        this.profilingEnabled_ = false;
        this.queryId_ = 1;
        this.driver_ = driver;
    }
    setLogExcludedQueryTypes(v) {
        this.logExcludedQueryTypes_ = v;
    }
    // Converts the SQLite error to a regular JS error
    // so that it prints a stacktrace when passed to
    // console.error()
    sqliteErrorToJsError(error, sql = null, params = null) {
        return this.driver().sqliteErrorToJsError(error, sql, params);
    }
    setLogger(l) {
        this.logger_ = l;
    }
    logger() {
        return this.logger_;
    }
    driver() {
        return this.driver_;
    }
    // Opens the underlying driver; wraps any driver failure with the options
    // that were used, to make misconfiguration easier to diagnose.
    open(options) {
        return __awaiter(this, void 0, void 0, function* () {
            try {
                yield this.driver().open(options);
            }
            catch (error) {
                throw new Error(`Cannot open database: ${error.message}: ${JSON.stringify(options)}`);
            }
            this.logger().info('Database was open successfully');
        });
    }
    // Backtick-quotes a field name; supports "table.field" (only the field part
    // is quoted) and passes "*" through unchanged.
    escapeField(field) {
        if (field == '*')
            return '*';
        const p = field.split('.');
        if (p.length == 1)
            return `\`${field}\``;
        if (p.length == 2)
            return `${p[0]}.\`${p[1]}\``;
        throw new Error(`Invalid field format: ${field}`);
    }
    escapeFields(fields) {
        if (fields == '*')
            return '*';
        const output = [];
        for (let i = 0; i < fields.length; i++) {
            output.push(this.escapeField(fields[i]));
        }
        return output;
    }
    // Runs the named driver call (selectOne/selectAll/exec) with retry on
    // transient errors: backs off starting at 50ms, growing 1.5x, and gives up
    // after ~20s of cumulative waiting. Accepts either (sql, params) or a
    // wrapped { sql, params } object as produced by wrapQuery().
    tryCall(callName, inputSql, inputParams) {
        return __awaiter(this, void 0, void 0, function* () {
            let sql = null;
            let params = null;
            if (typeof inputSql === 'object') {
                params = inputSql.params;
                sql = inputSql.sql;
            }
            else {
                params = inputParams;
                sql = inputSql;
            }
            let waitTime = 50;
            let totalWaitTime = 0;
            const callStartTime = Date.now();
            let profilingTimeoutId = null;
            while (true) {
                try {
                    this.logQuery(sql, params);
                    const queryId = this.queryId_++;
                    if (this.profilingEnabled_) {
                        console.info(`SQL START ${queryId}`, sql, params);
                        // Warn periodically while a query is still running.
                        profilingTimeoutId = shim_1.default.setInterval(() => {
                            console.warn(`SQL ${queryId} has been running for ${Date.now() - callStartTime}: ${sql}`);
                        }, 3000);
                    }
                    const result = yield this.driver()[callName](sql, params);
                    if (this.profilingEnabled_) {
                        shim_1.default.clearInterval(profilingTimeoutId);
                        profilingTimeoutId = null;
                        const elapsed = Date.now() - callStartTime;
                        if (elapsed > 10)
                            console.info(`SQL END ${queryId}`, elapsed, sql, params);
                    }
                    return result; // No exception was thrown
                }
                catch (error) {
                    if (error && (error.code == 'SQLITE_IOERR' || error.code == 'SQLITE_BUSY')) {
                        if (totalWaitTime >= 20000)
                            throw this.sqliteErrorToJsError(error, sql, params);
                        // NOTE: don't put logger statements here because it might log to the database, which
                        // could result in an error being thrown again.
                        // this.logger().warn(sprintf('Error %s: will retry in %s milliseconds', error.code, waitTime));
                        // this.logger().warn('Error was: ' + error.toString());
                        yield time_1.default.msleep(waitTime);
                        totalWaitTime += waitTime;
                        waitTime *= 1.5;
                    }
                    else {
                        throw this.sqliteErrorToJsError(error, sql, params);
                    }
                }
                finally {
                    if (profilingTimeoutId)
                        shim_1.default.clearInterval(profilingTimeoutId);
                }
            }
        });
    }
    selectOne(sql, params = null) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.tryCall('selectOne', sql, params);
        });
    }
    loadExtension( /* path */) {
        return __awaiter(this, void 0, void 0, function* () {
            return; // Disabled for now as fuzzy search extension is not in use
            // let result = null;
            // try {
            // result = await this.driver().loadExtension(path);
            // return result;
            // } catch (e) {
            // throw new Error(`Could not load extension ${path}`);
            // }
        });
    }
    selectAll(sql, params = null) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.tryCall('selectAll', sql, params);
        });
    }
    // Runs selectAll and projects a single column. Note: throws for any falsy
    // cell value (including 0 or ''), not only for a missing column.
    selectAllFields(sql, params, field) {
        return __awaiter(this, void 0, void 0, function* () {
            const rows = yield this.tryCall('selectAll', sql, params);
            const output = [];
            for (let i = 0; i < rows.length; i++) {
                const v = rows[i][field];
                if (!v)
                    throw new Error(`No such field: ${field}. Query was: ${sql}`);
                output.push(rows[i][field]);
            }
            return output;
        });
    }
    exec(sql, params = null) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.tryCall('exec', sql, params);
        });
    }
    // Runs the queries inside a single transaction (unless there is only one,
    // which needs no transaction). Rolls back on the first failure.
    transactionExecBatch(queries) {
        return __awaiter(this, void 0, void 0, function* () {
            if (queries.length <= 0)
                return;
            if (queries.length == 1) {
                const q = this.wrapQuery(queries[0]);
                yield this.exec(q.sql, q.params);
                return;
            }
            // There can be only one transaction running at a time so use a mutex
            const release = yield this.batchTransactionMutex_.acquire();
            try {
                yield this.exec('BEGIN TRANSACTION');
                for (let i = 0; i < queries.length; i++) {
                    const query = this.wrapQuery(queries[i]);
                    yield this.exec(query.sql, query.params);
                }
                yield this.exec('COMMIT');
            }
            catch (error) {
                yield this.exec('ROLLBACK');
                throw error;
            }
            finally {
                release();
            }
        });
    }
    // Maps an enum string to its numeric id. 'fieldType' values resolve through
    // the TYPE_* class constants defined below the class.
    static enumId(type, s) {
        if (type == 'settings') {
            if (s == 'int')
                return 1;
            if (s == 'string')
                return 2;
        }
        if (type == 'fieldType') {
            if (s)
                s = s.toUpperCase();
            if (s == 'INTEGER')
                s = 'INT';
            if (!(`TYPE_${s}` in this))
                throw new Error(`Unknown fieldType: ${s}`);
            return this[`TYPE_${s}`];
        }
        if (type == 'syncTarget') {
            if (s == 'memory')
                return 1;
            if (s == 'filesystem')
                return 2;
            if (s == 'onedrive')
                return 3;
        }
        throw new Error(`Unknown enum type or value: ${type}, ${s}`);
    }
    // Inverse of enumId() for 'fieldType'; other types return undefined.
    static enumName(type, id) {
        if (type === 'fieldType') {
            if (id === Database.TYPE_UNKNOWN)
                return 'unknown';
            if (id === Database.TYPE_INT)
                return 'int';
            if (id === Database.TYPE_TEXT)
                return 'text';
            if (id === Database.TYPE_NUMERIC)
                return 'numeric';
            throw new Error(`Invalid type id: ${id}`);
        }
        // Or maybe an error should be thrown
        return undefined;
    }
    // Coerces a raw driver value to the JS type matching the column type;
    // null/undefined pass through as null.
    static formatValue(type, value) {
        if (value === null || value === undefined)
            return null;
        if (type == this.TYPE_INT)
            return Number(value);
        if (type == this.TYPE_TEXT)
            return value;
        if (type == this.TYPE_NUMERIC)
            return Number(value);
        throw new Error(`Unknown type: ${type}`);
    }
    // Splits a multi-statement SQL string into individual statements, skipping
    // blank lines and "--" comments. A statement ends on a line ending with ';'.
    sqlStringToLines(sql) {
        const output = [];
        const lines = sql.split('\n');
        let statement = '';
        for (let i = 0; i < lines.length; i++) {
            const line = lines[i];
            if (line == '')
                continue;
            if (line.substr(0, 2) == '--')
                continue;
            statement += line.trim();
            if (line[line.length - 1] == ',')
                statement += ' ';
            if (line[line.length - 1] == ';') {
                output.push(statement);
                statement = '';
            }
        }
        return output;
    }
    // Debug logging of queries, honouring the excluded-prefix list.
    logQuery(sql, params = null) {
        if (!this.sqlQueryLogEnabled_)
            return;
        if (this.logExcludedQueryTypes_.length) {
            const temp = sql.toLowerCase();
            for (let i = 0; i < this.logExcludedQueryTypes_.length; i++) {
                if (temp.indexOf(this.logExcludedQueryTypes_[i].toLowerCase()) === 0)
                    return;
            }
        }
        this.logger().debug(sql);
        if (params !== null && params.length)
            this.logger().debug(JSON.stringify(params));
    }
    // Builds a parameterised INSERT from a plain object. Keys ending in '_'
    // (internal/transient properties) are skipped.
    static insertQuery(tableName, data) {
        if (!data || !Object.keys(data).length)
            throw new Error('Data is empty');
        let keySql = '';
        let valueSql = '';
        const params = [];
        for (const key in data) {
            if (!data.hasOwnProperty(key))
                continue;
            if (key[key.length - 1] == '_')
                continue;
            if (keySql != '')
                keySql += ', ';
            if (valueSql != '')
                valueSql += ', ';
            keySql += `\`${key}\``;
            valueSql += '?';
            params.push(data[key]);
        }
        return {
            sql: `INSERT INTO \`${tableName}\` (${keySql}) VALUES (${valueSql})`,
            params: params,
        };
    }
    // Builds a parameterised UPDATE. `where` may be a raw SQL string or an
    // object whose keys are ANDed together. Keys ending in '_' are skipped.
    static updateQuery(tableName, data, where) {
        if (!data || !Object.keys(data).length)
            throw new Error('Data is empty');
        let sql = '';
        const params = [];
        for (const key in data) {
            if (!data.hasOwnProperty(key))
                continue;
            if (key[key.length - 1] == '_')
                continue;
            if (sql != '')
                sql += ', ';
            sql += `\`${key}\`=?`;
            params.push(data[key]);
        }
        if (typeof where != 'string') {
            const s = [];
            for (const n in where) {
                if (!where.hasOwnProperty(n))
                    continue;
                params.push(where[n]);
                s.push(`\`${n}\`=?`);
            }
            where = s.join(' AND ');
        }
        return {
            sql: `UPDATE \`${tableName}\` SET ${sql} WHERE ${where}`,
            params: params,
        };
    }
    // SQLite cannot alter column types in place, so emulate it by copying the
    // table through a temporary backup table with the new column definitions.
    alterColumnQueries(tableName, fields) {
        const fieldsNoType = [];
        for (const n in fields) {
            if (!fields.hasOwnProperty(n))
                continue;
            fieldsNoType.push(n);
        }
        const fieldsWithType = [];
        for (const n in fields) {
            if (!fields.hasOwnProperty(n))
                continue;
            fieldsWithType.push(`${this.escapeField(n)} ${fields[n]}`);
        }
        let sql = `
		CREATE TEMPORARY TABLE _BACKUP_TABLE_NAME_(_FIELDS_TYPE_);
		INSERT INTO _BACKUP_TABLE_NAME_ SELECT _FIELDS_NO_TYPE_ FROM _TABLE_NAME_;
		DROP TABLE _TABLE_NAME_;
		CREATE TABLE _TABLE_NAME_(_FIELDS_TYPE_);
		INSERT INTO _TABLE_NAME_ SELECT _FIELDS_NO_TYPE_ FROM _BACKUP_TABLE_NAME_;
		DROP TABLE _BACKUP_TABLE_NAME_;
		`;
        sql = sql.replace(/_BACKUP_TABLE_NAME_/g, this.escapeField(`${tableName}_backup`));
        sql = sql.replace(/_TABLE_NAME_/g, this.escapeField(tableName));
        sql = sql.replace(/_FIELDS_NO_TYPE_/g, this.escapeFields(fieldsNoType).join(','));
        sql = sql.replace(/_FIELDS_TYPE_/g, fieldsWithType.join(','));
        return sql.trim().split('\n');
    }
    wrapQueries(queries) {
        const output = [];
        for (let i = 0; i < queries.length; i++) {
            output.push(this.wrapQuery(queries[i]));
        }
        return output;
    }
    // Normalises a query given as a string, [sql, params] array, or an already
    // wrapped object, into { sql, params }.
    wrapQuery(sql, params = null) {
        if (!sql)
            throw new Error(`Cannot wrap empty string: ${sql}`);
        if (Array.isArray(sql)) {
            return {
                sql: sql[0],
                params: sql.length >= 2 ? sql[1] : null,
            };
        }
        else if (typeof sql === 'string') {
            return { sql: sql, params: params };
        }
        else {
            return sql; // Already wrapped
        }
    }
}
|
||||
// The class is the module's default export.
exports.default = Database;
// Column type ids; enumId()/enumName()/formatValue() resolve these via the
// `TYPE_<NAME>` property lookup on the class.
Database.TYPE_UNKNOWN = 0;
Database.TYPE_INT = 1;
Database.TYPE_TEXT = 2;
Database.TYPE_NUMERIC = 3;
//# sourceMappingURL=database.js.map
|
@ -1,429 +0,0 @@
|
||||
"use strict";
|
||||
// TypeScript-generated helper that downlevels async/await: drives a generator
// function and returns a Promise that settles with the generator's final value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap each yielded value in the Promise implementation P (native Promise by default).
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator one step at a time; a rejected awaited value is
        // thrown back into the generator so user try/catch blocks work.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.basicDelta = exports.FileApi = void 0;
|
||||
const Logger_1 = require("./Logger");
|
||||
const shim_1 = require("./shim");
|
||||
const BaseItem_1 = require("./models/BaseItem");
|
||||
const time_1 = require("./time");
|
||||
const { isHidden } = require('./path-utils');
|
||||
const JoplinError = require('./JoplinError');
|
||||
const ArrayUtils = require('./ArrayUtils');
|
||||
const { sprintf } = require('sprintf-js');
|
||||
const Mutex = require('async-mutex').Mutex;
|
||||
const logger = Logger_1.default.create('FileApi');
|
||||
// Decides whether a failed sync-target request is worth retrying based on the
// error's `code`. Defaults to retryable for unknown errors.
function requestCanBeRepeated(error) {
    // Guard against null/undefined: `typeof null === 'object'`, so without the
    // truthiness check reading `error.code` would throw a TypeError here
    // instead of treating the failure as retryable.
    const errorCode = error && typeof error === 'object' && error.code ? error.code : null;
    // The target is explicitly rejecting the item so repeating wouldn't make a difference.
    if (errorCode === 'rejectedByTarget')
        return false;
    // We don't repeat failSafe errors because it's an indication of an issue at the
    // server-level issue which usually cannot be fixed by repeating the request.
    // Also we print the previous requests and responses to the log in this case,
    // so not repeating means there will be less noise in the log.
    if (errorCode === 'failSafe')
        return false;
    return true;
}
|
||||
// Runs `fn`, retrying up to `count` times when requestCanBeRepeated() says the
// error is transient, sleeping 4, 7, 10, ... seconds between attempts.
// While retrying, the shim's own fetch retry counter is forced to 0 and
// restored (via defer()) on every exit path so retries aren't multiplied.
function tryAndRepeat(fn, count) {
    return __awaiter(this, void 0, void 0, function* () {
        let retryCount = 0;
        // Don't use internal fetch retry mechanism since we
        // are already retrying here.
        const shimFetchMaxRetryPrevious = shim_1.default.fetchMaxRetrySet(0);
        const defer = () => {
            shim_1.default.fetchMaxRetrySet(shimFetchMaxRetryPrevious);
        };
        while (true) {
            try {
                const result = yield fn();
                defer();
                return result;
            }
            catch (error) {
                // Give up once retries are exhausted or the error is permanent;
                // restore the shim setting before rethrowing.
                if (retryCount >= count || !requestCanBeRepeated(error)) {
                    defer();
                    throw error;
                }
                retryCount++;
                yield time_1.default.sleep(1 + retryCount * 3);
            }
        }
    });
}
|
||||
// Generic file API used by the synchronizer: wraps a concrete storage driver
// (filesystem, WebDAV, OneDrive, ...) and adds base-directory path resolution,
// request retrying via tryAndRepeat(), logging, and an estimate of the remote
// clock offset for timestamp comparisons.
class FileApi {
    // `baseDir` may be a string or a function returning one (see baseDir()).
    constructor(baseDir, driver) {
        this.logger_ = new Logger_1.default();
        this.syncTargetId_ = null;
        this.tempDirName_ = null;
        this.requestRepeatCount_ = null; // For testing purpose only - normally this value should come from the driver
        this.remoteDateOffset_ = 0;
        this.remoteDateNextCheckTime_ = 0;
        this.remoteDateMutex_ = new Mutex();
        this.initialized_ = false;
        this.baseDir_ = baseDir;
        this.driver_ = driver;
        // Back-reference so the driver can reach this wrapper.
        this.driver_.fileApi_ = this;
    }
    // Initialises the driver at most once (no-op on later calls).
    initialize() {
        return __awaiter(this, void 0, void 0, function* () {
            if (this.initialized_)
                return;
            this.initialized_ = true;
            if (this.driver_.initialize)
                return this.driver_.initialize(this.fullPath(''));
        });
    }
    // Estimates the clock offset between the device and the sync target by
    // writing a temp file and comparing its remote timestamp with the local
    // midpoint of the round-trip.
    fetchRemoteDateOffset_() {
        return __awaiter(this, void 0, void 0, function* () {
            const tempFile = `${this.tempDirName()}/timeCheck${Math.round(Math.random() * 1000000)}.txt`;
            const startTime = Date.now();
            yield this.put(tempFile, 'timeCheck');
            // Normally it should be possible to read the file back immediately but
            // just in case, read it in a loop.
            const loopStartTime = Date.now();
            let stat = null;
            while (Date.now() - loopStartTime < 5000) {
                stat = yield this.stat(tempFile);
                if (stat)
                    break;
                yield time_1.default.msleep(200);
            }
            if (!stat)
                throw new Error('Timed out trying to get sync target clock time');
            void this.delete(tempFile); // No need to await for this call
            const endTime = Date.now();
            const expectedTime = Math.round((endTime + startTime) / 2);
            return stat.updated_time - expectedTime;
        });
    }
    // Approximates the current time on the sync target. It caches the time offset to
    // improve performance.
    remoteDate() {
        return __awaiter(this, void 0, void 0, function* () {
            const shouldSyncTime = () => {
                return !this.remoteDateNextCheckTime_ || Date.now() > this.remoteDateNextCheckTime_;
            };
            if (shouldSyncTime()) {
                const release = yield this.remoteDateMutex_.acquire();
                try {
                    // Another call might have refreshed the time while we were waiting for the mutex,
                    // so check again if we need to refresh.
                    if (shouldSyncTime()) {
                        this.remoteDateOffset_ = yield this.fetchRemoteDateOffset_();
                        // The sync target clock should rarely change but the device one might,
                        // so we need to refresh relatively frequently.
                        this.remoteDateNextCheckTime_ = Date.now() + 10 * 60 * 1000;
                    }
                }
                catch (error) {
                    logger.warn('Could not retrieve remote date - defaulting to device date:', error);
                    this.remoteDateOffset_ = 0;
                    this.remoteDateNextCheckTime_ = Date.now() + 60 * 1000;
                }
                finally {
                    release();
                }
            }
            return new Date(Date.now() + this.remoteDateOffset_);
        });
    }
    // Ideally all requests repeating should be done at the FileApi level to remove duplicate code in the drivers, but
    // historically some drivers (eg. OneDrive) are already handling request repeating, so this is optional, per driver,
    // and it defaults to no repeating.
    requestRepeatCount() {
        if (this.requestRepeatCount_ !== null)
            return this.requestRepeatCount_;
        if (this.driver_.requestRepeatCount)
            return this.driver_.requestRepeatCount();
        return 0;
    }
    // Recent requests recorded by the driver (empty if unsupported).
    lastRequests() {
        return this.driver_.lastRequests ? this.driver_.lastRequests() : [];
    }
    clearLastRequests() {
        if (this.driver_.clearLastRequests)
            this.driver_.clearLastRequests();
    }
    // baseDir_ may be a lazy function - resolve it on each access.
    baseDir() {
        return typeof this.baseDir_ === 'function' ? this.baseDir_() : this.baseDir_;
    }
    tempDirName() {
        if (this.tempDirName_ === null)
            throw Error('Temp dir not set!');
        return this.tempDirName_;
    }
    setTempDirName(v) {
        this.tempDirName_ = v;
    }
    fsDriver() {
        return shim_1.default.fsDriver();
    }
    driver() {
        return this.driver_;
    }
    setSyncTargetId(v) {
        this.syncTargetId_ = v;
    }
    syncTargetId() {
        if (this.syncTargetId_ === null)
            throw new Error('syncTargetId has not been set!!');
        return this.syncTargetId_;
    }
    // A falsy logger is replaced with a fresh default Logger.
    setLogger(l) {
        if (!l)
            l = new Logger_1.default();
        this.logger_ = l;
    }
    logger() {
        return this.logger_;
    }
    // Joins the base directory and `path` with '/', skipping empty segments.
    fullPath(path) {
        const output = [];
        if (this.baseDir())
            output.push(this.baseDir());
        if (path)
            output.push(path);
        return output.join('/');
    }
    // DRIVER MUST RETURN PATHS RELATIVE TO `path`
    // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
    list(path = '', options = null) {
        return __awaiter(this, void 0, void 0, function* () {
            // Fill in option defaults (hidden files excluded, dirs included).
            if (!options)
                options = {};
            if (!('includeHidden' in options))
                options.includeHidden = false;
            if (!('context' in options))
                options.context = null;
            if (!('includeDirs' in options))
                options.includeDirs = true;
            if (!('syncItemsOnly' in options))
                options.syncItemsOnly = false;
            logger.debug(`list ${this.baseDir()}`);
            const result = yield tryAndRepeat(() => this.driver_.list(this.fullPath(path), options), this.requestRepeatCount());
            if (!options.includeHidden) {
                const temp = [];
                for (let i = 0; i < result.items.length; i++) {
                    if (!isHidden(result.items[i].path))
                        temp.push(result.items[i]);
                }
                result.items = temp;
            }
            if (!options.includeDirs) {
                result.items = result.items.filter((f) => !f.isDir);
            }
            if (options.syncItemsOnly) {
                // Keep only files whose names look like Joplin sync items.
                result.items = result.items.filter((f) => !f.isDir && BaseItem_1.default.isSystemPath(f.path));
            }
            return result;
        });
    }
    // Deprecated
    setTimestamp(path, timestampMs) {
        logger.debug(`setTimestamp ${this.fullPath(path)}`);
        return tryAndRepeat(() => this.driver_.setTimestamp(this.fullPath(path), timestampMs), this.requestRepeatCount());
        // return this.driver_.setTimestamp(this.fullPath(path), timestampMs);
    }
    mkdir(path) {
        logger.debug(`mkdir ${this.fullPath(path)}`);
        return tryAndRepeat(() => this.driver_.mkdir(this.fullPath(path)), this.requestRepeatCount());
    }
    // Stats a remote file; the returned object's `path` is rewritten back to
    // the caller-relative path.
    stat(path) {
        return __awaiter(this, void 0, void 0, function* () {
            logger.debug(`stat ${this.fullPath(path)}`);
            const output = yield tryAndRepeat(() => this.driver_.stat(this.fullPath(path)), this.requestRepeatCount());
            if (!output)
                return output;
            output.path = path;
            return output;
            // return this.driver_.stat(this.fullPath(path)).then((output) => {
            // if (!output) return output;
            // output.path = path;
            // return output;
            // });
        });
    }
    // Returns UTF-8 encoded string by default, or a Response if `options.target = 'file'`
    get(path, options = null) {
        if (!options)
            options = {};
        if (!options.encoding)
            options.encoding = 'utf8';
        logger.debug(`get ${this.fullPath(path)}`);
        return tryAndRepeat(() => this.driver_.get(this.fullPath(path), options), this.requestRepeatCount());
    }
    // Uploads content; when `options.source === 'file'` the local source file
    // must exist, otherwise a 'fileNotFound' JoplinError is thrown.
    put(path, content, options = null) {
        return __awaiter(this, void 0, void 0, function* () {
            logger.debug(`put ${this.fullPath(path)}`, options);
            if (options && options.source === 'file') {
                if (!(yield this.fsDriver().exists(options.path)))
                    throw new JoplinError(`File not found: ${options.path}`, 'fileNotFound');
            }
            return tryAndRepeat(() => this.driver_.put(this.fullPath(path), content, options), this.requestRepeatCount());
        });
    }
    delete(path) {
        logger.debug(`delete ${this.fullPath(path)}`);
        return tryAndRepeat(() => this.driver_.delete(this.fullPath(path)), this.requestRepeatCount());
    }
    // Deprecated
    move(oldPath, newPath) {
        logger.debug(`move ${this.fullPath(oldPath)} => ${this.fullPath(newPath)}`);
        return tryAndRepeat(() => this.driver_.move(this.fullPath(oldPath), this.fullPath(newPath)), this.requestRepeatCount());
    }
    // Deprecated
    format() {
        return tryAndRepeat(() => this.driver_.format(), this.requestRepeatCount());
    }
    clearRoot() {
        return tryAndRepeat(() => this.driver_.clearRoot(this.baseDir()), this.requestRepeatCount());
    }
    // Lists changes since the context carried in `options` (driver-specific;
    // drivers without native delta support use basicDelta below).
    delta(path, options = null) {
        logger.debug(`delta ${this.fullPath(path)}`);
        return tryAndRepeat(() => this.driver_.delta(this.fullPath(path), options), this.requestRepeatCount());
    }
}
exports.FileApi = FileApi;
|
||||
// Normalizes the sync `options.context` into a well-formed delta context,
// substituting safe defaults for anything missing or invalid (e.g. a
// non-parseable timestamp becomes 0, filesAtTimestamp is defensively copied).
function basicDeltaContextFromOptions_(options) {
    const defaults = {
        timestamp: 0,
        filesAtTimestamp: [],
        statsCache: null,
        statIdsCache: null,
        deletedItemsProcessed: false,
    };
    const context = options ? options.context : null;
    if (!context) return defaults;
    const parsedDate = new Date(context.timestamp);
    const timestamp = isNaN(parsedDate.getTime()) ? 0 : context.timestamp;
    const filesAtTimestamp = Array.isArray(context.filesAtTimestamp) ? context.filesAtTimestamp.slice() : [];
    const statsCache = context.statsCache ? context.statsCache : null;
    const statIdsCache = context.statIdsCache ? context.statIdsCache : null;
    const deletedItemsProcessed = 'deletedItemsProcessed' in context ? context.deletedItemsProcessed : false;
    return { timestamp, filesAtTimestamp, statsCache, statIdsCache, deletedItemsProcessed };
}
|
||||
// This is the basic delta algorithm, which can be used in case the cloud service does not have
|
||||
// a built-in delta API. OneDrive and Dropbox have one for example, but Nextcloud and obviously
|
||||
// the file system do not.
|
||||
// Generic delta implementation for sync targets that have no native delta
// API. It compares the full directory listing (from `getDirStatFn`) with the
// caller's local item IDs (from `options.allItemIdsHandler`) to find created/
// updated items and deletions, returning at most `outputLimit` changed items
// per call; pagination state is carried in the returned `context`.
function basicDelta(path, getDirStatFn, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const outputLimit = 50;
        const itemIds = yield options.allItemIdsHandler();
        if (!Array.isArray(itemIds))
            throw new Error('Delta API not supported - local IDs must be provided');
        const logger = options && options.logger ? options.logger : new Logger_1.default();
        const context = basicDeltaContextFromOptions_(options);
        // A timestamp in the future means the previous context is unreliable
        // (eg. clock skew between client and target); warn but keep going.
        if (context.timestamp > Date.now()) {
            logger.warn(`BasicDelta: Context timestamp is greater than current time: ${context.timestamp}`);
            logger.warn('BasicDelta: Sync will continue but it is likely that nothing will be synced');
        }
        const newContext = {
            timestamp: context.timestamp,
            filesAtTimestamp: context.filesAtTimestamp.slice(),
            statsCache: context.statsCache,
            statIdsCache: context.statIdsCache,
            deletedItemsProcessed: context.deletedItemsProcessed,
        };
        // Stats are cached until all items have been processed (until hasMore is false)
        if (newContext.statsCache === null) {
            newContext.statsCache = yield getDirStatFn(path);
            // Oldest-first order makes pagination by timestamp stable.
            newContext.statsCache.sort(function (a, b) {
                return a.updated_time - b.updated_time;
            });
            newContext.statIdsCache = newContext.statsCache.filter((item) => BaseItem_1.default.isSystemPath(item.path)).map((item) => BaseItem_1.default.pathToId(item.path));
            newContext.statIdsCache.sort(); // Items must be sorted to use binary search below
        }
        let output = [];
        // Counters for the debug report logged below.
        const updateReport = {
            timestamp: context.timestamp,
            older: 0,
            newer: 0,
            equal: 0,
        };
        // Find out which files have been changed since the last time. Note that we keep
        // both the timestamp of the most recent change, *and* the items that exactly match
        // this timestamp. This to handle cases where an item is modified while this delta
        // function is running. For example:
        // t0: Item 1 is changed
        // t0: Sync items - run delta function
        // t0: While delta() is running, modify Item 2
        // Since item 2 was modified within the same millisecond, it would be skipped in the
        // next sync if we relied exclusively on a timestamp.
        for (let i = 0; i < newContext.statsCache.length; i++) {
            const stat = newContext.statsCache[i];
            if (stat.isDir)
                continue;
            if (stat.updated_time < context.timestamp) {
                updateReport.older++;
                continue;
            }
            // Special case for items that exactly match the timestamp
            if (stat.updated_time === context.timestamp) {
                if (context.filesAtTimestamp.indexOf(stat.path) >= 0) {
                    updateReport.equal++;
                    continue;
                }
            }
            // A strictly newer timestamp resets the "items at this exact
            // timestamp" list, since it only applies to the latest timestamp.
            if (stat.updated_time > newContext.timestamp) {
                newContext.timestamp = stat.updated_time;
                newContext.filesAtTimestamp = [];
                updateReport.newer++;
            }
            newContext.filesAtTimestamp.push(stat.path);
            output.push(stat);
            if (output.length >= outputLimit)
                break;
        }
        logger.info(`BasicDelta: Report: ${JSON.stringify(updateReport)}`);
        if (!newContext.deletedItemsProcessed) {
            // Find out which items have been deleted on the sync target by comparing the items
            // we have to the items on the target.
            // Note that when deleted items are processed it might result in the output having
            // more items than outputLimit. This is acceptable since delete operations are cheap.
            const deletedItems = [];
            for (let i = 0; i < itemIds.length; i++) {
                const itemId = itemIds[i];
                if (ArrayUtils.binarySearch(newContext.statIdsCache, itemId) < 0) {
                    deletedItems.push({
                        path: BaseItem_1.default.systemPath(itemId),
                        isDeleted: true,
                    });
                }
            }
            const percentDeleted = itemIds.length ? deletedItems.length / itemIds.length : 0;
            // If more than 90% of the notes are going to be deleted, it's most likely a
            // configuration error or bug. For example, if the user moves their Nextcloud
            // directory, or if a network drive gets disconnected and returns an empty dir
            // instead of an error. In that case, we don't wipe out the user data, unless
            // they have switched off the fail-safe.
            if (options.wipeOutFailSafe && percentDeleted >= 0.90)
                throw new JoplinError(sprintf('Fail-safe: Sync was interrupted because %d%% of the data (%d items) is about to be deleted. To override this behaviour disable the fail-safe in the sync settings.', Math.round(percentDeleted * 100), deletedItems.length), 'failSafe');
            output = output.concat(deletedItems);
        }
        newContext.deletedItemsProcessed = true;
        const hasMore = output.length >= outputLimit;
        if (!hasMore) {
            // Clear temporary info from context. It's especially important to remove deletedItemsProcessed
            // so that they are processed again on the next sync.
            newContext.statsCache = null;
            newContext.statIdsCache = null;
            delete newContext.deletedItemsProcessed;
        }
        return {
            hasMore: hasMore,
            context: newContext,
            items: output,
        };
    });
}
|
||||
// Exported so sync targets without a native delta API can reuse this algorithm.
exports.basicDelta = basicDelta;
|
||||
//# sourceMappingURL=file-api.js.map
|
@ -1,61 +0,0 @@
|
||||
"use strict";
|
||||
// TypeScript-compiler-generated helper: drives an async function expressed as
// a generator, adopting each yielded value into a Promise and stepping the
// generator on settlement. Do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const tool_utils_1 = require("./tool-utils");
|
||||
const sqlts = require('@rmp135/sql-ts').default;
|
||||
const fs = require('fs-extra');
|
||||
// Regenerates `packages/lib/services/database/types.ts` from the desktop dev
// profile's SQLite database using @rmp135/sql-ts: runs the CLI once to make
// sure the database file exists, dumps the schema to type definitions, and
// writes them with an auto-generated header.
function main() {
    return __awaiter(this, void 0, void 0, function* () {
        // Run the CLI app once so as to generate the database file
        process.chdir(`${tool_utils_1.rootDir}/packages/app-cli`);
        yield tool_utils_1.execCommand2('npm start -- version');
        const sqlTsConfig = {
            'client': 'sqlite3',
            'connection': {
                'filename': `${require('os').homedir()}/.config/joplindev-desktop/database.sqlite`,
            },
            'tableNameCasing': 'pascal',
            'singularTableNames': true,
            'useNullAsDefault': true,
            // SQLite FTS shadow tables are internal and should not get
            // generated types.
            'excludedTables': [
                'main.notes_fts',
                'main.notes_fts_segments',
                'main.notes_fts_segdir',
                'main.notes_fts_docsize',
                'main.notes_fts_stat',
            ],
        };
        const definitions = yield sqlts.toObject(sqlTsConfig);
        // Add the `type_` discriminator column to every table: it is not part
        // of the schema dump but callers rely on it being typed.
        definitions.tables = definitions.tables.map((t) => {
            t.columns.push({
                nullable: false,
                name: 'type_',
                type: 'int',
                optional: true,
                isEnum: false,
                propertyName: 'type_',
                propertyType: 'number',
            });
            return t;
        });
        // Make every generated property optional.
        const tsString = sqlts.fromObject(definitions, sqlTsConfig)
            .replace(/": /g, '"?: ');
        // String.prototype.substr is deprecated; slice() behaves identically
        // here since the start index is always non-negative.
        const header = `// AUTO-GENERATED BY ${__filename.slice(tool_utils_1.rootDir.length + 1)}`;
        const targetFile = `${tool_utils_1.rootDir}/packages/lib/services/database/types.ts`;
        console.info(`Writing type definitions to ${targetFile}...`);
        yield fs.writeFile(targetFile, `${header}\n\n${tsString}`, 'utf8');
    });
}
|
||||
// Script entry point: log any failure and exit non-zero so callers (eg. CI)
// can detect it.
main().catch((err) => {
    console.error(err);
    process.exit(1);
});
|
||||
//# sourceMappingURL=generate-database-types.js.map
|
Loading…
Reference in New Issue
Block a user