Mirror of https://github.com/laurent22/joplin.git
Compare commits: android-v3...server_con (23 commits)

Commits:
a5b5ef1886
68f77f6bbc
b402bc7ff7
0ed0690bf8
467b1156cc
6a9d9f6542
69b413ce2b
e3d6334372
cc4c50c219
5d646f7ced
fa3612405c
20df46c066
9b0a659416
a00e0e7043
560523bdc2
a13242e803
72834fcfc4
731142218b
17b580b71b
f7be45c236
b298861dc3
2343de3763
abb37258d0
@@ -4,7 +4,9 @@ const nodeSqlite = require('sqlite3');
shimInit({ nodeSqlite });

// We don't want the tests to fail due to timeout, especially on CI, and certain
// tests can take more time since we do integration testing too.
jest.setTimeout(30 * 1000);
// tests can take more time since we do integration testing too. The share tests
// in particular can take a while.

jest.setTimeout(60 * 1000);

process.env.JOPLIN_IS_TESTING = '1';
packages/server/package-lock.json (generated, 3673 changed lines): diff suppressed because it is too large.
@@ -21,6 +21,7 @@
"watch": "tsc --watch --project tsconfig.json"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.40.0",
"@fortawesome/fontawesome-free": "^5.15.1",
"@joplin/lib": "~2.6",
"@joplin/renderer": "~2.6",
Binary file not shown.
@@ -5,7 +5,7 @@ import * as Koa from 'koa';
import * as fs from 'fs-extra';
import Logger, { LoggerWrapper, TargetType } from '@joplin/lib/Logger';
import config, { initConfig, runningInDocker } from './config';
import { migrateLatest, waitForConnection, sqliteDefaultDir, latestMigration } from './db';
import { migrateLatest, waitForConnection, sqliteDefaultDir, latestMigration, DbConnection } from './db';
import { AppContext, Env, KoaNext } from './utils/types';
import FsDriverNode from '@joplin/lib/fs-driver-node';
import routeHandler from './middleware/routeHandler';
@@ -17,10 +17,11 @@ import startServices from './utils/startServices';
import { credentialFile } from './utils/testing/testUtils';
import apiVersionHandler from './middleware/apiVersionHandler';
import clickJackingHandler from './middleware/clickJackingHandler';
import newModelFactory from './models/factory';
import newModelFactory, { Options } from './models/factory';
import setupCommands from './utils/setupCommands';
import { RouteResponseFormat, routeResponseFormat } from './utils/routeUtils';
import { parseEnv } from './env';
import storageDriverFromConfig from './models/items/storage/storageDriverFromConfig';

interface Argv {
env?: Env;
@@ -61,6 +62,8 @@ function appLogger(): LoggerWrapper {
}

function markPasswords(o: Record<string, any>): Record<string, any> {
if (!o) return o;

const output: Record<string, any> = {};

for (const k of Object.keys(o)) {
@@ -219,6 +222,13 @@ async function main() {
fs.writeFileSync(pidFile, `${process.pid}`);
}

const newModelFactoryOptions = async (db: DbConnection): Promise<Options> => {
return {
storageDriver: await storageDriverFromConfig(config().storageDriver, db, { assignDriverId: env !== 'buildTypes' }),
storageDriverFallback: await storageDriverFromConfig(config().storageDriverFallback, db, { assignDriverId: env !== 'buildTypes' }),
};
};

let runCommandAndExitApp = true;

if (selectedCommand) {
@@ -235,7 +245,7 @@ async function main() {
});
} else {
const connectionCheck = await waitForConnection(config().database);
const models = newModelFactory(connectionCheck.connection, config());
const models = newModelFactory(connectionCheck.connection, config(), await newModelFactoryOptions(connectionCheck.connection));

await selectedCommand.run(commandArgv, {
db: connectionCheck.connection,
@@ -253,6 +263,8 @@ async function main() {
appLogger().info('Log dir:', config().logDir);
appLogger().info('DB Config:', markPasswords(config().database));
appLogger().info('Mailer Config:', markPasswords(config().mailer));
appLogger().info('Content driver:', markPasswords(config().storageDriver));
appLogger().info('Content driver (fallback):', markPasswords(config().storageDriverFallback));

appLogger().info('Trying to connect to database...');
const connectionCheck = await waitForConnection(config().database);
@@ -263,7 +275,8 @@ async function main() {
appLogger().info('Connection check:', connectionCheckLogInfo);
const ctx = app.context as AppContext;

await setupAppContext(ctx, env, connectionCheck.connection, appLogger);
await setupAppContext(ctx, env, connectionCheck.connection, appLogger, await newModelFactoryOptions(connectionCheck.connection));

await initializeJoplinUtils(config(), ctx.joplinBase.models, ctx.joplinBase.services.mustache);

if (config().database.autoMigration) {
@@ -3,6 +3,7 @@ import { Config, DatabaseConfig, DatabaseConfigClient, Env, MailerConfig, RouteT
import * as pathUtils from 'path';
import { loadStripeConfig, StripePublicConfig } from '@joplin/lib/utils/joplinCloud';
import { EnvVariables } from './env';
import parseStorageDriverConnectionString from './models/items/storage/parseStorageDriverConnectionString';

interface PackageJson {
version: string;
@@ -130,6 +131,8 @@ export async function initConfig(envType: Env, env: EnvVariables, overrides: any
supportName: env.SUPPORT_NAME || appName,
businessEmail: env.BUSINESS_EMAIL || supportEmail,
cookieSecure: env.COOKIES_SECURE,
storageDriver: parseStorageDriverConnectionString(env.STORAGE_DRIVER),
storageDriverFallback: parseStorageDriverConnectionString(env.STORAGE_DRIVER_FALLBACK),
...overrides,
};
}
@@ -1,70 +1,13 @@
export interface EnvVariables {
// The possible env variables and their defaults are listed below.
//
// The env variables can be of type string, integer or boolean. When the type is
// boolean, set the variable to "0" or "1" in your env file.

const defaultEnvValues: EnvVariables = {
// ==================================================
// General config
// ==================================================

APP_NAME: string;
APP_PORT: number;
SIGNUP_ENABLED: boolean;
TERMS_ENABLED: boolean;
ACCOUNT_TYPES_ENABLED: boolean;
ERROR_STACK_TRACES: boolean;
COOKIES_SECURE: boolean;
RUNNING_IN_DOCKER: boolean;

// ==================================================
// URL config
// ==================================================

APP_BASE_URL: string;
USER_CONTENT_BASE_URL: string;
API_BASE_URL: string;
JOPLINAPP_BASE_URL: string;

// ==================================================
// Database config
// ==================================================

DB_CLIENT: string;
DB_SLOW_QUERY_LOG_ENABLED: boolean;
DB_SLOW_QUERY_LOG_MIN_DURATION: number;
DB_AUTO_MIGRATION: boolean;

POSTGRES_PASSWORD: string;
POSTGRES_DATABASE: string;
POSTGRES_USER: string;
POSTGRES_HOST: string;
POSTGRES_PORT: number;

// This must be the full path to the database file
SQLITE_DATABASE: string;

// ==================================================
// Mailer config
// ==================================================

MAILER_ENABLED: boolean;
MAILER_HOST: string;
MAILER_PORT: number;
MAILER_SECURE: boolean;
MAILER_AUTH_USER: string;
MAILER_AUTH_PASSWORD: string;
MAILER_NOREPLY_NAME: string;
MAILER_NOREPLY_EMAIL: string;

SUPPORT_EMAIL: string;
SUPPORT_NAME: string;
BUSINESS_EMAIL: string;

// ==================================================
// Stripe config
// ==================================================

STRIPE_SECRET_KEY: string;
STRIPE_WEBHOOK_SECRET: string;
}

const defaultEnvValues: EnvVariables = {
APP_NAME: 'Joplin Server',
APP_PORT: 22300,
SIGNUP_ENABLED: false,
@@ -74,11 +17,19 @@ const defaultEnvValues: EnvVariables = {
COOKIES_SECURE: false,
RUNNING_IN_DOCKER: false,

// ==================================================
// URL config
// ==================================================

APP_BASE_URL: '',
USER_CONTENT_BASE_URL: '',
API_BASE_URL: '',
JOPLINAPP_BASE_URL: 'https://joplinapp.org',

// ==================================================
// Database config
// ==================================================

DB_CLIENT: 'sqlite3',
DB_SLOW_QUERY_LOG_ENABLED: false,
DB_SLOW_QUERY_LOG_MIN_DURATION: 1000,
@@ -90,8 +41,20 @@ const defaultEnvValues: EnvVariables = {
POSTGRES_HOST: '',
POSTGRES_PORT: 5432,

// This must be the full path to the database file
SQLITE_DATABASE: '',

// ==================================================
// Content driver config
// ==================================================

STORAGE_DRIVER: 'Type=Database',
STORAGE_DRIVER_FALLBACK: '',

// ==================================================
// Mailer config
// ==================================================

MAILER_ENABLED: false,
MAILER_HOST: '',
MAILER_PORT: 587,
@@ -105,10 +68,62 @@ const defaultEnvValues: EnvVariables = {
SUPPORT_NAME: '',
BUSINESS_EMAIL: '',

// ==================================================
// Stripe config
// ==================================================

STRIPE_SECRET_KEY: '',
STRIPE_WEBHOOK_SECRET: '',
};

export interface EnvVariables {
APP_NAME: string;
APP_PORT: number;
SIGNUP_ENABLED: boolean;
TERMS_ENABLED: boolean;
ACCOUNT_TYPES_ENABLED: boolean;
ERROR_STACK_TRACES: boolean;
COOKIES_SECURE: boolean;
RUNNING_IN_DOCKER: boolean;

APP_BASE_URL: string;
USER_CONTENT_BASE_URL: string;
API_BASE_URL: string;
JOPLINAPP_BASE_URL: string;

DB_CLIENT: string;
DB_SLOW_QUERY_LOG_ENABLED: boolean;
DB_SLOW_QUERY_LOG_MIN_DURATION: number;
DB_AUTO_MIGRATION: boolean;

POSTGRES_PASSWORD: string;
POSTGRES_DATABASE: string;
POSTGRES_USER: string;
POSTGRES_HOST: string;
POSTGRES_PORT: number;

SQLITE_DATABASE: string;

STORAGE_DRIVER: string;
STORAGE_DRIVER_FALLBACK: string;

MAILER_ENABLED: boolean;
MAILER_HOST: string;
MAILER_PORT: number;
MAILER_SECURE: boolean;
MAILER_AUTH_USER: string;
MAILER_AUTH_PASSWORD: string;
MAILER_NOREPLY_NAME: string;
MAILER_NOREPLY_EMAIL: string;

SUPPORT_EMAIL: string;
SUPPORT_NAME: string;
BUSINESS_EMAIL: string;

STRIPE_SECRET_KEY: string;
STRIPE_WEBHOOK_SECRET: string;
}

export function parseEnv(rawEnv: any, defaultOverrides: any = null): EnvVariables {
const output: EnvVariables = {
...defaultEnvValues,
@@ -125,7 +140,7 @@ export function parseEnv(rawEnv: any, defaultOverrides: any = null): EnvVariable
if (isNaN(v)) throw new Error(`Invalid number value for env variable ${key} = ${rawEnvValue}`);
(output as any)[key] = v;
} else if (typeof value === 'boolean') {
if (rawEnvValue !== '0' && rawEnvValue !== '1') throw new Error(`Invalid boolean for for env variable ${key}: ${rawEnvValue}`);
if (rawEnvValue !== '0' && rawEnvValue !== '1') throw new Error(`Invalid boolean value for env variable ${key}: ${rawEnvValue} (Should be either "0" or "1")`);
(output as any)[key] = rawEnvValue === '1';
} else if (typeof value === 'string') {
(output as any)[key] = `${rawEnvValue}`;
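A minimal usage sketch of parseEnv (illustrative, not part of this changeset; the call site and values are assumed). Raw env values always arrive as strings, and the type of each default above decides how they are coerced:

    import { parseEnv } from './env';

    const env = parseEnv({
        APP_PORT: '22300',               // number default: converted to a number, invalid values throw
        MAILER_ENABLED: '1',             // boolean default: must be '0' or '1'
        STORAGE_DRIVER: 'Type=Database', // string default: kept as a string
    });

    console.info(env.APP_PORT);       // 22300 (number)
    console.info(env.MAILER_ENABLED); // true (boolean)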
packages/server/src/migrations/20211105183559_storage.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
import { Knex } from 'knex';
import { DbConnection } from '../db';

export async function up(db: DbConnection): Promise<any> {
await db.schema.createTable('storages', (table: Knex.CreateTableBuilder) => {
table.increments('id').unique().primary().notNullable();
table.text('connection_string').notNullable();
});

await db('storages').insert({
connection_string: 'Type=Database',
});

// First we create the column and set a default so as to populate the
// content_storage_id field.
await db.schema.alterTable('items', (table: Knex.CreateTableBuilder) => {
table.integer('content_storage_id').defaultTo(1).notNullable();
});

// Once it's set, we remove the default as that should be explicitly set.
await db.schema.alterTable('items', (table: Knex.CreateTableBuilder) => {
table.integer('content_storage_id').notNullable().alter();
});
}

export async function down(db: DbConnection): Promise<any> {
await db.schema.dropTable('storages');

await db.schema.alterTable('items', (table: Knex.CreateTableBuilder) => {
table.dropColumn('content_storage_id');
});
}
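The migration adds the column with a temporary default of 1 so that rows that already exist are backfilled, then removes the default. A small illustrative check (assumed code, not part of the migration) of the state it leaves behind:

    // After up() has run: storage row 1 describes the database driver, and
    // every pre-existing item row points at it through content_storage_id.
    const storage = await db('storages').where('id', 1).first();
    console.info(storage.connection_string); // 'Type=Database'

    const unassigned = await db('items').whereNull('content_storage_id');
    console.info(unassigned.length); // 0, because the temporary default backfilled the column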
@@ -3,7 +3,7 @@ import { DbConnection } from '../db';
import TransactionHandler from '../utils/TransactionHandler';
import uuidgen from '../utils/uuidgen';
import { ErrorUnprocessableEntity, ErrorBadRequest } from '../utils/errors';
import { Models } from './factory';
import { Models, NewModelFactoryHandler } from './factory';
import * as EventEmitter from 'events';
import { Config } from '../utils/types';
import personalizedUserContentBaseUrl from '@joplin/lib/services/joplinServer/personalizedUserContentBaseUrl';
@@ -54,12 +54,12 @@ export default abstract class BaseModel<T> {
private defaultFields_: string[] = [];
private db_: DbConnection;
private transactionHandler_: TransactionHandler;
private modelFactory_: Function;
private modelFactory_: NewModelFactoryHandler;
private static eventEmitter_: EventEmitter = null;
private config_: Config;
private savePoints_: SavePoint[] = [];

public constructor(db: DbConnection, modelFactory: Function, config: Config) {
public constructor(db: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
this.db_ = db;
this.modelFactory_ = modelFactory;
this.config_ = config;
@@ -71,7 +71,7 @@ export default abstract class BaseModel<T> {
// connection is passed to it. That connection can be the regular db
// connection, or the active transaction.
protected models(db: DbConnection = null): Models {
return this.modelFactory_(db || this.db, this.config_);
return this.modelFactory_(db || this.db);
}

protected get baseUrl(): string {
@@ -90,7 +90,7 @@ export default abstract class BaseModel<T> {
return this.config_.appName;
}

protected get db(): DbConnection {
public get db(): DbConnection {
if (this.transactionHandler_.activeTransaction) return this.transactionHandler_.activeTransaction;
return this.db_;
}
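A short sketch of what the NewModelFactoryHandler change means in practice (hypothetical wiring, mirroring Models.newModelFactory further down in this changeset): the handler a model holds now only needs a connection, because config and storage options are captured by the Models instance that created it:

    const handler: NewModelFactoryHandler = (db: DbConnection) => {
        // config and options are closed over instead of being passed on every call.
        return new Models(db, config, options);
    };

    // Inside BaseModel, switching to the active transaction is then simply:
    // this.modelFactory_(transactionDb)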
@@ -38,12 +38,12 @@ describe('ChangeModel', function() {
const changeModel = models().change();

await msleep(1); const item1 = await models().item().makeTestItem(user.id, 1); // [1] CREATE 1
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: '0000000000000000000000000000001A.md' }); // [2] UPDATE 1a
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: '0000000000000000000000000000001B.md' }); // [3] UPDATE 1b
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: '0000000000000000000000000000001A.md', content: Buffer.from('') }); // [2] UPDATE 1a
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: '0000000000000000000000000000001B.md', content: Buffer.from('') }); // [3] UPDATE 1b
await msleep(1); const item2 = await models().item().makeTestItem(user.id, 2); // [4] CREATE 2
await msleep(1); await itemModel.saveForUser(user.id, { id: item2.id, name: '0000000000000000000000000000002A.md' }); // [5] UPDATE 2a
await msleep(1); await itemModel.saveForUser(user.id, { id: item2.id, name: '0000000000000000000000000000002A.md', content: Buffer.from('') }); // [5] UPDATE 2a
await msleep(1); await itemModel.delete(item1.id); // [6] DELETE 1
await msleep(1); await itemModel.saveForUser(user.id, { id: item2.id, name: '0000000000000000000000000000002B.md' }); // [7] UPDATE 2b
await msleep(1); await itemModel.saveForUser(user.id, { id: item2.id, name: '0000000000000000000000000000002B.md', content: Buffer.from('') }); // [7] UPDATE 2b
await msleep(1); const item3 = await models().item().makeTestItem(user.id, 3); // [8] CREATE 3

// Check that the 8 changes were created
@@ -120,7 +120,7 @@ describe('ChangeModel', function() {

let i = 1;
await msleep(1); const item1 = await models().item().makeTestItem(user.id, 1); // CREATE 1
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: `test_mod${i++}` }); // UPDATE 1
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: `test_mod${i++}`, content: Buffer.from('') }); // UPDATE 1

await expectThrow(async () => changeModel.delta(user.id, { limit: 1, cursor: 'invalid' }), 'resyncRequired');
});
@@ -7,6 +7,10 @@ import { ApiError, ErrorForbidden, ErrorUnprocessableEntity } from '../utils/err
import { Knex } from 'knex';
import { ChangePreviousItem } from './ChangeModel';
import { unique } from '../utils/array';
import StorageDriverBase, { Context } from './items/storage/StorageDriverBase';
import { DbConnection } from '../db';
import { Config, StorageDriverMode } from '../utils/types';
import { NewModelFactoryHandler, Options } from './factory';

const mimeUtils = require('@joplin/lib/mime-utils.js').mime;

@@ -38,9 +42,22 @@ export interface ItemSaveOption extends SaveOptions {
shareId?: Uuid;
}

export interface ItemLoadOptions extends LoadOptions {
withContent?: boolean;
}

export default class ItemModel extends BaseModel<Item> {

private updatingTotalSizes_: boolean = false;
private storageDriver_: StorageDriverBase = null;
private storageDriverFallback_: StorageDriverBase = null;

public constructor(db: DbConnection, modelFactory: NewModelFactoryHandler, config: Config, options: Options) {
super(db, modelFactory, config);

this.storageDriver_ = options.storageDriver;
this.storageDriverFallback_ = options.storageDriverFallback;
}

protected get tableName(): string {
return 'items';
@@ -106,62 +123,106 @@ export default class ItemModel extends BaseModel<Item> {
return path.replace(extractNameRegex, '$1');
}

public byShareIdQuery(shareId: Uuid, options: LoadOptions = {}): Knex.QueryBuilder {
public byShareIdQuery(shareId: Uuid, options: ItemLoadOptions = {}): Knex.QueryBuilder {
return this
.db('items')
.select(this.selectFields(options, null, 'items'))
.where('jop_share_id', '=', shareId);
}

public async byShareId(shareId: Uuid, options: LoadOptions = {}): Promise<Item[]> {
public async byShareId(shareId: Uuid, options: ItemLoadOptions = {}): Promise<Item[]> {
const query = this.byShareIdQuery(shareId, options);
return await query;
}

public async loadByJopIds(userId: Uuid | Uuid[], jopIds: string[], options: LoadOptions = {}): Promise<Item[]> {
private async storageDriverWrite(itemId: Uuid, content: Buffer, context: Context) {
await this.storageDriver_.write(itemId, content, context);

if (this.storageDriverFallback_) {
if (this.storageDriverFallback_.mode === StorageDriverMode.ReadWrite) {
await this.storageDriverFallback_.write(itemId, content, context);
} else if (this.storageDriverFallback_.mode === StorageDriverMode.ReadOnly) {
await this.storageDriverFallback_.write(itemId, Buffer.from(''), context);
} else {
throw new Error(`Unsupported fallback mode: ${this.storageDriverFallback_.mode}`);
}
}
}

private async storageDriverRead(itemId: Uuid, context: Context) {
if (await this.storageDriver_.exists(itemId, context)) {
return this.storageDriver_.read(itemId, context);
} else {
if (!this.storageDriverFallback_) throw new Error(`Content does not exist but fallback content driver is not defined: ${itemId}`);
return this.storageDriverFallback_.read(itemId, context);
}
}

public async loadByJopIds(userId: Uuid | Uuid[], jopIds: string[], options: ItemLoadOptions = {}): Promise<Item[]> {
if (!jopIds.length) return [];

const userIds = Array.isArray(userId) ? userId : [userId];
if (!userIds.length) return [];

return this
const rows: Item[] = await this
.db('user_items')
.leftJoin('items', 'items.id', 'user_items.item_id')
.distinct(this.selectFields(options, null, 'items'))
.whereIn('user_items.user_id', userIds)
.whereIn('jop_id', jopIds);

if (options.withContent) {
for (const row of rows) {
row.content = await this.storageDriverRead(row.id, { models: this.models() });
}
}

return rows;
}

public async loadByJopId(userId: Uuid, jopId: string, options: LoadOptions = {}): Promise<Item> {
public async loadByJopId(userId: Uuid, jopId: string, options: ItemLoadOptions = {}): Promise<Item> {
const items = await this.loadByJopIds(userId, [jopId], options);
return items.length ? items[0] : null;
}

public async loadByNames(userId: Uuid | Uuid[], names: string[], options: LoadOptions = {}): Promise<Item[]> {
public async loadByNames(userId: Uuid | Uuid[], names: string[], options: ItemLoadOptions = {}): Promise<Item[]> {
if (!names.length) return [];

const userIds = Array.isArray(userId) ? userId : [userId];

return this
const rows: Item[] = await this
.db('user_items')
.leftJoin('items', 'items.id', 'user_items.item_id')
.distinct(this.selectFields(options, null, 'items'))
.whereIn('user_items.user_id', userIds)
.whereIn('name', names);

if (options.withContent) {
for (const row of rows) {
row.content = await this.storageDriverRead(row.id, { models: this.models() });
}
}

return rows;
}

public async loadByName(userId: Uuid, name: string, options: LoadOptions = {}): Promise<Item> {
public async loadByName(userId: Uuid, name: string, options: ItemLoadOptions = {}): Promise<Item> {
const items = await this.loadByNames(userId, [name], options);
return items.length ? items[0] : null;
}

public async loadWithContent(id: Uuid, options: LoadOptions = {}): Promise<Item> {
return this
.db('user_items')
.leftJoin('items', 'items.id', 'user_items.item_id')
.select(this.selectFields(options, ['*'], 'items'))
.where('items.id', '=', id)
.first();
public async loadWithContent(id: Uuid, options: ItemLoadOptions = {}): Promise<Item> {
const content = await this.storageDriverRead(id, { models: this.models() });

return {
...await this
.db('user_items')
.leftJoin('items', 'items.id', 'user_items.item_id')
.select(this.selectFields(options, ['*'], 'items'))
.where('items.id', '=', id)
.first(),
content,
};
}

public async loadAsSerializedJoplinItem(id: Uuid): Promise<string> {
@@ -255,9 +316,11 @@ export default class ItemModel extends BaseModel<Item> {
return this.itemToJoplinItem(raw);
}

public async saveFromRawContent(user: User, rawContentItems: SaveFromRawContentItem[], options: ItemSaveOption = null): Promise<SaveFromRawContentResult> {
public async saveFromRawContent(user: User, rawContentItems: SaveFromRawContentItem[] | SaveFromRawContentItem, options: ItemSaveOption = null): Promise<SaveFromRawContentResult> {
options = options || {};

if (!Array.isArray(rawContentItems)) rawContentItems = [rawContentItems];

// In this function, first we process the input items, which may be
// serialized Joplin items or actual buffers (for resources) and convert
// them to database items. Once it's done those db items are saved in
@@ -349,11 +412,46 @@ export default class ItemModel extends BaseModel<Item> {
continue;
}

const itemToSave = o.item;
const itemToSave = { ...o.item };

try {
const content = itemToSave.content;
delete itemToSave.content;
itemToSave.content_storage_id = this.storageDriver_.storageId;

itemToSave.content_size = content ? content.byteLength : 0;

// Here we save the item row and content, and we want to
// make sure that either both are saved or none of them.
// This is done by setting up a save point before saving the
// row, and rollbacking if the content cannot be saved.
//
// Normally, since we are in a transaction, throwing an
// error should work, but since we catch all errors within
// this block it doesn't work.

// TODO: When an item is uploaded multiple times
// simultaneously there could be a race condition, where the
// content would not match the db row (for example, the
// content_size would differ).
//
// Possible solutions:
//
// - Row-level lock on items.id, and release once the
// content is saved.
// - Or external lock - eg. Redis.

const savePoint = await this.setSavePoint();
const savedItem = await this.saveForUser(user.id, itemToSave);

try {
await this.storageDriverWrite(savedItem.id, content, { models: this.models() });
await this.releaseSavePoint(savePoint);
} catch (error) {
await this.rollbackSavePoint(savePoint);
throw error;
}

if (o.isNote) {
await this.models().itemResource().deleteByItemId(savedItem.id);
await this.models().itemResource().addResourceIds(savedItem.id, o.resourceIds);
@@ -390,7 +488,7 @@ export default class ItemModel extends BaseModel<Item> {
}

private childrenQuery(userId: Uuid, pathQuery: string = '', count: boolean = false, options: LoadOptions = {}): Knex.QueryBuilder {
private childrenQuery(userId: Uuid, pathQuery: string = '', count: boolean = false, options: ItemLoadOptions = {}): Knex.QueryBuilder {
const query = this
.db('user_items')
.innerJoin('items', 'user_items.item_id', 'items.id')
@@ -420,7 +518,7 @@ export default class ItemModel extends BaseModel<Item> {
return `${this.baseUrl}/items/${itemId}/content`;
}

public async children(userId: Uuid, pathQuery: string = '', pagination: Pagination = null, options: LoadOptions = {}): Promise<PaginatedItems> {
public async children(userId: Uuid, pathQuery: string = '', pagination: Pagination = null, options: ItemLoadOptions = {}): Promise<PaginatedItems> {
pagination = pagination || defaultPagination();
const query = this.childrenQuery(userId, pathQuery, false, options);
return paginateDbQuery(query, pagination, 'items');
@@ -532,6 +630,8 @@ export default class ItemModel extends BaseModel<Item> {
await this.models().share().delete(shares.map(s => s.id));
await this.models().userItem().deleteByItemIds(ids);
await this.models().itemResource().deleteByItemIds(ids);
await this.storageDriver_.delete(ids, { models: this.models() });
if (this.storageDriverFallback_) await this.storageDriverFallback_.delete(ids, { models: this.models() });

await super.delete(ids, options);
}, 'ItemModel::delete');
@@ -552,6 +652,7 @@ export default class ItemModel extends BaseModel<Item> {
public async makeTestItem(userId: Uuid, num: number) {
return this.saveForUser(userId, {
name: `${num.toString().padStart(32, '0')}.md`,
content: Buffer.from(''),
});
}

@@ -560,23 +661,27 @@ export default class ItemModel extends BaseModel<Item> {
for (let i = 1; i <= count; i++) {
await this.saveForUser(userId, {
name: `${i.toString().padStart(32, '0')}.md`,
content: Buffer.from(''),
});
}
}, 'ItemModel::makeTestItems');
}

// This method should be private because items should only be saved using
// saveFromRawContent, which is going to deal with the content driver. But
// since it's used in various test units, it's kept public for now.
public async saveForUser(userId: Uuid, item: Item, options: SaveOptions = {}): Promise<Item> {
if (!userId) throw new Error('userId is required');

item = { ... item };
const isNew = await this.isNew(item, options);

if (item.content) {
item.content_size = item.content.byteLength;
}

let previousItem: ChangePreviousItem = null;

if (item.content && !item.content_storage_id) {
item.content_storage_id = this.storageDriver_.storageId;
}

if (isNew) {
if (!item.mime_type) item.mime_type = mimeUtils.fromFilename(item.name) || '';
if (!item.owner_id) item.owner_id = userId;
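An illustrative usage sketch of the updated ItemModel API (the user and models() helpers are assumed to come from surrounding application or test code): saveFromRawContent now also accepts a single item, and content must be requested explicitly with withContent, which routes the read through the storage driver and its fallback:

    const result = await models().item().saveFromRawContent(user, {
        name: 'info.json',
        body: Buffer.from(JSON.stringify({ version: 3 })),
    });

    // The items row no longer carries the content by default; ask for it explicitly.
    const item = await models().item().loadByName(user.id, 'info.json', { withContent: true });
    console.info(item.content.toString());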
packages/server/src/models/StorageModel.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
import { Storage } from '../services/database/types';
import BaseModel from './BaseModel';

export default class StorageModel extends BaseModel<Storage> {

public get tableName(): string {
return 'storages';
}

protected hasUuid(): boolean {
return false;
}

public async byConnectionString(connectionString: string): Promise<Storage> {
return this.db(this.tableName).where('connection_string', connectionString).first();
}

}
@@ -360,18 +360,18 @@ describe('UserModel', function() {
const syncInfo3: any = JSON.parse(JSON.stringify(syncInfo1));
delete syncInfo3.ppk;

await models().item().saveForUser(user1.id, {
content: Buffer.from(JSON.stringify(syncInfo1)),
await models().item().saveFromRawContent(user1, {
body: Buffer.from(JSON.stringify(syncInfo1)),
name: 'info.json',
});

await models().item().saveForUser(user2.id, {
content: Buffer.from(JSON.stringify(syncInfo2)),
await models().item().saveFromRawContent(user2, {
body: Buffer.from(JSON.stringify(syncInfo2)),
name: 'info.json',
});

await models().item().saveForUser(user3.id, {
content: Buffer.from(JSON.stringify(syncInfo3)),
await models().item().saveFromRawContent(user3, {
body: Buffer.from(JSON.stringify(syncInfo3)),
name: 'info.json',
});
@@ -593,7 +593,7 @@ export default class UserModel extends BaseModel<User> {

public async publicPrivateKey(userId: string): Promise<PublicPrivateKeyPair> {
const syncInfo = await this.syncInfo(userId);
return syncInfo.ppk?.value || null;// syncInfo.ppk?.value.publicKey || '';
return syncInfo.ppk?.value || null;
}

// Note that when the "password" property is provided, it is going to be
@@ -72,88 +72,111 @@ import SubscriptionModel from './SubscriptionModel';
import UserFlagModel from './UserFlagModel';
import EventModel from './EventModel';
import { Config } from '../utils/types';
import StorageDriverBase from './items/storage/StorageDriverBase';
import LockModel from './LockModel';
import StorageModel from './StorageModel';

export interface Options {
storageDriver: StorageDriverBase;
storageDriverFallback?: StorageDriverBase;
}

export type NewModelFactoryHandler = (db: DbConnection)=> Models;

export class Models {

private db_: DbConnection;
private config_: Config;
private options_: Options;

public constructor(db: DbConnection, config: Config) {
public constructor(db: DbConnection, config: Config, options: Options) {
this.db_ = db;
this.config_ = config;
this.options_ = options;

// if (!options.storageDriver) throw new Error('StorageDriver is required');

this.newModelFactory = this.newModelFactory.bind(this);
}

private newModelFactory(db: DbConnection) {
return new Models(db, this.config_, this.options_);
}

public item() {
return new ItemModel(this.db_, newModelFactory, this.config_);
return new ItemModel(this.db_, this.newModelFactory, this.config_, this.options_);
}

public user() {
return new UserModel(this.db_, newModelFactory, this.config_);
return new UserModel(this.db_, this.newModelFactory, this.config_);
}

public email() {
return new EmailModel(this.db_, newModelFactory, this.config_);
return new EmailModel(this.db_, this.newModelFactory, this.config_);
}

public userItem() {
return new UserItemModel(this.db_, newModelFactory, this.config_);
return new UserItemModel(this.db_, this.newModelFactory, this.config_);
}

public token() {
return new TokenModel(this.db_, newModelFactory, this.config_);
return new TokenModel(this.db_, this.newModelFactory, this.config_);
}

public itemResource() {
return new ItemResourceModel(this.db_, newModelFactory, this.config_);
return new ItemResourceModel(this.db_, this.newModelFactory, this.config_);
}

public apiClient() {
return new ApiClientModel(this.db_, newModelFactory, this.config_);
return new ApiClientModel(this.db_, this.newModelFactory, this.config_);
}

public session() {
return new SessionModel(this.db_, newModelFactory, this.config_);
return new SessionModel(this.db_, this.newModelFactory, this.config_);
}

public change() {
return new ChangeModel(this.db_, newModelFactory, this.config_);
return new ChangeModel(this.db_, this.newModelFactory, this.config_);
}

public notification() {
return new NotificationModel(this.db_, newModelFactory, this.config_);
return new NotificationModel(this.db_, this.newModelFactory, this.config_);
}

public share() {
return new ShareModel(this.db_, newModelFactory, this.config_);
return new ShareModel(this.db_, this.newModelFactory, this.config_);
}

public shareUser() {
return new ShareUserModel(this.db_, newModelFactory, this.config_);
return new ShareUserModel(this.db_, this.newModelFactory, this.config_);
}

public keyValue() {
return new KeyValueModel(this.db_, newModelFactory, this.config_);
return new KeyValueModel(this.db_, this.newModelFactory, this.config_);
}

public subscription() {
return new SubscriptionModel(this.db_, newModelFactory, this.config_);
return new SubscriptionModel(this.db_, this.newModelFactory, this.config_);
}

public userFlag() {
return new UserFlagModel(this.db_, newModelFactory, this.config_);
return new UserFlagModel(this.db_, this.newModelFactory, this.config_);
}

public event() {
return new EventModel(this.db_, newModelFactory, this.config_);
return new EventModel(this.db_, this.newModelFactory, this.config_);
}

public lock() {
return new LockModel(this.db_, newModelFactory, this.config_);
return new LockModel(this.db_, this.newModelFactory, this.config_);
}

public storage() {
return new StorageModel(this.db_, this.newModelFactory, this.config_);
}

}

export default function newModelFactory(db: DbConnection, config: Config): Models {
return new Models(db, config);
export default function newModelFactory(db: DbConnection, config: Config, options: Options): Models {
return new Models(db, config, options);
}
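A sketch of the new factory wiring (it mirrors the app.ts hunk earlier in this changeset; the db connection, config and user ID are assumed to exist already):

    const storageDriver = await storageDriverFromConfig(config().storageDriver, db, { assignDriverId: true });
    const storageDriverFallback = await storageDriverFromConfig(config().storageDriverFallback, db, { assignDriverId: true });

    const models = newModelFactory(db, config(), { storageDriver, storageDriverFallback });
    const item = await models.item().loadByName(userId, 'info.json');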
@@ -0,0 +1,42 @@
import { StorageDriverConfig, StorageDriverMode } from '../../../utils/types';
import { Models } from '../../factory';

// ItemModel passes the models object when calling any of the driver handler.
// This is so that if there's an active transaction, the driver can use that (as
// required for example by StorageDriverDatabase).

export interface Context {
models: Models;
}

export default class StorageDriverBase {

private storageId_: number;
private config_: StorageDriverConfig;

public constructor(storageId: number, config: StorageDriverConfig) {
this.storageId_ = storageId;
this.config_ = config;
}

public get storageId(): number {
return this.storageId_;
}

public get config(): StorageDriverConfig {
return this.config_;
}

public get mode(): StorageDriverMode {
return this.config.mode || StorageDriverMode.ReadOnly;
}

public async write(_itemId: string, _content: Buffer, _context: Context): Promise<void> { throw new Error('Not implemented'); }

public async read(_itemId: string, _context: Context): Promise<Buffer> { throw new Error('Not implemented'); }

public async delete(_itemId: string | string[], _context: Context): Promise<void> { throw new Error('Not implemented'); }

public async exists(_itemId: string, _context: Context): Promise<boolean> { throw new Error('Not implemented'); }

}
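A hypothetical call site (simplified from ItemModel) showing why the Context exists: it carries the current Models instance so a driver such as StorageDriverDatabase can run its queries on the active transaction, if there is one:

    const context: Context = { models: this.models() };

    await driver.write(item.id, content, context);
    if (await driver.exists(item.id, context)) {
        const buffer = await driver.read(item.id, context);
    }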
@@ -0,0 +1,70 @@
import { clientType } from '../../../db';
import { afterAllTests, beforeAllDb, beforeEachDb, db, expectNotThrow, expectThrow, models } from '../../../utils/testing/testUtils';
import { StorageDriverMode } from '../../../utils/types';
import StorageDriverDatabase from './StorageDriverDatabase';
import StorageDriverMemory from './StorageDriverMemory';
import { shouldDeleteContent, shouldNotCreateItemIfContentNotSaved, shouldNotUpdateItemIfContentNotSaved, shouldSupportFallbackDriver, shouldSupportFallbackDriverInReadWriteMode, shouldUpdateContentStorageIdAfterSwitchingDriver, shouldWriteToContentAndReadItBack } from './testUtils';

const newDriver = () => {
return new StorageDriverDatabase(1, {
dbClientType: clientType(db()),
});
};

describe('StorageDriverDatabase', function() {

beforeAll(async () => {
await beforeAllDb('StorageDriverDatabase');
});

afterAll(async () => {
await afterAllTests();
});

beforeEach(async () => {
await beforeEachDb();
});

test('should write to content and read it back', async function() {
const driver = newDriver();
await shouldWriteToContentAndReadItBack(driver);
});

test('should delete the content', async function() {
const driver = newDriver();
await shouldDeleteContent(driver);
});

test('should not create the item if the content cannot be saved', async function() {
const driver = newDriver();
await shouldNotCreateItemIfContentNotSaved(driver);
});

test('should not update the item if the content cannot be saved', async function() {
const driver = newDriver();
await shouldNotUpdateItemIfContentNotSaved(driver);
});

test('should fail if the item row does not exist', async function() {
const driver = newDriver();
await expectThrow(async () => driver.read('oops', { models: models() }));
});

test('should do nothing if deleting non-existing row', async function() {
const driver = newDriver();
await expectNotThrow(async () => driver.delete('oops', { models: models() }));
});

test('should support fallback content drivers', async function() {
await shouldSupportFallbackDriver(newDriver(), new StorageDriverMemory(2));
});

test('should support fallback content drivers in rw mode', async function() {
await shouldSupportFallbackDriverInReadWriteMode(newDriver(), new StorageDriverMemory(2, { mode: StorageDriverMode.ReadWrite }));
});

test('should update content storage ID after switching driver', async function() {
await shouldUpdateContentStorageIdAfterSwitchingDriver(newDriver(), new StorageDriverMemory(2));
});

});
@@ -0,0 +1,58 @@
// This driver allows storing the content directly with the item row in the
// database (as a binary blob). For now the driver expects that the content is
// stored in the same table as the items, as it originally was.

import { DatabaseConfigClient, StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import StorageDriverBase, { Context } from './StorageDriverBase';

interface StorageDriverDatabaseConfig extends StorageDriverConfig {
dbClientType: DatabaseConfigClient;
}

export default class StorageDriverDatabase extends StorageDriverBase {

private handleReturnedRows_: boolean = null;

public constructor(id: number, config: StorageDriverDatabaseConfig) {
super(id, { type: StorageDriverType.Database, ...config });

this.handleReturnedRows_ = config.dbClientType === DatabaseConfigClient.PostgreSQL;
}

public async write(itemId: string, content: Buffer, context: Context): Promise<void> {
const returningOption = this.handleReturnedRows_ ? ['id'] : undefined;

const updatedRows = await context.models.item().db('items').update({ content }, returningOption).where('id', '=', itemId);
if (!this.handleReturnedRows_) return;

// Not possible because the ID is unique
if (updatedRows.length > 1) throw new Error('Update more than one row');

// Not possible either because the row is created before this handler is called, but still could happen
if (!updatedRows.length) throw new Error(`No such item: ${itemId}`);

// That would be weird
if (updatedRows[0].id !== itemId) throw new Error(`Did not update the right row. Expected: ${itemId}. Got: ${updatedRows[0].id}`);
}

public async read(itemId: string, context: Context): Promise<Buffer> {
const row = await context.models.item().db('items').select('content').where('id', '=', itemId).first();

// Calling code should only call this handler if the row exists, so if
// we find it doesn't, it's an error.
if (!row) throw new Error(`No such row: ${itemId}`);

return row.content;
}

public async delete(_itemId: string | string[], _context: Context): Promise<void> {
// noop because the calling code deletes the whole row, including the
// content.
}

public async exists(itemId: string, context: Context): Promise<boolean> {
const row = await context.models.item().db('items').select('content').where('id', '=', itemId).first();
return !!row && !!row.content;
}

}
@@ -0,0 +1,84 @@
import { pathExists, remove } from 'fs-extra';
import { afterAllTests, beforeAllDb, beforeEachDb, expectNotThrow, expectThrow, tempDirPath } from '../../../utils/testing/testUtils';
import StorageDriverFs from './StorageDriverFs';
import { shouldDeleteContent, shouldNotCreateItemIfContentNotSaved, shouldNotUpdateItemIfContentNotSaved, shouldWriteToContentAndReadItBack } from './testUtils';

let basePath_: string = '';

const newDriver = () => {
return new StorageDriverFs(1, { path: basePath_ });
};

describe('StorageDriverFs', function() {

beforeAll(async () => {
await beforeAllDb('StorageDriverFs');
});

afterAll(async () => {
await afterAllTests();
});

beforeEach(async () => {
basePath_ = tempDirPath();
await beforeEachDb();
});

afterEach(async () => {
await remove(basePath_);
basePath_ = '';
});

test('should write to content and read it back', async function() {
const driver = newDriver();
await shouldWriteToContentAndReadItBack(driver);
});

test('should delete the content', async function() {
const driver = newDriver();
await shouldDeleteContent(driver);
});

test('should not create the item if the content cannot be saved', async function() {
const driver = newDriver();
await shouldNotCreateItemIfContentNotSaved(driver);
});

test('should not update the item if the content cannot be saved', async function() {
const driver = newDriver();
await shouldNotUpdateItemIfContentNotSaved(driver);
});

test('should write to a file and read it back', async function() {
const driver = newDriver();
await driver.write('testing', Buffer.from('testing'));
const content = await driver.read('testing');
expect(content.toString()).toBe('testing');
});

test('should automatically create the base path', async function() {
expect(await pathExists(basePath_)).toBe(false);
const driver = newDriver();
await driver.write('testing', Buffer.from('testing'));
expect(await pathExists(basePath_)).toBe(true);
});

test('should delete a file', async function() {
const driver = newDriver();
await driver.write('testing', Buffer.from('testing'));
expect((await driver.read('testing')).toString()).toBe('testing');
await driver.delete('testing');
await expectThrow(async () => driver.read('testing'), 'ENOENT');
});

test('should throw if the file does not exist when reading it', async function() {
const driver = newDriver();
await expectThrow(async () => driver.read('testread'), 'ENOENT');
});

test('should not throw if deleting a file that does not exist', async function() {
const driver = newDriver();
await expectNotThrow(async () => driver.delete('notthere'));
});

});
packages/server/src/models/items/storage/StorageDriverFs.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import { mkdirp, pathExists, readFile, remove, writeFile } from 'fs-extra';
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import StorageDriverBase from './StorageDriverBase';

export default class StorageDriverFs extends StorageDriverBase {

private pathCreated_: Record<string, boolean> = {};

public constructor(id: number, config: StorageDriverConfig) {
super(id, { type: StorageDriverType.Filesystem, ...config });
}

private async createParentDirectories(path: string) {
const p = path.split('/');
p.pop();
const basename = p.join('/');

if (this.pathCreated_[basename]) return;
await mkdirp(basename);
this.pathCreated_[basename] = true;
}

private itemPath(itemId: string): string {
return `${this.config.path}/${itemId.substr(0, 2).toLowerCase()}/${itemId.substr(2, 2).toLowerCase()}/${itemId}`;
}

public async write(itemId: string, content: Buffer): Promise<void> {
const itemPath = this.itemPath(itemId);
await this.createParentDirectories(itemPath);
await writeFile(itemPath, content);
}

public async read(itemId: string): Promise<Buffer> {
return readFile(this.itemPath(itemId));
}

public async delete(itemId: string | string[]): Promise<void> {
const itemIds = Array.isArray(itemId) ? itemId : [itemId];
for (const id of itemIds) {
await remove(this.itemPath(id));
}
}

public async exists(itemId: string): Promise<boolean> {
return pathExists(this.itemPath(itemId));
}

}
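A worked example of the path layout (the base path and item ID below are made up): itemPath() shards content under two directory levels taken from the first four characters of the ID, lower-cased, so no single folder ends up holding every item:

    const driver = new StorageDriverFs(1, { path: '/var/lib/joplin/content' });
    await driver.write('A1B2C3D4E5F6', Buffer.from('note body'));
    // The file ends up at: /var/lib/joplin/content/a1/b2/A1B2C3D4E5F6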
@@ -0,0 +1,40 @@
import { afterAllTests, beforeAllDb, beforeEachDb } from '../../../utils/testing/testUtils';
import StorageDriverMemory from './StorageDriverMemory';
import { shouldDeleteContent, shouldNotCreateItemIfContentNotSaved, shouldNotUpdateItemIfContentNotSaved, shouldWriteToContentAndReadItBack } from './testUtils';

describe('StorageDriverMemory', function() {

beforeAll(async () => {
await beforeAllDb('StorageDriverMemory');
});

afterAll(async () => {
await afterAllTests();
});

beforeEach(async () => {
await beforeEachDb();
});

test('should write to content and read it back', async function() {
const driver = new StorageDriverMemory(1);
await shouldWriteToContentAndReadItBack(driver);
});

test('should delete the content', async function() {
const driver = new StorageDriverMemory(1);
await shouldDeleteContent(driver);
});

test('should not create the item if the content cannot be saved', async function() {
const driver = new StorageDriverMemory(1);
await shouldNotCreateItemIfContentNotSaved(driver);
});

test('should not update the item if the content cannot be saved', async function() {
const driver = new StorageDriverMemory(1);
await shouldNotUpdateItemIfContentNotSaved(driver);
});

});
@@ -0,0 +1,32 @@
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import StorageDriverBase from './StorageDriverBase';

export default class StorageDriverMemory extends StorageDriverBase {

private data_: Record<string, Buffer> = {};

public constructor(id: number, config: StorageDriverConfig = null) {
super(id, { type: StorageDriverType.Memory, ...config });
}

public async write(itemId: string, content: Buffer): Promise<void> {
this.data_[itemId] = content;
}

public async read(itemId: string): Promise<Buffer> {
if (!(itemId in this.data_)) throw new Error(`No such item: ${itemId}`);
return this.data_[itemId];
}

public async delete(itemId: string | string[]): Promise<void> {
const itemIds = Array.isArray(itemId) ? itemId : [itemId];
for (const id of itemIds) {
delete this.data_[id];
}
}

public async exists(itemId: string): Promise<boolean> {
return itemId in this.data_;
}

}
@@ -0,0 +1,85 @@
// Note that these tests require an S3 bucket to be set, with the credentials
// defined in the below config file. If the credentials are missing, all the
// tests are skipped.

import { afterAllTests, beforeAllDb, beforeEachDb, expectNotThrow, expectThrow, readCredentialFile } from '../../../utils/testing/testUtils';
import { StorageDriverType } from '../../../utils/types';
import StorageDriverS3 from './StorageDriverS3';
import { shouldDeleteContent, shouldNotCreateItemIfContentNotSaved, shouldNotUpdateItemIfContentNotSaved, shouldWriteToContentAndReadItBack } from './testUtils';

const s3Config = async () => {
const s = await readCredentialFile('server-s3-test-units.json', '');
if (!s) return null;
return JSON.parse(s);
};

const newDriver = async () => {
return new StorageDriverS3(1, {
type: StorageDriverType.S3,
...await s3Config(),
});
};

const configIsSet = async () => {
const c = await s3Config();
return !!c;
};

describe('StorageDriverS3', function() {

beforeAll(async () => {
if (!(await configIsSet())) {
return;
} else {
console.warn('Running S3 unit tests on live environment!');
await beforeAllDb('StorageDriverS3');
}
});

afterAll(async () => {
if (!(await configIsSet())) return;
await afterAllTests();
});

beforeEach(async () => {
if (!(await configIsSet())) return;
await beforeEachDb();
});

test('should write to content and read it back', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await shouldWriteToContentAndReadItBack(driver);
});

test('should delete the content', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await shouldDeleteContent(driver);
});

test('should not create the item if the content cannot be saved', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await shouldNotCreateItemIfContentNotSaved(driver);
});

test('should not update the item if the content cannot be saved', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await shouldNotUpdateItemIfContentNotSaved(driver);
});

test('should fail if the item row does not exist', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await expectThrow(async () => driver.read('oops'));
});

test('should do nothing if deleting non-existing row', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await expectNotThrow(async () => driver.delete('oops'));
});

});
97
packages/server/src/models/items/storage/StorageDriverS3.ts
Normal file
97
packages/server/src/models/items/storage/StorageDriverS3.ts
Normal file
@@ -0,0 +1,97 @@
import { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectsCommand, ObjectIdentifier, HeadObjectCommand } from '@aws-sdk/client-s3';
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import StorageDriverBase from './StorageDriverBase';

function stream2buffer(stream: any): Promise<Buffer> {
	return new Promise((resolve, reject) => {
		const buffer: Uint8Array[] = [];
		let hasError = false;

		stream.on('data', (chunk: Uint8Array) => {
			if (hasError) return;
			buffer.push(chunk);
		});

		stream.on('end', () => {
			if (hasError) return;
			resolve(Buffer.concat(buffer));
		});

		stream.on('error', (error: any) => {
			if (hasError) return;
			hasError = true;
			reject(error);
		});
	});
}

export default class StorageDriverS3 extends StorageDriverBase {

	private client_: S3Client;

	public constructor(id: number, config: StorageDriverConfig) {
		super(id, { type: StorageDriverType.S3, ...config });

		this.client_ = new S3Client({
			// We need to set a region. See https://github.com/aws/aws-sdk-js-v3/issues/1845#issuecomment-754832210
			region: this.config.region,
			credentials: {
				accessKeyId: this.config.accessKeyId,
				secretAccessKey: this.config.secretAccessKeyId,
			},
		});
	}

	public async write(itemId: string, content: Buffer): Promise<void> {
		await this.client_.send(new PutObjectCommand({
			Bucket: this.config.bucket,
			Key: itemId,
			Body: content,
		}));
	}

	public async read(itemId: string): Promise<Buffer | null> {
		try {
			const response = await this.client_.send(new GetObjectCommand({
				Bucket: this.config.bucket,
				Key: itemId,
			}));

			return stream2buffer(response.Body);
		} catch (error) {
			error.message = `Could not get item "${itemId}": ${error.message}`;
			throw error;
		}
	}

	public async delete(itemId: string | string[]): Promise<void> {
		const itemIds = Array.isArray(itemId) ? itemId : [itemId];

		const objects: ObjectIdentifier[] = itemIds.map(id => {
			return { Key: id };
		});

		await this.client_.send(new DeleteObjectsCommand({
			Bucket: this.config.bucket,
			Delete: {
				Objects: objects,
			},
		}));
	}

	public async exists(itemId: string): Promise<boolean> {
		try {
			await this.client_.send(new HeadObjectCommand({
				Bucket: this.config.bucket,
				Key: itemId,
			}));

			return true;
		} catch (error) {
			if (error?.$metadata?.httpStatusCode === 404) return false;
			error.message = `Could not check if object exists: "${itemId}": ${error.message}`;
			throw error;
		}
	}

}
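As a quick orientation, a hedged usage sketch of the driver defined above; the config values are placeholders and the import paths assume a file placed next to the driver:

import StorageDriverS3 from './StorageDriverS3';
import { StorageDriverType } from '../../../utils/types';

const exampleS3Usage = async () => {
	const driver = new StorageDriverS3(1, {
		type: StorageDriverType.S3,
		region: 'us-east-1', // placeholder
		accessKeyId: 'AKIAXXXXXXXXXXXXXXXX', // placeholder
		secretAccessKeyId: 'xxxxxxxxxxxxxxxx', // placeholder
		bucket: 'joplin-items', // placeholder
	});

	// Write an object, read it back, check existence, then delete it.
	await driver.write('itemid1', Buffer.from('hello'));
	console.info(await driver.exists('itemid1')); // true
	console.info((await driver.read('itemid1')).toString()); // "hello"
	await driver.delete('itemid1');
};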
@@ -0,0 +1,42 @@
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import parseStorageDriverConnectionString from './parseStorageDriverConnectionString';

describe('parseStorageDriverConnectionString', function() {

	test('should parse a connection string', async function() {
		const testCases: Record<string, StorageDriverConfig> = {
			'Type=Database': {
				type: StorageDriverType.Database,
			},
			' Type = Database ': {
				type: StorageDriverType.Database,
			},
			'Type=Filesystem; Path=/path/to/dir': {
				type: StorageDriverType.Filesystem,
				path: '/path/to/dir',
			},
			' Type = Filesystem ; Path = /path/to/dir ': {
				type: StorageDriverType.Filesystem,
				path: '/path/to/dir',
			},
			'Type=Memory;': {
				type: StorageDriverType.Memory,
			},
			'': null,
		};

		for (const [connectionString, config] of Object.entries(testCases)) {
			const actual = parseStorageDriverConnectionString(connectionString);
			expect(actual).toEqual(config);
		}
	});

	test('should detect errors', async function() {
		expect(() => parseStorageDriverConnectionString('Path=/path/to/dir')).toThrow(); // Type is missing
		expect(() => parseStorageDriverConnectionString('Type=')).toThrow();
		expect(() => parseStorageDriverConnectionString('Type;')).toThrow();
		expect(() => parseStorageDriverConnectionString('Type=DoesntExist')).toThrow();
		expect(() => parseStorageDriverConnectionString('Type=Filesystem')).toThrow();
	});

});
@@ -0,0 +1,63 @@
// Type={Database,Filesystem,Memory,S3}; Path={/path/to/dir,https://s3bucket}

import { StorageDriverConfig, StorageDriverMode, StorageDriverType } from '../../../utils/types';

const parseType = (type: string): StorageDriverType => {
	if (type === 'Database') return StorageDriverType.Database;
	if (type === 'Filesystem') return StorageDriverType.Filesystem;
	if (type === 'Memory') return StorageDriverType.Memory;
	throw new Error(`Invalid type: "${type}"`);
};

const parseMode = (mode: string): StorageDriverMode => {
	if (mode === 'rw') return StorageDriverMode.ReadWrite;
	if (mode === 'r') return StorageDriverMode.ReadOnly;
	throw new Error(`Invalid mode: "${mode}"`);
};

const validate = (config: StorageDriverConfig) => {
	if (!config.type) throw new Error('Type must be specified');
	if (config.type === StorageDriverType.Filesystem && !config.path) throw new Error('Path must be set for filesystem driver');
	return config;
};

export default function(connectionString: string): StorageDriverConfig | null {
	if (!connectionString) return null;

	const output: StorageDriverConfig = {
		type: null,
	};

	const items = connectionString.split(';').map(i => i.trim());

	try {
		for (const item of items) {
			if (!item) continue;

			const [key, value] = item.split('=').map(s => s.trim());

			if (key === 'Type') {
				output.type = parseType(value);
			} else if (key === 'Path') {
				output.path = value;
			} else if (key === 'Mode') {
				output.mode = parseMode(value);
			} else if (key === 'Region') {
				output.region = value;
			} else if (key === 'AccessKeyId') {
				output.accessKeyId = value;
			} else if (key === 'SecretAccessKeyId') {
				output.secretAccessKeyId = value;
			} else if (key === 'Bucket') {
				output.bucket = value;
			} else {
				throw new Error(`Invalid key: "${key}"`);
			}
		}
	} catch (error) {
		error.message = `In connection string "${connectionString}": ${error.message}`;
		throw error;
	}

	return validate(output);
}
@@ -0,0 +1,30 @@
import { StorageDriverConfig, StorageDriverMode, StorageDriverType } from '../../../utils/types';

const serializeType = (type: StorageDriverType): string => {
	if (type === StorageDriverType.Database) return 'Database';
	if (type === StorageDriverType.Filesystem) return 'Filesystem';
	if (type === StorageDriverType.Memory) return 'Memory';
	throw new Error(`Invalid type: "${type}"`);
};

const serializeMode = (mode: StorageDriverMode): string => {
	if (mode === StorageDriverMode.ReadWrite) return 'rw';
	if (mode === StorageDriverMode.ReadOnly) return 'r';
	throw new Error(`Invalid mode: "${mode}"`);
};

export default function(config: StorageDriverConfig, locationOnly: boolean = true): string {
	if (!config) return '';

	const items: string[] = [];

	items.push(`Type=${serializeType(config.type)}`);

	if (config.path) items.push(`Path=${config.path}`);

	if (!locationOnly && config.mode) items.push(`Mode=${serializeMode(config.mode)}`);

	items.sort();

	return items.join('; ');
}
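To show how the parser and serializer above fit together, a minimal round-trip sketch (import paths assume a file placed alongside them):

import parseStorageDriverConnectionString from './parseStorageDriverConnectionString';
import serializeStorageConfig from './serializeStorageConfig';

const config = parseStorageDriverConnectionString(' Type = Filesystem ; Path = /path/to/dir ');
// config => { type: StorageDriverType.Filesystem, path: '/path/to/dir' }

const canonical = serializeStorageConfig(config);
// canonical => 'Path=/path/to/dir; Type=Filesystem'

Because the items are sorted, equivalent configs always serialize to the same string, which is presumably what lets storageDriverFromConfig look up an existing storage row by connection string.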
@@ -0,0 +1,54 @@
import globalConfig from '../../../config';
import { clientType, DbConnection } from '../../../db';
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import newModelFactory from '../../factory';
import serializeStorageConfig from './serializeStorageConfig';
import StorageDriverBase from './StorageDriverBase';
import StorageDriverDatabase from './StorageDriverDatabase';
import StorageDriverFs from './StorageDriverFs';
import StorageDriverMemory from './StorageDriverMemory';

export interface Options {
	assignDriverId?: boolean;
}

export default async function(config: StorageDriverConfig, db: DbConnection, options: Options = null): Promise<StorageDriverBase | null> {
	if (!config) return null;

	options = {
		assignDriverId: true,
		...options,
	};

	let storageId: number = 0;

	if (options.assignDriverId) {
		const models = newModelFactory(db, globalConfig(), { storageDriver: null });

		const connectionString = serializeStorageConfig(config);
		const existingStorage = await models.storage().byConnectionString(connectionString);

		if (existingStorage) {
			storageId = existingStorage.id;
		} else {
			const storage = await models.storage().save({
				connection_string: connectionString,
			});
			storageId = storage.id;
		}
	}

	if (config.type === StorageDriverType.Database) {
		return new StorageDriverDatabase(storageId, { ...config, dbClientType: clientType(db) });
	}

	if (config.type === StorageDriverType.Filesystem) {
		return new StorageDriverFs(storageId, config);
	}

	if (config.type === StorageDriverType.Memory) {
		return new StorageDriverMemory(storageId, config);
	}

	throw new Error(`Invalid config: ${JSON.stringify(config)}`);
}
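Putting the helpers together, a hedged sketch of how a driver could be resolved from a connection string at startup; STORAGE_DRIVER is a hypothetical environment variable used only for illustration, and the function name is not part of this branch:

import { DbConnection } from '../../../db';
import parseStorageDriverConnectionString from './parseStorageDriverConnectionString';
import storageDriverFromConfig from './storageDriverFromConfig';

const resolveStorageDriver = async (db: DbConnection) => {
	// e.g. "Type=Filesystem; Path=/var/lib/joplin" (hypothetical value)
	const config = parseStorageDriverConnectionString(process.env.STORAGE_DRIVER || 'Type=Database');

	// Looks up or creates the matching row in the "storages" table, then
	// returns the concrete driver instance carrying that storage id.
	return storageDriverFromConfig(config, db, { assignDriverId: true });
};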
245
packages/server/src/models/items/storage/testUtils.ts
Normal file
@@ -0,0 +1,245 @@
import { Item } from '../../../services/database/types';
import { createUserAndSession, makeNoteSerializedBody, models } from '../../../utils/testing/testUtils';
import { StorageDriverMode } from '../../../utils/types';
import StorageDriverBase, { Context } from './StorageDriverBase';

const testModels = (driver: StorageDriverBase) => {
	return models({ storageDriver: driver });
};

export async function shouldWriteToContentAndReadItBack(driver: StorageDriverBase) {
	const { user } = await createUserAndSession(1);
	const noteBody = makeNoteSerializedBody({
		id: '00000000000000000000000000000001',
		title: 'testing driver',
	});

	const output = await testModels(driver).item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(noteBody),
	}]);

	const result = output['00000000000000000000000000000001.md'];
	expect(result.error).toBeFalsy();

	const item = await testModels(driver).item().loadWithContent(result.item.id);
	expect(item.content.byteLength).toBe(item.content_size);
	expect(item.content_storage_id).toBe(driver.storageId);

	const rawContent = await driver.read(item.id, { models: models() });
	expect(rawContent.byteLength).toBe(item.content_size);

	const jopItem = testModels(driver).item().itemToJoplinItem(item);
	expect(jopItem.id).toBe('00000000000000000000000000000001');
	expect(jopItem.title).toBe('testing driver');
}

export async function shouldDeleteContent(driver: StorageDriverBase) {
	const { user } = await createUserAndSession(1);
	const noteBody = makeNoteSerializedBody({
		id: '00000000000000000000000000000001',
		title: 'testing driver',
	});

	const output = await testModels(driver).item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(noteBody),
	}]);

	const item: Item = output['00000000000000000000000000000001.md'].item;

	expect((await testModels(driver).item().all()).length).toBe(1);
	await testModels(driver).item().delete(item.id);
	expect((await testModels(driver).item().all()).length).toBe(0);
}

export async function shouldNotCreateItemIfContentNotSaved(driver: StorageDriverBase) {
	const previousWrite = driver.write;
	driver.write = () => { throw new Error('not working!'); };

	try {
		const { user } = await createUserAndSession(1);
		const noteBody = makeNoteSerializedBody({
			id: '00000000000000000000000000000001',
			title: 'testing driver',
		});

		const output = await testModels(driver).item().saveFromRawContent(user, [{
			name: '00000000000000000000000000000001.md',
			body: Buffer.from(noteBody),
		}]);

		expect(output['00000000000000000000000000000001.md'].error.message).toBe('not working!');
		expect((await testModels(driver).item().all()).length).toBe(0);
	} finally {
		driver.write = previousWrite;
	}
}

export async function shouldNotUpdateItemIfContentNotSaved(driver: StorageDriverBase) {
	const { user } = await createUserAndSession(1);
	const noteBody = makeNoteSerializedBody({
		id: '00000000000000000000000000000001',
		title: 'testing driver',
	});

	await testModels(driver).item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(noteBody),
	}]);

	const noteBodyMod1 = makeNoteSerializedBody({
		id: '00000000000000000000000000000001',
		title: 'updated 1',
	});

	await testModels(driver).item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(noteBodyMod1),
	}]);

	const itemMod1 = testModels(driver).item().itemToJoplinItem(await testModels(driver).item().loadByJopId(user.id, '00000000000000000000000000000001', { withContent: true }));
	expect(itemMod1.title).toBe('updated 1');

	const noteBodyMod2 = makeNoteSerializedBody({
		id: '00000000000000000000000000000001',
		title: 'updated 2',
	});

	const previousWrite = driver.write;
	driver.write = () => { throw new Error('not working!'); };

	try {
		const output = await testModels(driver).item().saveFromRawContent(user, [{
			name: '00000000000000000000000000000001.md',
			body: Buffer.from(noteBodyMod2),
		}]);

		expect(output['00000000000000000000000000000001.md'].error.message).toBe('not working!');
		const itemMod2 = testModels(driver).item().itemToJoplinItem(await testModels(driver).item().loadByJopId(user.id, '00000000000000000000000000000001', { withContent: true }));
		expect(itemMod2.title).toBe('updated 1'); // Check it has not been updated
	} finally {
		driver.write = previousWrite;
	}
}

export async function shouldSupportFallbackDriver(driver: StorageDriverBase, fallbackDriver: StorageDriverBase) {
	const { user } = await createUserAndSession(1);

	const output = await testModels(driver).item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(makeNoteSerializedBody({
			id: '00000000000000000000000000000001',
			title: 'testing',
		})),
	}]);

	const itemId = output['00000000000000000000000000000001.md'].item.id;

	let previousByteLength = 0;

	{
		const content = await driver.read(itemId, { models: models() });
		expect(content.byteLength).toBeGreaterThan(10);
		previousByteLength = content.byteLength;
	}

	const testModelWithFallback = models({
		storageDriver: driver,
		storageDriverFallback: fallbackDriver,
	});

	// If the item content is not on the main content driver, it should get
	// it from the fallback one.
	const itemFromDb = await testModelWithFallback.item().loadWithContent(itemId);
	expect(itemFromDb.content.byteLength).toBe(previousByteLength);

	// When writing content, it should use the main content driver, and set
	// the content for the fallback one to "".
	await testModelWithFallback.item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(makeNoteSerializedBody({
			id: '00000000000000000000000000000001',
			title: 'testing1234',
		})),
	}]);

	{
		// Check that it has cleared the fallback driver content
		const context: Context = { models: models() };
		const fallbackContent = await fallbackDriver.read(itemId, context);
		expect(fallbackContent.byteLength).toBe(0);

		// Check that it has written to the main driver content
		const mainContent = await driver.read(itemId, context);
		expect(mainContent.byteLength).toBe(previousByteLength + 4);
	}
}

export async function shouldSupportFallbackDriverInReadWriteMode(driver: StorageDriverBase, fallbackDriver: StorageDriverBase) {
	if (fallbackDriver.mode !== StorageDriverMode.ReadWrite) throw new Error('Content driver must be configured in RW mode for this test');

	const { user } = await createUserAndSession(1);

	const testModelWithFallback = models({
		storageDriver: driver,
		storageDriverFallback: fallbackDriver,
	});

	const output = await testModelWithFallback.item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(makeNoteSerializedBody({
			id: '00000000000000000000000000000001',
			title: 'testing',
		})),
	}]);

	const itemId = output['00000000000000000000000000000001.md'].item.id;

	{
		// Check that it has written the content to both drivers
		const context: Context = { models: models() };
		const fallbackContent = await fallbackDriver.read(itemId, context);
		expect(fallbackContent.byteLength).toBeGreaterThan(10);

		const mainContent = await driver.read(itemId, context);
		expect(mainContent.toString()).toBe(fallbackContent.toString());
	}
}

export async function shouldUpdateContentStorageIdAfterSwitchingDriver(oldDriver: StorageDriverBase, newDriver: StorageDriverBase) {
	if (oldDriver.storageId === newDriver.storageId) throw new Error('Drivers must be different for this test');

	const { user } = await createUserAndSession(1);

	const oldDriverModel = models({
		storageDriver: oldDriver,
	});

	const newDriverModel = models({
		storageDriver: newDriver,
	});

	const output = await oldDriverModel.item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(makeNoteSerializedBody({
			id: '00000000000000000000000000000001',
			title: 'testing',
		})),
	}]);

	const itemId = output['00000000000000000000000000000001.md'].item.id;

	expect((await oldDriverModel.item().load(itemId)).content_storage_id).toBe(oldDriver.storageId);

	await newDriverModel.item().saveFromRawContent(user, [{
		name: '00000000000000000000000000000001.md',
		body: Buffer.from(makeNoteSerializedBody({
			id: '00000000000000000000000000000001',
			title: 'testing',
		})),
	}]);

	expect(await newDriverModel.item().count()).toBe(1);
	expect((await oldDriverModel.item().load(itemId)).content_storage_id).toBe(newDriver.storageId);
}
0
packages/server/src/models/items/storage/utils.ts
Normal file
@@ -1,4 +1,4 @@
export type Uuid = any;
export type Uuid = string;

export enum ItemAddressingType {
	Id = 1,
@@ -246,6 +246,11 @@ export interface Event extends WithUuid {
	created_time?: number;
}

export interface Storage {
	id?: number;
	connection_string?: string;
}

export interface Item extends WithDates, WithUuid {
	name?: string;
	mime_type?: string;
@@ -258,6 +263,7 @@ export interface Item extends WithDates, WithUuid {
	jop_encryption_applied?: number;
	jop_updated_time?: number;
	owner_id?: Uuid;
	content_storage_id?: number;
}

export const databaseSchema: DatabaseTables = {
@@ -418,6 +424,10 @@ export const databaseSchema: DatabaseTables = {
		name: { type: 'string' },
		created_time: { type: 'string' },
	},
	storages: {
		id: { type: 'number' },
		connection_string: { type: 'string' },
	},
	items: {
		id: { type: 'string' },
		name: { type: 'string' },
@@ -433,6 +443,7 @@ export const databaseSchema: DatabaseTables = {
		jop_encryption_applied: { type: 'number' },
		jop_updated_time: { type: 'string' },
		owner_id: { type: 'string' },
		content_storage_id: { type: 'number' },
	},
};
// AUTO-GENERATED-TYPES
@@ -1,6 +1,7 @@
import time from '@joplin/lib/time';
import { DbConnection, dropTables, migrateLatest } from '../db';
import newModelFactory from '../models/factory';
import storageDriverFromConfig from '../models/items/storage/storageDriverFromConfig';
import { AccountType } from '../models/UserModel';
import { User, UserFlagType } from '../services/database/types';
import { Config } from '../utils/types';
@@ -34,9 +35,12 @@ export async function createTestUsers(db: DbConnection, config: Config, options:

	const password = 'hunter1hunter2hunter3';

	if (options.count) {
		const models = newModelFactory(db, config);
	const models = newModelFactory(db, config, {
		// storageDriver: new StorageDriverDatabase(1, { dbClientType: clientType(db) }),
		storageDriver: await storageDriverFromConfig(config.storageDriver, db), // new StorageDriverDatabase(1, { dbClientType: clientType(db) }),
	});

	if (options.count) {
		const users: User[] = [];

		for (let i = 0; i < options.count; i++) {
@@ -52,7 +56,6 @@ export async function createTestUsers(db: DbConnection, config: Config, options:
	} else {
		await dropTables(db);
		await migrateLatest(db);
		const models = newModelFactory(db, config);

		for (let userNum = 1; userNum <= 2; userNum++) {
			await models.user().save({
@@ -141,7 +141,7 @@ async function noteLinkedItemInfos(userId: Uuid, itemModel: ItemModel, note: Not
	const output: LinkedItemInfos = {};

	for (const jopId of jopIds) {
		const item = await itemModel.loadByJopId(userId, jopId, { fields: ['*'] });
		const item = await itemModel.loadByJopId(userId, jopId, { fields: ['*'], withContent: true });
		if (!item) continue;

		output[jopId] = {
@@ -265,7 +265,7 @@ export async function renderItem(userId: Uuid, item: Item, share: Share, query:
	};

	if (query.resource_id) {
		const resourceItem = await models_.item().loadByName(userId, resourceBlobPath(query.resource_id), { fields: ['*'] });
		const resourceItem = await models_.item().loadByName(userId, resourceBlobPath(query.resource_id), { fields: ['*'], withContent: true });
		fileToRender.item = resourceItem;
		fileToRender.content = resourceItem.content;
		fileToRender.jopItemId = query.resource_id;
@@ -1,7 +1,7 @@
import { LoggerWrapper } from '@joplin/lib/Logger';
import config from '../config';
import { DbConnection } from '../db';
import newModelFactory, { Models } from '../models/factory';
import newModelFactory, { Models, Options as ModelFactoryOptions } from '../models/factory';
import { AppContext, Config, Env } from './types';
import routes from '../routes/routes';
import ShareService from '../services/ShareService';
@@ -23,8 +23,8 @@ async function setupServices(env: Env, models: Models, config: Config): Promise<
	return output;
}

export default async function(appContext: AppContext, env: Env, dbConnection: DbConnection, appLogger: ()=> LoggerWrapper): Promise<AppContext> {
	const models = newModelFactory(dbConnection, config());
export default async function(appContext: AppContext, env: Env, dbConnection: DbConnection, appLogger: ()=> LoggerWrapper, options: ModelFactoryOptions): Promise<AppContext> {
	const models = newModelFactory(dbConnection, config(), options);

	// The joplinBase object is immutable because it is shared by all requests.
	// Then a "joplin" context property is created from it per request, which
@@ -1,7 +1,7 @@
import { DbConnection, connectDb, disconnectDb, truncateTables } from '../../db';
import { User, Session, Item, Uuid } from '../../services/database/types';
import { createDb, CreateDbOptions } from '../../tools/dbTools';
import modelFactory from '../../models/factory';
import modelFactory, { Options as ModelFactoryOptions } from '../../models/factory';
import { AppContext, Env } from '../types';
import config, { initConfig } from '../../config';
import Logger from '@joplin/lib/Logger';
@@ -23,6 +23,7 @@ import MustacheService from '../../services/MustacheService';
import uuidgen from '../uuidgen';
import { createCsrfToken } from '../csrf';
import { cookieSet } from '../cookies';
import StorageDriverMemory from '../../models/items/storage/StorageDriverMemory';
import { parseEnv } from '../../env';

// Takes into account the fact that this file will be inside the /dist directory
@@ -37,10 +38,14 @@ export function randomHash(): string {
	return crypto.createHash('md5').update(`${Date.now()}-${Math.random()}`).digest('hex');
}

export function tempDirPath(): string {
	return `${packageRootDir}/temp/${randomHash()}`;
}

let tempDir_: string = null;
export async function tempDir(): Promise<string> {
	if (tempDir_) return tempDir_;
	tempDir_ = `${packageRootDir}/temp/${randomHash()}`;
	tempDir_ = tempDirPath();
	await fs.mkdirp(tempDir_);
	return tempDir_;
}
@@ -190,7 +195,7 @@ export async function koaAppContext(options: AppContextTestOptions = null): Prom

	const appLogger = Logger.create('AppTest');

	const baseAppContext = await setupAppContext({} as any, Env.Dev, db_, () => appLogger);
	const baseAppContext = await setupAppContext({} as any, Env.Dev, db_, () => appLogger, { storageDriver: new StorageDriverMemory(1) });

	// Set type to "any" because the Koa context has many properties and we
	// don't need to mock all of them.
@@ -238,12 +243,16 @@ export function db() {
	return db_;
}

// function baseUrl() {
// 	return 'http://localhost:22300';
// }
const storageDriverMemory = new StorageDriverMemory(1);

export function models() {
	return modelFactory(db(), config());
export function models(options: ModelFactoryOptions = null) {
	options = {
		storageDriver: storageDriverMemory,
		storageDriverFallback: null,
		...options,
	};

	return modelFactory(db(), config(), options);
}

export function parseHtml(html: string): Document {
@@ -87,6 +87,48 @@ export interface StripeConfig extends StripePublicConfig {
	webhookSecret: string;
}

export enum StorageDriverType {
	Database = 1,
	Filesystem = 2,
	Memory = 3,
	S3 = 4,
}

// The driver mode is only used by fallback drivers. Regardless of the mode, the
// fallback always works like this:
//
// When reading, first the app checks if the content exists on the main driver.
// If it does, it returns this. Otherwise it reads the content from the fallback
// driver.
//
// When writing, the app writes to the main driver. Then the mode determines how
// it writes to the fallback driver:
//
// - In read-only mode, it's going to clear the fallback driver content. This is
//   used to migrate from one driver to another. It means that over time the old
//   storage will be cleared and all content will be on the new storage.
//
// - In read/write mode, it's going to write the content to the fallback driver.
//   This is purely for safety - it allows deploying the new storage (such as the
//   filesystem or S3) while still keeping the old content up-to-date. So if
//   something goes wrong it's possible to go back to the old storage until the
//   new one is working.

export enum StorageDriverMode {
	ReadWrite = 1,
	ReadOnly = 2,
}

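To make the two modes concrete, a minimal sketch of the fallback behaviour described in the comment above; the SimpleDriver interface and the two wrapper functions are hypothetical, written only to illustrate the rules, and are not the actual ItemModel implementation:

import { StorageDriverMode } from './types';

// Hypothetical minimal driver shape, mirroring the methods added in this branch.
interface SimpleDriver {
	mode?: StorageDriverMode;
	read(itemId: string): Promise<Buffer | null>;
	write(itemId: string, content: Buffer): Promise<void>;
	exists(itemId: string): Promise<boolean>;
}

const readWithFallback = async (main: SimpleDriver, fallback: SimpleDriver, itemId: string): Promise<Buffer | null> => {
	// Prefer the main driver; fall back to the old storage for content that
	// has not been migrated yet.
	if (await main.exists(itemId)) return main.read(itemId);
	return fallback.read(itemId);
};

const writeWithFallback = async (main: SimpleDriver, fallback: SimpleDriver, itemId: string, content: Buffer): Promise<void> => {
	await main.write(itemId, content);

	if (fallback.mode === StorageDriverMode.ReadWrite) {
		// Read/write mode: keep the old storage up to date as a safety net.
		await fallback.write(itemId, content);
	} else {
		// Read-only mode: progressively empty the old storage.
		await fallback.write(itemId, Buffer.from(''));
	}
};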
export interface StorageDriverConfig {
	type?: StorageDriverType;
	path?: string;
	mode?: StorageDriverMode;
	region?: string;
	accessKeyId?: string;
	secretAccessKeyId?: string;
	bucket?: string;
}

export interface Config {
	appVersion: string;
	appName: string;
@@ -115,6 +157,8 @@ export interface Config {
	businessEmail: string;
	isJoplinCloud: boolean;
	cookieSecure: boolean;
	storageDriver: StorageDriverConfig;
	storageDriverFallback: StorageDriverConfig;
}

export enum HttpMethod {