Server: Add support for Postgres replication

Parent: 487f01d2ec
Commit: a90e3e04a4
@@ -1,24 +1,58 @@
 # For development this compose file starts the database only. The app can then
 # be started using `yarn start-dev`, which is useful for development, because
 # it means the app Docker file doesn't have to be rebuilt on each change.
+#
+# Note that log is setup to give as much information as possible, including
+# whether it's the master or slave database that is being used for a query.
+#
+# To setup and test replication, use the following config in Joplin Server. Note
+# in particular the different port, which means we access the slave and not the
+# master.
+#
+# DB_USE_SLAVE=true
+# SLAVE_POSTGRES_PASSWORD=joplin
+# SLAVE_POSTGRES_DATABASE=joplin
+# SLAVE_POSTGRES_USER=joplin
+# SLAVE_POSTGRES_PORT=5433
+# SLAVE_POSTGRES_HOST=localhost
+# USERS_WITH_REPLICATION=ID1,ID2,...

-version: '3'
+version: '2'

 services:
-    db:
-        image: postgres:16
-        command: postgres -c work_mem=100000
-        ports:
-            - "5432:5432"
-        environment:
-            - POSTGRES_PASSWORD=joplin
-            - POSTGRES_USER=joplin
-            - POSTGRES_DB=joplin
-
-    # Use this to specify additional Postgres
-    # config parameters:
-    #
-    # command:
-    #   - "postgres"
-    #   - "-c"
-    #   - "log_min_duration_statement=0"
+    postgresql-master:
+        image: 'bitnami/postgresql:16.3.0'
+        ports:
+            - '5432:5432'
+        environment:
+            - POSTGRESQL_PASSWORD=joplin
+            - POSTGRESQL_USERNAME=joplin
+            - POSTGRESQL_DATABASE=joplin
+
+            - POSTGRESQL_REPLICATION_MODE=master
+            - POSTGRESQL_REPLICATION_USER=repl_user
+            - POSTGRESQL_REPLICATION_PASSWORD=repl_password
+
+            - POSTGRESQL_LOG_HOSTNAME=true
+            - POSTGRESQL_PGAUDIT_LOG=READ,WRITE
+            - POSTGRESQL_EXTRA_FLAGS=-c work_mem=100000 -c log_statement=all
+
+    postgresql-slave:
+        image: 'bitnami/postgresql:16.3.0'
+        ports:
+            - '5433:5432'
+        depends_on:
+            - postgresql-master
+        environment:
+            - POSTGRESQL_REPLICATION_MODE=slave
+            - POSTGRESQL_REPLICATION_USER=repl_user
+            - POSTGRESQL_REPLICATION_PASSWORD=repl_password
+            - POSTGRESQL_MASTER_HOST=postgresql-master
+            - POSTGRESQL_PASSWORD=joplin
+            - POSTGRESQL_MASTER_PORT_NUMBER=5432
+
+            - POSTGRESQL_LOG_HOSTNAME=true
+            - POSTGRESQL_PGAUDIT_LOG=READ,WRITE
+            - POSTGRESQL_EXTRA_FLAGS=-c work_mem=100000 -c log_statement=all
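To make the replication comment above concrete: once the two containers are up, the setup can be sanity-checked independently of Joplin Server. The sketch below is not part of the commit; it assumes knex (already a server dependency), the credentials from the compose file, and that the `events` table exists because migrations have already run against the master.

    import knex from 'knex';

    const connect = (port: number) => knex({
        client: 'pg',
        connection: { host: 'localhost', port, user: 'joplin', password: 'joplin', database: 'joplin' },
    });

    const main = async () => {
        const master = connect(5432); // postgresql-master
        const slave = connect(5433); // postgresql-slave, read-only

        // Write to the master only; the row should appear on the slave shortly after.
        await master.insert({ id: 'repl-check', type: 1, name: 'test', created_time: Date.now() }).into('events');

        // Streaming replication is asynchronous, so allow a short delay.
        await new Promise(resolve => setTimeout(resolve, 500));

        const rows = await slave.select('*').from('events').where('id', '=', 'repl-check');
        console.info('replicated:', rows.length === 1);

        await master.destroy();
        await slave.destroy();
    };

    void main();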
@@ -5,7 +5,7 @@ import * as Koa from 'koa';
 import * as fs from 'fs-extra';
 import Logger, { LogLevel, LoggerWrapper, TargetType } from '@joplin/utils/Logger';
 import config, { fullVersionString, initConfig, runningInDocker } from './config';
-import { migrateLatest, waitForConnection, sqliteDefaultDir, latestMigration, needsMigration, migrateList, versionCheck } from './db';
+import { migrateLatest, waitForConnection, sqliteDefaultDir, latestMigration, needsMigration, migrateList, versionCheck, ConnectionCheckResult } from './db';
 import { AppContext, Env, KoaNext } from './utils/types';
 import FsDriverNode from '@joplin/lib/fs-driver-node';
 import { getDeviceTimeDrift } from '@joplin/lib/ntp';
@@ -270,7 +270,7 @@ async function main() {
        });
    } else {
        const connectionCheck = await waitForConnection(config().database);
-       const models = newModelFactory(connectionCheck.connection, config());
+       const models = newModelFactory(connectionCheck.connection, connectionCheck.connection, config());

        await selectedCommand.run(commandArgv, {
            db: connectionCheck.connection,
@@ -293,6 +293,12 @@ async function main() {
            appLogger().info('Skipping NTP time check because MAX_TIME_DRIFT is 0.');
        }

+       const printConnectionCheckInfo = (connectionCheck: ConnectionCheckResult) => {
+           const connectionCheckLogInfo = { ...connectionCheck };
+           delete connectionCheckLogInfo.connection;
+           appLogger().info('Connection check:', connectionCheckLogInfo);
+       };
+
        setLocale('en_GB');

        appLogger().info('Running in Docker:', runningInDocker());
@@ -307,11 +313,8 @@ async function main() {

        appLogger().info('Trying to connect to database...');
        const connectionCheck = await waitForConnection(config().database);
+       printConnectionCheckInfo(connectionCheck);

-       const connectionCheckLogInfo = { ...connectionCheck };
-       delete connectionCheckLogInfo.connection;
-
-       appLogger().info('Connection check:', connectionCheckLogInfo);
        const ctx = app.context as AppContext;

        await versionCheck(connectionCheck.connection);
@@ -328,16 +331,32 @@ async function main() {
            appLogger().info('Skipped database auto-migration.');
        }

-       await setupAppContext(ctx, env, connectionCheck.connection, appLogger);
+       if (config().DB_USE_SLAVE) appLogger().info('Using database replication - trying to connect to slave...');
+       const slaveConnectionCheck = config().DB_USE_SLAVE ? await waitForConnection(config().databaseSlave) : null;
+
+       if (slaveConnectionCheck) {
+           printConnectionCheckInfo(slaveConnectionCheck);
+           appLogger().info(`Users with replication: ${config().USERS_WITH_REPLICATION}`);
+       } else {
+           appLogger().info('Not using database replication...');
+       }
+
+       await setupAppContext(
+           ctx,
+           env,
+           connectionCheck.connection,
+           slaveConnectionCheck ? slaveConnectionCheck.connection : connectionCheck.connection,
+           appLogger,
+       );
+
        await initializeJoplinUtils(config(), ctx.joplinBase.models, ctx.joplinBase.services.mustache);

        appLogger().info('Performing main storage check...');
-       appLogger().info(await storageConnectionCheck(config().storageDriver, ctx.joplinBase.db, ctx.joplinBase.models));
+       appLogger().info(await storageConnectionCheck(config().storageDriver, ctx.joplinBase.db, ctx.joplinBase.dbSlave, ctx.joplinBase.models));

        if (config().storageDriverFallback) {
            appLogger().info('Performing fallback storage check...');
-           appLogger().info(await storageConnectionCheck(config().storageDriverFallback, ctx.joplinBase.db, ctx.joplinBase.models));
+           appLogger().info(await storageConnectionCheck(config().storageDriverFallback, ctx.joplinBase.db, ctx.joplinBase.dbSlave, ctx.joplinBase.models));
        }

        appLogger().info('Starting services...');
@@ -87,7 +87,7 @@ export default class StorageCommand extends BaseCommand {
            },

            [ArgvCommand.CheckConnection]: async () => {
-               logger.info(await storageConnectionCheck(argv.connection, runContext.db, runContext.models));
+               logger.info(await storageConnectionCheck(argv.connection, runContext.db, runContext.db, runContext.models));
            },

            [ArgvCommand.DeleteDatabaseContentColumn]: async () => {
@@ -20,15 +20,17 @@ export function runningInDocker(): boolean {
    return runningInDocker_;
 }

-function databaseHostFromEnv(runningInDocker: boolean, env: EnvVariables): string {
-   if (env.POSTGRES_HOST) {
+function databaseHostFromEnv(runningInDocker: boolean, env: EnvVariables, slave: boolean): string {
+   const postgresHost = slave ? env.SLAVE_POSTGRES_HOST : env.POSTGRES_HOST;
+
+   if (postgresHost) {
        // When running within Docker, the app localhost is different from the
        // host's localhost. To access the latter, Docker defines a special host
        // called "host.docker.internal", so here we swap the values if necessary.
-       if (runningInDocker && ['localhost', '127.0.0.1'].includes(env.POSTGRES_HOST)) {
+       if (runningInDocker && ['localhost', '127.0.0.1'].includes(postgresHost)) {
            return 'host.docker.internal';
        } else {
-           return env.POSTGRES_HOST;
+           return postgresHost;
        }
    }
@@ -42,7 +44,7 @@ export const fullVersionString = (config: Config) => {
    return output.join(' ');
 };

-function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables): DatabaseConfig {
+function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables, slave: boolean): DatabaseConfig {
    const baseConfig: DatabaseConfig = {
        client: DatabaseConfigClient.Null,
        name: '',
@@ -59,16 +61,16 @@ function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables): Dat
        if (env.POSTGRES_CONNECTION_STRING) {
            return {
                ...databaseConfig,
-               connectionString: env.POSTGRES_CONNECTION_STRING,
+               connectionString: slave ? env.SLAVE_POSTGRES_CONNECTION_STRING : env.POSTGRES_CONNECTION_STRING,
            };
        } else {
            return {
                ...databaseConfig,
-               name: env.POSTGRES_DATABASE,
-               user: env.POSTGRES_USER,
-               password: env.POSTGRES_PASSWORD,
-               port: env.POSTGRES_PORT,
-               host: databaseHostFromEnv(runningInDocker, env) || 'localhost',
+               name: slave ? env.SLAVE_POSTGRES_DATABASE : env.POSTGRES_DATABASE,
+               user: slave ? env.SLAVE_POSTGRES_USER : env.POSTGRES_USER,
+               password: slave ? env.SLAVE_POSTGRES_PASSWORD : env.POSTGRES_PASSWORD,
+               port: slave ? env.SLAVE_POSTGRES_PORT : env.POSTGRES_PORT,
+               host: databaseHostFromEnv(runningInDocker, env, slave) || 'localhost',
            };
        }
    }
@@ -76,7 +78,7 @@ function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables): Dat
    return {
        ...baseConfig,
        client: DatabaseConfigClient.SQLite,
-       name: env.SQLITE_DATABASE,
+       name: slave ? env.SLAVE_SQLITE_DATABASE : env.SQLITE_DATABASE,
        asyncStackTraces: true,
    };
 }
@@ -156,6 +158,7 @@ export async function initConfig(envType: Env, env: EnvVariables, overrides: any
    const apiBaseUrl = env.API_BASE_URL ? env.API_BASE_URL : baseUrl;
    const supportEmail = env.SUPPORT_EMAIL;
    const forkVersion = packageJson.joplinServer?.forkVersion;
+   const dbConfig = databaseConfigFromEnv(runningInDocker_, env, false);

    config_ = {
        ...env,
@@ -169,7 +172,8 @@ export async function initConfig(envType: Env, env: EnvVariables, overrides: any
        layoutDir: `${viewDir}/layouts`,
        tempDir: `${rootDir}/temp`,
        logDir: `${rootDir}/logs`,
-       database: databaseConfigFromEnv(runningInDocker_, env),
+       database: dbConfig,
+       databaseSlave: env.DB_USE_SLAVE ? databaseConfigFromEnv(runningInDocker_, env, true) : dbConfig,
        mailer: mailerConfigFromEnv(env),
        stripe: stripeConfigFromEnv(stripePublicConfig, env),
        port: appPort,
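A hypothetical bootstrap showing the effect of the config change (a sketch only, not in the commit; values are borrowed from the compose file above and the initConfig() call site is simplified). With DB_USE_SLAVE unset, `databaseSlave` simply aliases the master config.

    import config, { initConfig } from './config';
    import { Env } from './utils/types';
    import { EnvVariables } from './env';

    const demo = async (env: EnvVariables) => {
        // Assumed: env carries the usual POSTGRES_* settings plus
        // DB_USE_SLAVE=true, SLAVE_POSTGRES_HOST=localhost, SLAVE_POSTGRES_PORT=5433.
        await initConfig(Env.Dev, env);

        console.info(config().database.port); // 5432 - master
        console.info(config().databaseSlave.port); // 5433 - slave
    };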
packages/server/src/db.migrations.test.ts (new file, 81 lines)
@@ -0,0 +1,81 @@
+import { afterAllTests, beforeAllDb, beforeEachDb, db } from './utils/testing/testUtils';
+import sqlts from '@rmp135/sql-ts';
+import { DbConnection, migrateDown, migrateLatest, migrateUp, needsMigration, nextMigration } from './db';
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Old code before rule was applied
+async function dbSchemaSnapshot(db: DbConnection): Promise<any> {
+   // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Old code before rule was applied
+   return sqlts.toTypeScript({}, db as any);
+}
+
+describe('db.migrations', () => {
+
+   beforeEach(async () => {
+       await beforeAllDb('db.migrations', { autoMigrate: false });
+       await beforeEachDb();
+   });
+
+   afterEach(async () => {
+       await afterAllTests();
+   });
+
+   it('should allow upgrading and downgrading schema', async () => {
+       // Migrations before that didn't have a down() step.
+       const ignoreAllBefore = '20210819165350_user_flags';
+
+       // Some migrations produce no changes visible to sql-ts, in particular
+       // when the migration only adds a constraint or an index, or when a
+       // default is changed. In this case we skip the migration. Ideally we
+       // should test these too but for now that will do.
+       const doNoCheckUpgrade = [
+           '20211030103016_item_owner_name_unique',
+           '20211111134329_storage_index',
+           '20220121172409_email_recipient_default',
+           '20240413141308_changes_optimization',
+       ];
+
+       let startProcessing = false;
+
+       while (true) {
+           await migrateUp(db());
+
+           if (!startProcessing) {
+               const next = await nextMigration(db());
+               if (next === ignoreAllBefore) {
+                   startProcessing = true;
+               } else {
+                   continue;
+               }
+           }
+
+           const next = await nextMigration(db());
+
+           if (!next) break;
+
+           const initialSchema = await dbSchemaSnapshot(db());
+
+           await migrateUp(db());
+
+           const afterUpgradeSchema = await dbSchemaSnapshot(db());
+
+           if (!doNoCheckUpgrade.includes(next)) {
+               expect(initialSchema, `Schema upgrade did not produce a new schema. In migration: ${next}`).not.toEqual(afterUpgradeSchema);
+           }
+
+           await migrateDown(db());
+
+           const afterRollbackSchema = await dbSchemaSnapshot(db());
+
+           expect(initialSchema, `Schema rollback did not produce previous schema. In migration: ${next}`).toEqual(afterRollbackSchema);
+       }
+   });
+
+   it('should tell if a migration is required', async () => {
+       expect(await needsMigration(db())).toBe(true);
+
+       await migrateLatest(db());
+
+       expect(await needsMigration(db())).toBe(false);
+   });
+
+});
packages/server/src/db.replication.test.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
+import { afterAllTests, beforeAllDb, beforeEachDb, createFolder, createUserAndSession, db, dbSlave, expectThrow, getDatabaseClientType, models, packageRootDir, updateFolder } from './utils/testing/testUtils';
+import { connectDb, disconnectDb, reconnectDb, sqliteSyncSlave } from './db';
+import { ChangeType, Event } from './services/database/types';
+import { DatabaseConfig, DatabaseConfigClient } from './utils/types';
+import { createDb } from './tools/dbTools';
+import { msleep } from './utils/time';
+
+const event1: Event = {
+   id: 'test1',
+   type: 1,
+   name: 'test',
+   created_time: Date.now(),
+};
+
+const event2 = {
+   ...event1,
+   id: 'test2',
+};
+
+const beforeTest = async (extraEnv: Record<string, string> = null) => {
+   await beforeAllDb('db.replication', null, extraEnv);
+   await beforeEachDb();
+};
+
+const afterTest = async () => {
+   await afterAllTests();
+};
+
+describe('db.replication', () => {
+
+   it('should reconnect a database', async () => {
+       if (getDatabaseClientType() === DatabaseConfigClient.PostgreSQL) return;
+
+       await beforeTest();
+
+       await disconnectDb(db());
+       await expectThrow(async () => db().insert(event1).into('events'));
+
+       await reconnectDb(db());
+       await db().insert(event1).into('events');
+
+       {
+           const results = await db().select('*').from('events');
+           expect(results.length).toBe(1);
+           expect(results[0].id).toBe('test1');
+       }
+
+       await reconnectDb(db());
+       await db().insert(event2).into('events');
+
+       {
+           const results = await db().select('*').from('events');
+           expect(results.length).toBe(2);
+           expect([results[0].id, results[1].id].sort()).toEqual(['test1', 'test2']);
+       }
+
+       await afterTest();
+   });
+
+   it('should manually sync an SQLite slave instance', async () => {
+       if (getDatabaseClientType() === DatabaseConfigClient.PostgreSQL) return;
+
+       const masterConfig: DatabaseConfig = {
+           client: DatabaseConfigClient.SQLite,
+           name: `${packageRootDir}/db-master-test.sqlite`,
+       };
+
+       const slaveConfig: DatabaseConfig = {
+           client: DatabaseConfigClient.SQLite,
+           name: `${packageRootDir}/db-slave-test.sqlite`,
+       };
+
+       await createDb(masterConfig, { dropIfExists: true });
+       await createDb(slaveConfig, { dropIfExists: true });
+
+       const master = await connectDb(masterConfig);
+       const slave = await connectDb(slaveConfig);
+
+       await master.insert(event1).into('events');
+
+       expect((await master.select('*').from('events')).length).toBe(1);
+       expect((await slave.select('*').from('events')).length).toBe(0);
+
+       await sqliteSyncSlave(master, slave);
+
+       expect((await master.select('*').from('events')).length).toBe(1);
+       expect((await slave.select('*').from('events')).length).toBe(1);
+
+       await disconnectDb(master);
+       await disconnectDb(slave);
+   });
+
+   test('should track changes - using replication', async () => {
+       if (getDatabaseClientType() === DatabaseConfigClient.PostgreSQL) return;
+
+       await beforeTest({ DB_USE_SLAVE: '1' });
+
+       const { session, user } = await createUserAndSession(1, true);
+       const changeModel = models().change();
+       changeModel.usersWithReplication_ = [user.id];
+
+       const folder = {
+           id: '000000000000000000000000000000F1',
+           title: 'title 1',
+       };
+
+       const folderItem = await createFolder(session.id, folder);
+       await msleep(1);
+
+       let result = await changeModel.delta(user.id);
+
+       // We get nothing because the slave has not been synced yet
+       expect(result.items.length).toBe(0);
+
+       // But we still get the item because it doesn't use the slave database
+       expect((await models().item().loadAsJoplinItem(folderItem.id)).title).toBe('title 1');
+
+       // After sync, we should get the change
+       await sqliteSyncSlave(db(), dbSlave());
+       result = await changeModel.delta(user.id);
+       expect(result.items.length).toBe(1);
+       expect(result.items[0].type).toBe(ChangeType.Create);
+
+       await updateFolder(session.id, { ...folder, title: 'title 2' });
+       result = await changeModel.delta(user.id, { cursor: result.cursor });
+
+       // Nothing because it hasn't been synced yet
+       expect(result.items.length).toBe(0);
+
+       // But we get the latest item if requesting it directly
+       expect((await models().item().loadAsJoplinItem(folderItem.id)).title).toBe('title 2');
+
+       // After sync, we should get the change
+       await sqliteSyncSlave(db(), dbSlave());
+       result = await changeModel.delta(user.id, { cursor: result.cursor });
+
+       expect(result.items.length).toBe(1);
+       expect(result.items[0].type).toBe(ChangeType.Update);
+
+       await afterTest();
+   });
+
+});
@@ -1,17 +1,25 @@
-import { afterAllTests, beforeAllDb, beforeEachDb, db } from './utils/testing/testUtils';
-import sqlts from '@rmp135/sql-ts';
-import { DbConnection, migrateDown, migrateLatest, migrateUp, needsMigration, nextMigration } from './db';
+import { afterAllTests, beforeAllDb, beforeEachDb, db, expectThrow, packageRootDir } from './utils/testing/testUtils';
+import { connectDb, disconnectDb, reconnectDb, sqliteSyncSlave } from './db';
+import { Event } from './services/database/types';
+import { DatabaseConfig, DatabaseConfigClient } from './utils/types';
+import { createDb } from './tools/dbTools';

-// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Old code before rule was applied
-async function dbSchemaSnapshot(db: DbConnection): Promise<any> {
-   // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Old code before rule was applied
-   return sqlts.toTypeScript({}, db as any);
-}
+const event1: Event = {
+   id: 'test1',
+   type: 1,
+   name: 'test',
+   created_time: Date.now(),
+};
+
+const event2 = {
+   ...event1,
+   id: 'test2',
+};

 describe('db', () => {

    beforeEach(async () => {
-       await beforeAllDb('db', { autoMigrate: false });
+       await beforeAllDb('db');
        await beforeEachDb();
    });

@@ -19,63 +27,58 @@ describe('db', () => {
        await afterAllTests();
    });

-   it('should allow upgrading and downgrading schema', async () => {
-       // Migrations before that didn't have a down() step.
-       const ignoreAllBefore = '20210819165350_user_flags';
-
-       // Some migrations produce no changes visible to sql-ts, in particular
-       // when the migration only adds a constraint or an index, or when a
-       // default is changed. In this case we skip the migration. Ideally we
-       // should test these too but for now that will do.
-       const doNoCheckUpgrade = [
-           '20211030103016_item_owner_name_unique',
-           '20211111134329_storage_index',
-           '20220121172409_email_recipient_default',
-           '20240413141308_changes_optimization',
-       ];
-
-       let startProcessing = false;
-
-       while (true) {
-           await migrateUp(db());
-
-           if (!startProcessing) {
-               const next = await nextMigration(db());
-               if (next === ignoreAllBefore) {
-                   startProcessing = true;
-               } else {
-                   continue;
-               }
-           }
-
-           const next = await nextMigration(db());
-
-           if (!next) break;
-
-           const initialSchema = await dbSchemaSnapshot(db());
-
-           await migrateUp(db());
-
-           const afterUpgradeSchema = await dbSchemaSnapshot(db());
-
-           if (!doNoCheckUpgrade.includes(next)) {
-               expect(initialSchema, `Schema upgrade did not produce a new schema. In migration: ${next}`).not.toEqual(afterUpgradeSchema);
-           }
-
-           await migrateDown(db());
-
-           const afterRollbackSchema = await dbSchemaSnapshot(db());
-
-           expect(initialSchema, `Schema rollback did not produce previous schema. In migration: ${next}`).toEqual(afterRollbackSchema);
-       }
-   });
-
-   it('should tell if a migration is required', async () => {
-       expect(await needsMigration(db())).toBe(true);
-
-       await migrateLatest(db());
-
-       expect(await needsMigration(db())).toBe(false);
-   });
+   it('should reconnect a database', async () => {
+       await disconnectDb(db());
+       await expectThrow(async () => db().insert(event1).into('events'));
+
+       await reconnectDb(db());
+       await db().insert(event1).into('events');
+
+       {
+           const results = await db().select('*').from('events');
+           expect(results.length).toBe(1);
+           expect(results[0].id).toBe('test1');
+       }
+
+       await reconnectDb(db());
+       await db().insert(event2).into('events');
+
+       {
+           const results = await db().select('*').from('events');
+           expect(results.length).toBe(2);
+           expect([results[0].id, results[1].id].sort()).toEqual(['test1', 'test2']);
+       }
+   });
+
+   it('should manually sync an SQLite slave instance', async () => {
+       const masterConfig: DatabaseConfig = {
+           client: DatabaseConfigClient.SQLite,
+           name: `${packageRootDir}/db-master-test.sqlite`,
+       };
+
+       const slaveConfig: DatabaseConfig = {
+           client: DatabaseConfigClient.SQLite,
+           name: `${packageRootDir}/db-slave-test.sqlite`,
+       };
+
+       await createDb(masterConfig, { dropIfExists: true });
+       await createDb(slaveConfig, { dropIfExists: true });
+
+       const master = await connectDb(masterConfig);
+       const slave = await connectDb(slaveConfig);
+
+       await master.insert(event1).into('events');
+
+       expect((await master.select('*').from('events')).length).toBe(1);
+       expect((await slave.select('*').from('events')).length).toBe(0);
+
+       await sqliteSyncSlave(master, slave);
+
+       expect((await master.select('*').from('events')).length).toBe(1);
+       expect((await slave.select('*').from('events')).length).toBe(1);
+
+       await disconnectDb(master);
+       await disconnectDb(slave);
+   });

 });
@@ -5,6 +5,7 @@ import time from '@joplin/lib/time';
 import Logger from '@joplin/utils/Logger';
 import { databaseSchema } from './services/database/types';
 import { compareVersions } from 'compare-versions';
+import { copyFile } from 'fs-extra';

 // Make sure bigInteger values are numbers and not strings
 //
@@ -101,14 +102,14 @@ export function makeKnexConfig(dbConfig: DatabaseConfig): KnexDatabaseConfig {
    };
 }

-export async function waitForConnection(dbConfig: DatabaseConfig): Promise<ConnectionCheckResult> {
+export async function waitForConnection(masterConfig: DatabaseConfig): Promise<ConnectionCheckResult> {
    const timeout = 30000;
    const startTime = Date.now();
    let lastError = { message: '' };

    while (true) {
        try {
-           const connection = await connectDb(dbConfig);
+           const connection = await connectDb(masterConfig);
            const check = await connectionCheck(connection);
            if (check.error) throw check.error;
            return check;
@@ -227,6 +228,8 @@ interface KnexQueryErrorData {
    queryContext: QueryContext;
 }

+const dbConnectionConfigs_: Map<DbConnection, DatabaseConfig> = new Map();
+
 export async function connectDb(dbConfig: DatabaseConfig): Promise<DbConnection> {
    const connection = knex(makeKnexConfig(dbConfig));

@@ -255,13 +258,35 @@ export async function connectDb(dbConfig: DatabaseConfig): Promise<DbConnection>
        logger.error(...msg);
    });

+   dbConnectionConfigs_.set(connection, dbConfig);
+
    return connection;
 }

+export const reconnectDb = async (db: DbConnection) => {
+   const dbConfig = dbConnectionConfigs_.get(db);
+
+   await disconnectDb(db);
+
+   await db.initialize(makeKnexConfig(dbConfig));
+};
+
 export async function disconnectDb(db: DbConnection) {
    await db.destroy();
 }

+// This is used in tests to simulate replication in a controlled way. It allows testing how the
+// server behaves when part of the data is stale.
+export const sqliteSyncSlave = async (master: DbConnection, slave: DbConnection) => {
+   const masterConfig = dbConnectionConfigs_.get(master);
+   const slaveConfig = dbConnectionConfigs_.get(slave);
+   await disconnectDb(master);
+   await disconnectDb(slave);
+   await copyFile(masterConfig.name, slaveConfig.name);
+   await reconnectDb(master);
+   await reconnectDb(slave);
+};
+
 export async function migrateLatest(db: DbConnection, disableTransactions = false) {
    await db.migrate.latest({
        directory: migrationDir,
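Because connectDb() now records each connection's config in dbConnectionConfigs_, a handle can be torn down and brought back without re-reading the environment. A minimal usage sketch (mirroring the new tests; `dbConfig` is assumed to be a valid DatabaseConfig):

    import { DatabaseConfig } from './utils/types';
    import { connectDb, disconnectDb, reconnectDb } from './db';

    const demo = async (dbConfig: DatabaseConfig) => {
        const db = await connectDb(dbConfig); // config captured in dbConnectionConfigs_
        await disconnectDb(db); // knex pool destroyed - queries now throw
        await reconnectDb(db); // same handle, re-initialised from the captured config
        await db.select('*').from('events'); // works again
    };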
@@ -22,6 +22,7 @@ const defaultEnvValues: EnvVariables = {
    ERROR_STACK_TRACES: false,
    COOKIES_SECURE: false,
    RUNNING_IN_DOCKER: false,
+   USERS_WITH_REPLICATION: '', // Temporary

    // The admin panel is accessible only if this is an admin instance.
    // Additionally, processing services (those defined in setupTaskService.ts)
@@ -59,6 +60,7 @@ const defaultEnvValues: EnvVariables = {
    DB_SLOW_QUERY_LOG_MIN_DURATION: 1000,
    DB_AUTO_MIGRATION: true,
    DB_ALLOW_INCOMPLETE_MIGRATIONS: false,
+   DB_USE_SLAVE: false,

    POSTGRES_PASSWORD: 'joplin',
    POSTGRES_DATABASE: 'joplin',
@@ -67,8 +69,16 @@ const defaultEnvValues: EnvVariables = {
    POSTGRES_PORT: 5432,
    POSTGRES_CONNECTION_STRING: '',

+   SLAVE_POSTGRES_PASSWORD: 'joplin',
+   SLAVE_POSTGRES_DATABASE: 'joplin',
+   SLAVE_POSTGRES_USER: 'joplin',
+   SLAVE_POSTGRES_HOST: '',
+   SLAVE_POSTGRES_PORT: 5432,
+   SLAVE_POSTGRES_CONNECTION_STRING: '',
+
    // This must be the full path to the database file
    SQLITE_DATABASE: '',
+   SLAVE_SQLITE_DATABASE: '',

    // ==================================================
    // Content driver config
@@ -141,6 +151,8 @@ export interface EnvVariables {
    ERROR_STACK_TRACES: boolean;
    COOKIES_SECURE: boolean;
    RUNNING_IN_DOCKER: boolean;
+   USERS_WITH_REPLICATION: string;
+
    MAX_TIME_DRIFT: number;
    NTP_SERVER: string;
    DELTA_INCLUDES_ITEMS: boolean;
@@ -157,6 +169,7 @@ export interface EnvVariables {
    DB_SLOW_QUERY_LOG_MIN_DURATION: number;
    DB_AUTO_MIGRATION: boolean;
    DB_ALLOW_INCOMPLETE_MIGRATIONS: boolean;
+   DB_USE_SLAVE: boolean;

    POSTGRES_PASSWORD: string;
    POSTGRES_DATABASE: string;
@@ -165,7 +178,15 @@ export interface EnvVariables {
    POSTGRES_PORT: number;
    POSTGRES_CONNECTION_STRING: string;

+   SLAVE_POSTGRES_PASSWORD: string;
+   SLAVE_POSTGRES_DATABASE: string;
+   SLAVE_POSTGRES_USER: string;
+   SLAVE_POSTGRES_HOST: string;
+   SLAVE_POSTGRES_PORT: number;
+   SLAVE_POSTGRES_CONNECTION_STRING: string;
+
    SQLITE_DATABASE: string;
+   SLAVE_SQLITE_DATABASE: string;

    STORAGE_DRIVER: string;
    STORAGE_DRIVER_FALLBACK: string;
@@ -64,15 +64,19 @@ export default abstract class BaseModel<T> {

    private defaultFields_: string[] = [];
    private db_: DbConnection;
+   private dbSlave_: DbConnection;
    private transactionHandler_: TransactionHandler;
    private modelFactory_: NewModelFactoryHandler;
    private config_: Config;
    private savePoints_: SavePoint[] = [];
+   public usersWithReplication_: string[] = [];

-   public constructor(db: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
+   public constructor(db: DbConnection, dbSlave: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
        this.db_ = db;
+       this.dbSlave_ = dbSlave;
        this.modelFactory_ = modelFactory;
        this.config_ = config;
+       this.usersWithReplication_ = config.USERS_WITH_REPLICATION ? config.USERS_WITH_REPLICATION.split(',') : [];

        this.transactionHandler_ = new TransactionHandler(db);
    }
@@ -113,6 +117,15 @@ export default abstract class BaseModel<T> {
        return this.db_;
    }

+   public dbSlave(userId: Uuid = ''): DbConnection {
+       if (userId && this.usersWithReplication_.includes(userId)) {
+           logger.info(`Using slave database for user: ${userId}`);
+           return this.dbSlave_;
+       }
+
+       return this.db_;
+   }
+
    protected get defaultFields(): string[] {
        if (!this.defaultFields_.length) {
            this.defaultFields_ = Object.keys(databaseSchema[this.tableName]);
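The routing rule is small enough to restate on its own. Below is a condensed, hypothetical stand-alone version of what BaseModel.dbSlave() does (ReadRouter is not in the commit; DbConnection comes from db.ts):

    import { DbConnection } from './db';

    // Reads for whitelisted users go to the slave; everything else,
    // including all writes, stays on the master.
    class ReadRouter {
        public constructor(
            private master: DbConnection,
            private slave: DbConnection,
            private usersWithReplication: string[],
        ) {}

        public forUser(userId = ''): DbConnection {
            if (userId && this.usersWithReplication.includes(userId)) return this.slave;
            return this.master;
        }
    }

A model method would then run its read-only queries through forUser(userId), while writes keep using the master connection.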
@@ -57,8 +57,8 @@ export default class ChangeModel extends BaseModel<Change> {

    public deltaIncludesItems_: boolean;

-   public constructor(db: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
-       super(db, modelFactory, config);
+   public constructor(db: DbConnection, dbSlave: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
+       super(db, dbSlave, modelFactory, config);
        this.deltaIncludesItems_ = config.DELTA_INCLUDES_ITEMS;
    }

@@ -199,8 +199,8 @@ export default class ChangeModel extends BaseModel<Change> {
        if (!doCountQuery) {
            finalParams.push(limit);

-           if (isPostgres(this.db)) {
-               query = this.db.raw(`
+           if (isPostgres(this.dbSlave(userId))) {
+               query = this.dbSlave(userId).raw(`
                    WITH cte1 AS MATERIALIZED (
                        ${subQuery1}
                    )
@@ -214,7 +214,7 @@ export default class ChangeModel extends BaseModel<Change> {
                    LIMIT ?
                `, finalParams);
            } else {
-               query = this.db.raw(`
+               query = this.dbSlave(userId).raw(`
                    SELECT ${fieldsSql} FROM (${subQuery1}) as sub1
                    UNION ALL
                    SELECT ${fieldsSql} FROM (${subQuery2}) as sub2
@@ -223,7 +223,7 @@ export default class ChangeModel extends BaseModel<Change> {
                `, finalParams);
            }
        } else {
-           query = this.db.raw(`
+           query = this.dbSlave(userId).raw(`
                SELECT count(*) as total
                FROM (
                    (${subQuery1})
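Routing delta() reads through the slave means a whitelisted user's delta can lag writes until the replica catches up; the new db.replication.test.ts pins this behaviour down. Schematically (test-utility names from that file; the user is assumed to be in usersWithReplication_):

    import { createFolder, db, dbSlave, models } from './utils/testing/testUtils';
    import { sqliteSyncSlave } from './db';

    const illustrateStaleness = async (sessionId: string, userId: string, folder: any) => {
        await createFolder(sessionId, folder); // the write lands on the master
        let page = await models().change().delta(userId); // the read comes from the slave
        // page.items may still be empty here...

        await sqliteSyncSlave(db(), dbSlave()); // ...until the replica is synced,
        page = await models().change().delta(userId); // ...after which the Create change shows up.
    };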
@@ -1,4 +1,4 @@
-import { createUserAndSession, beforeAllDb, afterAllTests, beforeEachDb, models, createItemTree, createResource, createNote, createItemTree3, db, tempDir, expectNotThrow, expectHttpError } from '../utils/testing/testUtils';
+import { createUserAndSession, beforeAllDb, afterAllTests, beforeEachDb, models, createItemTree, createResource, createNote, createItemTree3, db, tempDir, expectNotThrow, expectHttpError, dbSlave } from '../utils/testing/testUtils';
 import { shareFolderWithUser } from '../utils/testing/shareApiUtils';
 import { resourceBlobPath } from '../utils/joplinUtils';
 import newModelFactory from './factory';
@@ -275,7 +275,7 @@ describe('ItemModel', () => {
    test('should respect the hard item size limit', async () => {
        const { user: user1 } = await createUserAndSession(1);

-       let models = newModelFactory(db(), config());
+       let models = newModelFactory(db(), dbSlave(), config());

        let result = await models.item().saveFromRawContent(user1, {
            body: Buffer.from('1234'),
@@ -285,7 +285,7 @@ describe('ItemModel', () => {
        const item = result['test1.txt'].item;

        config().itemSizeHardLimit = 3;
-       models = newModelFactory(db(), config());
+       models = newModelFactory(db(), dbSlave(), config());

        result = await models.item().saveFromRawContent(user1, {
            body: Buffer.from('1234'),
@@ -297,7 +297,7 @@ describe('ItemModel', () => {
        await expectHttpError(async () => models.item().loadWithContent(item.id), ErrorPayloadTooLarge.httpCode);

        config().itemSizeHardLimit = 1000;
-       models = newModelFactory(db(), config());
+       models = newModelFactory(db(), dbSlave(), config());

        await expectNotThrow(async () => models.item().loadWithContent(item.id));
    });
@@ -316,18 +316,18 @@ describe('ItemModel', () => {
        path: tempDir2,
    };

-   const fromModels = newModelFactory(db(), {
+   const fromModels = newModelFactory(db(), dbSlave(), {
        ...config(),
        storageDriver: fromStorageConfig,
    });

-   const toModels = newModelFactory(db(), {
+   const toModels = newModelFactory(db(), dbSlave(), {
        ...config(),
        storageDriver: toStorageConfig,
    });

-   const fromDriver = await loadStorageDriver(fromStorageConfig, db());
-   const toDriver = await loadStorageDriver(toStorageConfig, db());
+   const fromDriver = await loadStorageDriver(fromStorageConfig, db(), dbSlave());
+   const toDriver = await loadStorageDriver(toStorageConfig, db(), dbSlave());

    return {
        fromStorageConfig,
@@ -364,7 +364,7 @@ describe('ItemModel', () => {

    await msleep(2);

-   const toModels = newModelFactory(db(), {
+   const toModels = newModelFactory(db(), dbSlave(), {
        ...config(),
        storageDriver: toStorageConfig,
    });
@@ -75,8 +75,8 @@ export default class ItemModel extends BaseModel<Item> {

    private static storageDrivers_: Map<StorageDriverConfig, StorageDriverBase> = new Map();

-   public constructor(db: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
-       super(db, modelFactory, config);
+   public constructor(db: DbConnection, dbSlave: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
+       super(db, dbSlave, modelFactory, config);

        this.storageDriverConfig_ = config.storageDriver;
        this.storageDriverConfigFallback_ = config.storageDriverFallback;
@@ -102,7 +102,7 @@ export default class ItemModel extends BaseModel<Item> {
        let driver = ItemModel.storageDrivers_.get(config);

        if (!driver) {
-           driver = await loadStorageDriver(config, this.db);
+           driver = await loadStorageDriver(config, this.db, this.dbSlave());
            ItemModel.storageDrivers_.set(config, driver);
        }

@@ -331,7 +331,7 @@ export default class ItemModel extends BaseModel<Item> {
        let fromDriver: StorageDriverBase = drivers[item.content_storage_id];

        if (!fromDriver) {
-           fromDriver = await loadStorageDriver(item.content_storage_id, this.db);
+           fromDriver = await loadStorageDriver(item.content_storage_id, this.db, this.dbSlave());
            drivers[item.content_storage_id] = fromDriver;
        }

@@ -118,8 +118,8 @@ export default class UserModel extends BaseModel<User> {

    private ldapConfig_: LdapConfig[];

-   public constructor(db: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
-       super(db, modelFactory, config);
+   public constructor(db: DbConnection, dbSlave: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
+       super(db, dbSlave, modelFactory, config);

        this.ldapConfig_ = config.ldap;
    }
@@ -83,105 +83,107 @@ export type NewModelFactoryHandler = (db: DbConnection)=> Models;
 export class Models {

    private db_: DbConnection;
+   private dbSlave_: DbConnection;
    private config_: Config;

-   public constructor(db: DbConnection, config: Config) {
+   public constructor(db: DbConnection, dbSlave: DbConnection, config: Config) {
        this.db_ = db;
+       this.dbSlave_ = dbSlave;
        this.config_ = config;

        this.newModelFactory = this.newModelFactory.bind(this);
    }

    private newModelFactory(db: DbConnection) {
-       return new Models(db, this.config_);
+       return new Models(db, this.dbSlave_, this.config_);
    }

    public item() {
-       return new ItemModel(this.db_, this.newModelFactory, this.config_);
+       return new ItemModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public user() {
-       return new UserModel(this.db_, this.newModelFactory, this.config_);
+       return new UserModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public email() {
-       return new EmailModel(this.db_, this.newModelFactory, this.config_);
+       return new EmailModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public userItem() {
-       return new UserItemModel(this.db_, this.newModelFactory, this.config_);
+       return new UserItemModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public token() {
-       return new TokenModel(this.db_, this.newModelFactory, this.config_);
+       return new TokenModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public itemResource() {
-       return new ItemResourceModel(this.db_, this.newModelFactory, this.config_);
+       return new ItemResourceModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public apiClient() {
-       return new ApiClientModel(this.db_, this.newModelFactory, this.config_);
+       return new ApiClientModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public session() {
-       return new SessionModel(this.db_, this.newModelFactory, this.config_);
+       return new SessionModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public change() {
-       return new ChangeModel(this.db_, this.newModelFactory, this.config_);
+       return new ChangeModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public notification() {
-       return new NotificationModel(this.db_, this.newModelFactory, this.config_);
+       return new NotificationModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public share() {
-       return new ShareModel(this.db_, this.newModelFactory, this.config_);
+       return new ShareModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public shareUser() {
-       return new ShareUserModel(this.db_, this.newModelFactory, this.config_);
+       return new ShareUserModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public keyValue() {
-       return new KeyValueModel(this.db_, this.newModelFactory, this.config_);
+       return new KeyValueModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public subscription() {
-       return new SubscriptionModel(this.db_, this.newModelFactory, this.config_);
+       return new SubscriptionModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public userFlag() {
-       return new UserFlagModel(this.db_, this.newModelFactory, this.config_);
+       return new UserFlagModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public event() {
-       return new EventModel(this.db_, this.newModelFactory, this.config_);
+       return new EventModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public lock() {
-       return new LockModel(this.db_, this.newModelFactory, this.config_);
+       return new LockModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public storage() {
-       return new StorageModel(this.db_, this.newModelFactory, this.config_);
+       return new StorageModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public userDeletion() {
-       return new UserDeletionModel(this.db_, this.newModelFactory, this.config_);
+       return new UserDeletionModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public backupItem() {
-       return new BackupItemModel(this.db_, this.newModelFactory, this.config_);
+       return new BackupItemModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

    public taskState() {
-       return new TaskStateModel(this.db_, this.newModelFactory, this.config_);
+       return new TaskStateModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
    }

 }

-export default function newModelFactory(db: DbConnection, config: Config): Models {
-   return new Models(db, config);
+export default function newModelFactory(db: DbConnection, dbSlave: DbConnection, config: Config): Models {
+   return new Models(db, dbSlave, config);
 }
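Putting the factory change together with the startup code in app.ts, the wiring reduces to the following sketch (assumes config() is already initialised; when DB_USE_SLAVE is off, the master connection is simply passed twice, which is also what the CLI code path above does):

    import config from './config';
    import { connectDb } from './db';
    import newModelFactory from './models/factory';

    const bootstrapModels = async () => {
        const master = await connectDb(config().database);
        const slave = config().DB_USE_SLAVE ? await connectDb(config().databaseSlave) : master;
        return newModelFactory(master, slave, config());
    };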
@@ -1,4 +1,4 @@
-import { afterAllTests, beforeAllDb, beforeEachDb, db, expectThrow, models } from '../../../utils/testing/testUtils';
+import { afterAllTests, beforeAllDb, beforeEachDb, db, dbSlave, expectThrow, models } from '../../../utils/testing/testUtils';
 import { StorageDriverType } from '../../../utils/types';
 import loadStorageDriver from './loadStorageDriver';

@@ -18,13 +18,13 @@ describe('loadStorageDriver', () => {

    test('should load a driver and assign an ID to it', async () => {
        {
-           const newDriver = await loadStorageDriver({ type: StorageDriverType.Memory }, db());
+           const newDriver = await loadStorageDriver({ type: StorageDriverType.Memory }, db(), dbSlave());
            expect(newDriver.storageId).toBe(1);
            expect((await models().storage().count())).toBe(1);
        }

        {
-           const newDriver = await loadStorageDriver({ type: StorageDriverType.Filesystem, path: '/just/testing' }, db());
+           const newDriver = await loadStorageDriver({ type: StorageDriverType.Filesystem, path: '/just/testing' }, db(), dbSlave());
            expect(newDriver.storageId).toBe(2);
            expect((await models().storage().count())).toBe(2);
        }
@@ -14,7 +14,7 @@ export interface Options {
 	assignDriverId?: boolean;
 }

-export default async function(config: StorageDriverConfig | number, db: DbConnection, options: Options = null): Promise<StorageDriverBase | null> {
+export default async function(config: StorageDriverConfig | number, db: DbConnection, dbSlave: DbConnection, options: Options = null): Promise<StorageDriverBase | null> {
 	if (!config) return null;

 	options = {
@@ -27,14 +27,14 @@ export default async function(config: StorageDriverConfig | number, db: DbConnec
 	if (typeof config === 'number') {
 		storageId = config;

-		const models = newModelFactory(db, globalConfig());
+		const models = newModelFactory(db, dbSlave, globalConfig());
 		const storage = await models.storage().byId(storageId);
 		if (!storage) throw new Error(`No such storage ID: ${storageId}`);

 		config = parseStorageDriverConnectionString(storage.connection_string);
 	} else {
 		if (options.assignDriverId) {
-			const models = newModelFactory(db, globalConfig());
+			const models = newModelFactory(db, dbSlave, globalConfig());

 			const connectionString = serializeStorageConfig(config);
 			let storage = await models.storage().byConnectionString(connectionString);
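Every caller of `loadStorageDriver` now threads the replica connection through as well. A hedged sketch of the new call shape, with imports as they appear in the test file above:

```typescript
import { db, dbSlave } from '../../../utils/testing/testUtils';
import { StorageDriverType } from '../../../utils/types';
import loadStorageDriver from './loadStorageDriver';

// Passing db() twice would be the replication-off degenerate case.
const checkMemoryDriver = async () => {
	const driver = await loadStorageDriver({ type: StorageDriverType.Memory }, db(), dbSlave(), { assignDriverId: false });
	return driver;
};
```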
@@ -3,7 +3,7 @@
 import config from '../../../config';
 import { Item } from '../../../services/database/types';
 import { CustomErrorCode } from '../../../utils/errors';
-import { createUserAndSession, db, makeNoteSerializedBody, models } from '../../../utils/testing/testUtils';
+import { createUserAndSession, db, dbSlave, makeNoteSerializedBody, models } from '../../../utils/testing/testUtils';
 import { Config, StorageDriverConfig, StorageDriverMode } from '../../../utils/types';
 import newModelFactory from '../../factory';
 import loadStorageDriver from './loadStorageDriver';
@@ -15,7 +15,7 @@ const newTestModels = (driverConfig: StorageDriverConfig, driverConfigFallback:
 		storageDriver: driverConfig,
 		storageDriverFallback: driverConfigFallback,
 	};
-	return newModelFactory(db(), newConfig);
+	return newModelFactory(db(), dbSlave(), newConfig);
 };

 export function shouldWriteToContentAndReadItBack(driverConfig: StorageDriverConfig) {
@@ -281,7 +281,7 @@ export function shouldUpdateContentStorageIdAfterSwitchingDriver(oldDriverConfig

 export function shouldThrowNotFoundIfNotExist(driverConfig: StorageDriverConfig) {
 	test('should throw not found if item does not exist', async () => {
-		const driver = await loadStorageDriver(driverConfig, db());
+		const driver = await loadStorageDriver(driverConfig, db(), dbSlave());

 		// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Old code before rule was applied
 		let error: any = null;
@@ -13,6 +13,19 @@ export interface DropDbOptions {
 	ignoreIfNotExists: boolean;
 }

+const getPostgresToolPath = async (name: string) => {
+	const candidates = [
+		'/usr/local/opt/postgresql@16/bin',
+	];
+
+	for (const candidate of candidates) {
+		const p = `${candidate}/${name}`;
+		if (await fs.pathExists(p)) return p;
+	}
+
+	return name;
+};
+
 export async function createDb(config: DatabaseConfig, options: CreateDbOptions = null) {
 	options = {
 		dropIfExists: false,
@@ -22,7 +35,7 @@ export async function createDb(config: DatabaseConfig, options: CreateDbOptions

 	if (config.client === 'pg') {
 		const cmd: string[] = [
-			'createdb',
+			await getPostgresToolPath('createdb'),
 			'--host', config.host,
 			'--port', config.port.toString(),
 			'--username', config.user,
@@ -64,7 +77,7 @@ export async function dropDb(config: DatabaseConfig, options: DropDbOptions = nu

 	if (config.client === 'pg') {
 		const cmd: string[] = [
-			'dropdb',
+			await getPostgresToolPath('dropdb'),
 			'--host', config.host,
 			'--port', config.port.toString(),
 			'--username', config.user,
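`getPostgresToolPath` prefers a Homebrew-style install location and otherwise falls back to whatever is on PATH. A self-contained sketch of how the resolved binary might be invoked; `child_process` stands in for the exec helper that dbTools actually uses:

```typescript
import { execFile } from 'child_process';
import { promisify } from 'util';
import * as fs from 'fs-extra';

const execFileAsync = promisify(execFile);

// Same fallback logic as the diff: prefer the Homebrew binary, else rely on PATH.
const getPostgresToolPath = async (name: string): Promise<string> => {
	const candidates = ['/usr/local/opt/postgresql@16/bin'];
	for (const candidate of candidates) {
		const p = `${candidate}/${name}`;
		if (await fs.pathExists(p)) return p;
	}
	return name;
};

// Hypothetical runner, not part of the commit: create a database via createdb.
const createDatabaseSketch = async (host: string, port: number, user: string, name: string) => {
	const createdb = await getPostgresToolPath('createdb');
	await execFileAsync(createdb, ['--host', host, '--port', String(port), '--username', user, name]);
};
```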
@@ -54,7 +54,7 @@ export async function createTestUsers(db: DbConnection, config: Config, options:
 	};

 	const password = '111111';
-	const models = newModelFactory(db, config);
+	const models = newModelFactory(db, db, config);

 	await truncateTables(db, includedTables);

@@ -127,7 +127,7 @@ export async function createTestUsers(db: DbConnection, config: Config, options:
 }

 export async function createUserDeletions(db: DbConnection, config: Config) {
-	const models = newModelFactory(db, config);
+	const models = newModelFactory(db, db, config);

 	const users = await models.user().all();

@@ -28,8 +28,8 @@ async function setupServices(env: Env, models: Models, config: Config): Promise<
 	return output;
 }

-export default async function(appContext: AppContext, env: Env, dbConnection: DbConnection, appLogger: ()=> LoggerWrapper): Promise<AppContext> {
-	const models = newModelFactory(dbConnection, config());
+export default async function(appContext: AppContext, env: Env, dbConnection: DbConnection, slaveConnection: DbConnection, appLogger: ()=> LoggerWrapper): Promise<AppContext> {
+	const models = newModelFactory(dbConnection, slaveConnection, config());

 	// The joplinBase object is immutable because it is shared by all requests.
 	// Then a "joplin" context property is created from it per request, which
@@ -39,6 +39,7 @@ export default async function(appContext: AppContext, env: Env, dbConnection: Db
 	appContext.joplinBase = Object.freeze({
 		env: env,
 		db: dbConnection,
+		dbSlave: slaveConnection,
 		models: models,
 		services: await setupServices(env, models, config()),
 		appLogger: appLogger,
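The frozen `joplinBase` context now also carries the replica connection. A sketch with stand-in types of how per-request code could reach it; the accessor is illustrative only, not part of the commit:

```typescript
// Stand-in shapes; the real AppContext lives in the server's types module.
type DbConnection = { name: string };

interface JoplinBaseSketch {
	db: DbConnection;
	dbSlave: DbConnection;
}

interface AppContextSketch {
	joplinBase: JoplinBaseSketch;
}

// Illustrative accessor: route read-only work to the replica connection.
const readConnection = (ctx: AppContextSketch): DbConnection => {
	return ctx.joplinBase.dbSlave;
};
```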
@@ -6,12 +6,12 @@ import { Context } from '../models/items/storage/StorageDriverBase';
 import { StorageDriverConfig, StorageDriverType } from './types';
 import { uuidgen } from '@joplin/lib/uuid';

-export default async function(connection: string | StorageDriverConfig, db: DbConnection, models: Models): Promise<string> {
+export default async function(connection: string | StorageDriverConfig, db: DbConnection, dbSlave: DbConnection, models: Models): Promise<string> {
 	const storageConfig = typeof connection === 'string' ? parseStorageConnectionString(connection) : connection;

 	if (storageConfig.type === StorageDriverType.Database) return 'Database storage is special and cannot be checked this way. If the connection to the database was successful then the storage driver should work too.';

-	const driver = await loadStorageDriver(storageConfig, db, { assignDriverId: false });
+	const driver = await loadStorageDriver(storageConfig, db, dbSlave, { assignDriverId: false });
 	const itemId = `testingconnection${uuidgen(8)}`;
 	const itemContent = Buffer.from(uuidgen(8));
 	const context: Context = { models };
@@ -2,7 +2,7 @@ import { DbConnection, connectDb, disconnectDb, truncateTables } from '../../db'
 import { User, Session, Item, Uuid } from '../../services/database/types';
 import { createDb, CreateDbOptions } from '../../tools/dbTools';
 import modelFactory from '../../models/factory';
-import { AppContext, Env } from '../types';
+import { AppContext, DatabaseConfigClient, Env } from '../types';
 import config, { initConfig } from '../../config';
 import Logger from '@joplin/utils/Logger';
 import FakeCookies from './koa/FakeCookies';
@@ -29,9 +29,10 @@ import initLib from '@joplin/lib/initLib';

 // Takes into account the fact that this file will be inside the /dist directory
 // when it runs.
-const packageRootDir = path.dirname(path.dirname(path.dirname(__dirname)));
+export const packageRootDir = path.dirname(path.dirname(path.dirname(__dirname)));

 let db_: DbConnection = null;
+let dbSlave_: DbConnection = null;

 // require('source-map-support').install();

@@ -68,13 +69,24 @@ function initGlobalLogger() {
 	initLib(globalLogger);
 }

+export const getDatabaseClientType = () => {
+	if (process.env.JOPLIN_TESTS_SERVER_DB === 'pg') return DatabaseConfigClient.PostgreSQL;
+	return DatabaseConfigClient.SQLite;
+};
+
 let createdDbPath_: string = null;
-export async function beforeAllDb(unitName: string, createDbOptions: CreateDbOptions = null) {
+let createdDbSlavePath_: string = null;
+export async function beforeAllDb(unitName: string, createDbOptions: CreateDbOptions = null, extraEnv: Record<string, string> = null) {
 	unitName = unitName.replace(/\//g, '_');

+	const useDbSlave = extraEnv && extraEnv.DB_USE_SLAVE === '1';
+
 	createdDbPath_ = `${packageRootDir}/db-test-${unitName}.sqlite`;
 	await fs.remove(createdDbPath_);

+	createdDbSlavePath_ = `${packageRootDir}/db-slave-test-${unitName}.sqlite`;
+	await fs.remove(createdDbSlavePath_);
+
 	const tempDir = `${packageRootDir}/temp/test-${unitName}`;
 	await fs.mkdirp(tempDir);

@@ -84,20 +96,29 @@ export async function beforeAllDb(unitName: string, createDbOptions: CreateDbOpt
 	//
 	// JOPLIN_TESTS_SERVER_DB=pg yarn test

-	if (process.env.JOPLIN_TESTS_SERVER_DB === 'pg') {
+	if (getDatabaseClientType() === DatabaseConfigClient.PostgreSQL) {
 		await initConfig(Env.Dev, parseEnv({
 			DB_CLIENT: 'pg',

 			POSTGRES_DATABASE: unitName,
 			POSTGRES_USER: 'joplin',
 			POSTGRES_PASSWORD: 'joplin',

+			SLAVE_POSTGRES_DATABASE: unitName,
+			SLAVE_POSTGRES_USER: 'joplin',
+			SLAVE_POSTGRES_PASSWORD: 'joplin',
+
 			SUPPORT_EMAIL: 'testing@localhost',
+			...extraEnv,
 		}), {
 			tempDir: tempDir,
 		});
 	} else {
 		await initConfig(Env.Dev, parseEnv({
 			SQLITE_DATABASE: createdDbPath_,
+			SLAVE_SQLITE_DATABASE: createdDbSlavePath_,
 			SUPPORT_EMAIL: 'testing@localhost',
+			...extraEnv,
 		}), {
 			tempDir: tempDir,
 		});
@@ -108,6 +129,13 @@ export async function beforeAllDb(unitName: string, createDbOptions: CreateDbOpt
 	await createDb(config().database, { dropIfExists: true, ...createDbOptions });
 	db_ = await connectDb(config().database);

+	if (useDbSlave) {
+		await createDb(config().databaseSlave, { dropIfExists: true, ...createDbOptions });
+		dbSlave_ = await connectDb(config().databaseSlave);
+	} else {
+		dbSlave_ = db_;
+	}
+
 	const mustache = new MustacheService(config().viewDir, config().baseUrl);
 	await mustache.loadPartials();

@@ -124,6 +152,11 @@ export async function afterAllTests() {
 		db_ = null;
 	}

+	if (dbSlave_) {
+		await disconnectDb(dbSlave_);
+		dbSlave_ = null;
+	}
+
 	if (tempDir_) {
 		await fs.remove(tempDir_);
 		tempDir_ = null;
@@ -208,7 +241,7 @@ export async function koaAppContext(options: AppContextTestOptions = null): Prom
 	const appLogger = Logger.create('AppTest');

 	// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Old code before rule was applied
-	const baseAppContext = await setupAppContext({} as any, Env.Dev, db_, () => appLogger);
+	const baseAppContext = await setupAppContext({} as any, Env.Dev, db_, dbSlave_, () => appLogger);

 	// Set type to "any" because the Koa context has many properties and we
 	// don't need to mock all of them.
@@ -257,8 +290,16 @@ export function db() {
 	return db_;
 }

+export function dbSlave() {
+	return dbSlave_;
+}
+
+export function dbSlaveSync() {
+
+}
+
 export function models() {
-	return modelFactory(db(), config());
+	return modelFactory(db(), dbSlave(), config());
 }

 export function parseHtml(html: string): Document {
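Putting the new test utilities together: `beforeAllDb` now accepts an `extraEnv` map, and setting `DB_USE_SLAVE=1` there makes it create and connect a separate slave database, which `dbSlave()` then exposes. A sketch of a Jest suite using this, assuming the usual testUtils import path:

```typescript
import { afterAllTests, beforeAllDb, db, dbSlave } from '../../utils/testing/testUtils';

describe('replication', () => {
	beforeAll(async () => {
		// DB_USE_SLAVE=1 makes beforeAllDb create a distinct slave database.
		await beforeAllDb('replication', null, { DB_USE_SLAVE: '1' });
	});

	afterAll(async () => {
		await afterAllTests();
	});

	test('should expose distinct master and slave connections', () => {
		// Without DB_USE_SLAVE, dbSlave() would return the master connection.
		expect(dbSlave()).not.toBe(db());
	});
});
```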
@@ -165,6 +165,7 @@ export interface Config extends EnvVariables {
 	accountTypesEnabled: boolean;
 	showErrorStackTraces: boolean;
 	database: DatabaseConfig;
+	databaseSlave: DatabaseConfig;
 	mailer: MailerConfig;
 	stripe: StripeConfig;
 	supportEmail: string;
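Finally, `Config` gains a `databaseSlave` entry alongside `database`. A hedged sketch of how a caller might choose between the two; the `useSlave` flag is a stand-in for however the server decides (for example the `DB_USE_SLAVE` setting used in the tests above):

```typescript
// Stand-in types; the real Config and DatabaseConfig are in utils/types.ts.
interface DatabaseConfig { name: string }

interface ConfigSketch {
	database: DatabaseConfig;
	databaseSlave: DatabaseConfig;
}

// Pick the replica for reads only when replication is enabled; fall back to
// the master otherwise.
const pickDatabase = (config: ConfigSketch, useSlave: boolean): DatabaseConfig => {
	return useSlave ? config.databaseSlave : config.database;
};
```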