Mirror of https://github.com/laurent22/joplin.git (synced 2024-12-24 10:27:10 +02:00)

Commit 087a4b28b1: "pg setup" (parent 3813448b1e)
@@ -5,7 +5,8 @@
 version: '3'

 services:
-    db:
+
+    postgres_master:
         image: postgres:16
         command: postgres -c work_mem=100000
         ports:
@@ -14,6 +15,34 @@ services:
             - POSTGRES_PASSWORD=joplin
             - POSTGRES_USER=joplin
            - POSTGRES_DB=joplin
+
+            # - POSTGRESQL_PGAUDIT_LOG=READ,WRITE
+            - POSTGRESQL_LOG_HOSTNAME=true
+            - POSTGRESQL_REPLICATION_MODE=master
+            - POSTGRESQL_REPLICATION_USER=repl_user
+            - POSTGRESQL_REPLICATION_PASSWORD=repl_password
+
+    postgres_slave:
+        image: postgres:16
+        command: postgres -c work_mem=100000
+        ports:
+            - "5433:5432"
+        depends_on:
+            - postgres_master
+        environment:
+            - POSTGRES_PASSWORD=joplin
+            - POSTGRES_USER=joplin
+            - POSTGRES_DB=joplin
+
+            # - POSTGRESQL_PGAUDIT_LOG=READ,WRITE
+            - POSTGRESQL_MASTER_HOST=postgresql-master
+            - POSTGRESQL_LOG_HOSTNAME=true
+            - POSTGRESQL_REPLICATION_MODE=slave
+            - POSTGRESQL_REPLICATION_USER=repl_user
+            - POSTGRESQL_REPLICATION_PASSWORD=repl_password
+            - POSTGRESQL_MASTER_PORT_NUMBER=5432
+
+

     # Use this to specify additional Postgres
     # config parameters:
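
Note (not part of the diff): a server instance running on the host would reach these two containers through the published ports, 5432 for postgres_master and 5433 for postgres_slave. A minimal sketch of the matching environment values, using the variable names introduced further down in env.ts; the localhost values are assumptions for a local setup.

// Sketch only: values chosen to match the compose services above.
const localPgEnv: Partial<EnvVariables> = {
	DB_CLIENT: 'pg',

	POSTGRES_HOST: 'localhost',
	POSTGRES_PORT: 5432, // postgres_master published port
	POSTGRES_DATABASE: 'joplin',
	POSTGRES_USER: 'joplin',
	POSTGRES_PASSWORD: 'joplin',

	DB_USE_SLAVE: true,
	SLAVE_POSTGRES_HOST: 'localhost',
	SLAVE_POSTGRES_PORT: 5433, // postgres_slave published port
	SLAVE_POSTGRES_DATABASE: 'joplin',
	SLAVE_POSTGRES_USER: 'joplin',
	SLAVE_POSTGRES_PASSWORD: 'joplin',
};
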
@@ -20,8 +20,8 @@ export function runningInDocker(): boolean {
 	return runningInDocker_;
 }

-function databaseHostFromEnv(runningInDocker: boolean, env: EnvVariables, replica: boolean): string {
-	const postgresHost = replica ? env.REPLICA_POSTGRES_HOST : env.POSTGRES_HOST;
+function databaseHostFromEnv(runningInDocker: boolean, env: EnvVariables, slave: boolean): string {
+	const postgresHost = slave ? env.SLAVE_POSTGRES_HOST : env.POSTGRES_HOST;

 	if (postgresHost) {
 		// When running within Docker, the app localhost is different from the
@@ -44,7 +44,7 @@ export const fullVersionString = (config: Config) => {
 	return output.join(' ');
 };

-function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables, replica: boolean): DatabaseConfig {
+function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables, slave: boolean): DatabaseConfig {
 	const baseConfig: DatabaseConfig = {
 		client: DatabaseConfigClient.Null,
 		name: '',
@@ -61,16 +61,16 @@ function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables, repl
 	if (env.POSTGRES_CONNECTION_STRING) {
 		return {
 			...databaseConfig,
-			connectionString: replica ? env.REPLICA_POSTGRES_CONNECTION_STRING : env.POSTGRES_CONNECTION_STRING,
+			connectionString: slave ? env.SLAVE_POSTGRES_CONNECTION_STRING : env.POSTGRES_CONNECTION_STRING,
 		};
 	} else {
 		return {
 			...databaseConfig,
-			name: replica ? env.REPLICA_POSTGRES_DATABASE : env.POSTGRES_DATABASE,
-			user: replica ? env.REPLICA_POSTGRES_USER : env.POSTGRES_USER,
-			password: replica ? env.REPLICA_POSTGRES_PASSWORD : env.POSTGRES_PASSWORD,
-			port: replica ? env.REPLICA_POSTGRES_PORT : env.POSTGRES_PORT,
-			host: databaseHostFromEnv(runningInDocker, env, replica) || 'localhost',
+			name: slave ? env.SLAVE_POSTGRES_DATABASE : env.POSTGRES_DATABASE,
+			user: slave ? env.SLAVE_POSTGRES_USER : env.POSTGRES_USER,
+			password: slave ? env.SLAVE_POSTGRES_PASSWORD : env.POSTGRES_PASSWORD,
+			port: slave ? env.SLAVE_POSTGRES_PORT : env.POSTGRES_PORT,
+			host: databaseHostFromEnv(runningInDocker, env, slave) || 'localhost',
 		};
 	}
 }
@@ -78,7 +78,7 @@ function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables, repl
 	return {
 		...baseConfig,
 		client: DatabaseConfigClient.SQLite,
-		name: env.SQLITE_DATABASE,
+		name: slave ? env.SLAVE_SQLITE_DATABASE : env.SQLITE_DATABASE,
 		asyncStackTraces: true,
 	};
 }
@@ -173,7 +173,7 @@ export async function initConfig(envType: Env, env: EnvVariables, overrides: any
 		tempDir: `${rootDir}/temp`,
 		logDir: `${rootDir}/logs`,
 		database: dbConfig,
-		databaseReplica: env.DB_USE_REPLICA ? databaseConfigFromEnv(runningInDocker_, env, true) : dbConfig,
+		databaseSlave: env.DB_USE_SLAVE ? databaseConfigFromEnv(runningInDocker_, env, true) : dbConfig,
 		mailer: mailerConfigFromEnv(env),
 		stripe: stripeConfigFromEnv(stripePublicConfig, env),
 		port: appPort,
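
In other words, initConfig only builds a second connection config when DB_USE_SLAVE is set; otherwise databaseSlave simply reuses the primary config. A short sketch of that resolution, using the names from the hunks above:

// Sketch of how the two configs diverge (names as in the diff above).
const dbConfig = databaseConfigFromEnv(runningInDocker_, env, false); // POSTGRES_* / SQLITE_DATABASE
const dbSlaveConfig = env.DB_USE_SLAVE
	? databaseConfigFromEnv(runningInDocker_, env, true) // SLAVE_POSTGRES_* / SLAVE_SQLITE_DATABASE
	: dbConfig; // no slave configured: fall back to the primary connection
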
@@ -59,7 +59,7 @@ const defaultEnvValues: EnvVariables = {
 	DB_SLOW_QUERY_LOG_MIN_DURATION: 1000,
 	DB_AUTO_MIGRATION: true,
 	DB_ALLOW_INCOMPLETE_MIGRATIONS: false,
-	DB_USE_REPLICA: false,
+	DB_USE_SLAVE: false,

 	POSTGRES_PASSWORD: 'joplin',
 	POSTGRES_DATABASE: 'joplin',
@@ -68,15 +68,16 @@ const defaultEnvValues: EnvVariables = {
 	POSTGRES_PORT: 5432,
 	POSTGRES_CONNECTION_STRING: '',

-	REPLICA_POSTGRES_PASSWORD: 'joplin',
-	REPLICA_POSTGRES_DATABASE: 'joplin',
-	REPLICA_POSTGRES_USER: 'joplin',
-	REPLICA_POSTGRES_HOST: '',
-	REPLICA_POSTGRES_PORT: 5432,
-	REPLICA_POSTGRES_CONNECTION_STRING: '',
+	SLAVE_POSTGRES_PASSWORD: 'joplin',
+	SLAVE_POSTGRES_DATABASE: 'joplin',
+	SLAVE_POSTGRES_USER: 'joplin',
+	SLAVE_POSTGRES_HOST: '',
+	SLAVE_POSTGRES_PORT: 5432,
+	SLAVE_POSTGRES_CONNECTION_STRING: '',

 	// This must be the full path to the database file
 	SQLITE_DATABASE: '',
+	SLAVE_SQLITE_DATABASE: '',

 	// ==================================================
 	// Content driver config
@@ -165,7 +166,7 @@ export interface EnvVariables {
 	DB_SLOW_QUERY_LOG_MIN_DURATION: number;
 	DB_AUTO_MIGRATION: boolean;
 	DB_ALLOW_INCOMPLETE_MIGRATIONS: boolean;
-	DB_USE_REPLICA: boolean;
+	DB_USE_SLAVE: boolean;

 	POSTGRES_PASSWORD: string;
 	POSTGRES_DATABASE: string;
@@ -174,14 +175,15 @@ export interface EnvVariables {
 	POSTGRES_PORT: number;
 	POSTGRES_CONNECTION_STRING: string;

-	REPLICA_POSTGRES_PASSWORD: string;
-	REPLICA_POSTGRES_DATABASE: string;
-	REPLICA_POSTGRES_USER: string;
-	REPLICA_POSTGRES_HOST: string;
-	REPLICA_POSTGRES_PORT: number;
-	REPLICA_POSTGRES_CONNECTION_STRING: string;
+	SLAVE_POSTGRES_PASSWORD: string;
+	SLAVE_POSTGRES_DATABASE: string;
+	SLAVE_POSTGRES_USER: string;
+	SLAVE_POSTGRES_HOST: string;
+	SLAVE_POSTGRES_PORT: number;
+	SLAVE_POSTGRES_CONNECTION_STRING: string;

 	SQLITE_DATABASE: string;
+	SLAVE_SQLITE_DATABASE: string;

 	STORAGE_DRIVER: string;
 	STORAGE_DRIVER_FALLBACK: string;
@@ -64,15 +64,15 @@ export default abstract class BaseModel<T> {

 	private defaultFields_: string[] = [];
 	private db_: DbConnection;
-	private dbReplica_: DbConnection;
+	private dbSlave_: DbConnection;
 	private transactionHandler_: TransactionHandler;
 	private modelFactory_: NewModelFactoryHandler;
 	private config_: Config;
 	private savePoints_: SavePoint[] = [];

-	public constructor(db: DbConnection, dbReplica: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
+	public constructor(db: DbConnection, dbSlave: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
 		this.db_ = db;
-		this.dbReplica_ = dbReplica;
+		this.dbSlave_ = dbSlave;
 		this.modelFactory_ = modelFactory;
 		this.config_ = config;
@@ -115,8 +115,8 @@ export default abstract class BaseModel<T> {
 		return this.db_;
 	}

-	public get dbReplica(): DbConnection {
-		return this.dbReplica_;
+	public get dbSlave(): DbConnection {
+		return this.dbSlave_;
 	}

 	protected get defaultFields(): string[] {
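
The new dbSlave getter exposes a second connection that subclasses can use for read-only queries, while writes keep going through db. A hedged sketch of the intended usage follows; this method is illustrative only and not part of the commit (ChangeModel below uses this.dbSlave the same way for its delta queries).

// Illustrative only: a read-only aggregate routed to the slave connection.
// The table name is supplied by the caller; writes would still use this.db.
protected async countRowsFromSlave(tableName: string): Promise<number> {
	const row: any = await this.dbSlave(tableName).count('id as total').first();
	return row ? Number(row.total) : 0;
}
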
@@ -343,4 +343,18 @@ describe('ChangeModel', () => {
 		expect('jopItem' in result.items[0]).toBe(false);
 	});

+	// test('should use slave', async () => {
+	// 	const { session, user } = await createUserAndSession(1, true);
+	// 	const changeModel = models().change();
+
+	// 	const item1 = await createFolder(session.id, { title: 'folder' });
+
+	// 	// {
+	// 	// 	const changes = (await changeModel.delta(user.id)).items;
+	// 	// 	expect(changes.length).toBe(1);
+	// 	// 	expect(changes[0].item_id).toBe(item1.id);
+	// 	// 	expect(changes[0].type).toBe(ChangeType.Create);
+	// 	// }
+	// });
+
 });
@@ -57,8 +57,8 @@ export default class ChangeModel extends BaseModel<Change> {

 	public deltaIncludesItems_: boolean;

-	public constructor(db: DbConnection, dbReplica: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
-		super(db, dbReplica, modelFactory, config);
+	public constructor(db: DbConnection, dbSlave: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
+		super(db, dbSlave, modelFactory, config);
 		this.deltaIncludesItems_ = config.DELTA_INCLUDES_ITEMS;
 	}

@@ -199,8 +199,8 @@ export default class ChangeModel extends BaseModel<Change> {
 		if (!doCountQuery) {
 			finalParams.push(limit);

-			if (isPostgres(this.dbReplica)) {
-				query = this.dbReplica.raw(`
+			if (isPostgres(this.dbSlave)) {
+				query = this.dbSlave.raw(`
 					WITH cte1 AS MATERIALIZED (
 						${subQuery1}
 					)
@@ -214,7 +214,7 @@ export default class ChangeModel extends BaseModel<Change> {
 					LIMIT ?
 				`, finalParams);
 			} else {
-				query = this.dbReplica.raw(`
+				query = this.dbSlave.raw(`
 					SELECT ${fieldsSql} FROM (${subQuery1}) as sub1
 					UNION ALL
 					SELECT ${fieldsSql} FROM (${subQuery2}) as sub2
@@ -223,7 +223,7 @@ export default class ChangeModel extends BaseModel<Change> {
 				`, finalParams);
 			}
 		} else {
-			query = this.dbReplica.raw(`
+			query = this.dbSlave.raw(`
 				SELECT count(*) as total
 				FROM (
 					(${subQuery1})
@@ -1,4 +1,4 @@
-import { createUserAndSession, beforeAllDb, afterAllTests, beforeEachDb, models, createItemTree, createResource, createNote, createItemTree3, db, tempDir, expectNotThrow, expectHttpError, dbReplica } from '../utils/testing/testUtils';
+import { createUserAndSession, beforeAllDb, afterAllTests, beforeEachDb, models, createItemTree, createResource, createNote, createItemTree3, db, tempDir, expectNotThrow, expectHttpError, dbSlave } from '../utils/testing/testUtils';
 import { shareFolderWithUser } from '../utils/testing/shareApiUtils';
 import { resourceBlobPath } from '../utils/joplinUtils';
 import newModelFactory from './factory';
@@ -275,7 +275,7 @@ describe('ItemModel', () => {
 	test('should respect the hard item size limit', async () => {
 		const { user: user1 } = await createUserAndSession(1);

-		let models = newModelFactory(db(), dbReplica(), config());
+		let models = newModelFactory(db(), dbSlave(), config());

 		let result = await models.item().saveFromRawContent(user1, {
 			body: Buffer.from('1234'),
@@ -285,7 +285,7 @@ describe('ItemModel', () => {
 		const item = result['test1.txt'].item;

 		config().itemSizeHardLimit = 3;
-		models = newModelFactory(db(), dbReplica(), config());
+		models = newModelFactory(db(), dbSlave(), config());

 		result = await models.item().saveFromRawContent(user1, {
 			body: Buffer.from('1234'),
@@ -297,7 +297,7 @@ describe('ItemModel', () => {
 		await expectHttpError(async () => models.item().loadWithContent(item.id), ErrorPayloadTooLarge.httpCode);

 		config().itemSizeHardLimit = 1000;
-		models = newModelFactory(db(), dbReplica(), config());
+		models = newModelFactory(db(), dbSlave(), config());

 		await expectNotThrow(async () => models.item().loadWithContent(item.id));
 	});
@@ -316,18 +316,18 @@ describe('ItemModel', () => {
 			path: tempDir2,
 		};

-		const fromModels = newModelFactory(db(), dbReplica(), {
+		const fromModels = newModelFactory(db(), dbSlave(), {
 			...config(),
 			storageDriver: fromStorageConfig,
 		});

-		const toModels = newModelFactory(db(), dbReplica(), {
+		const toModels = newModelFactory(db(), dbSlave(), {
 			...config(),
 			storageDriver: toStorageConfig,
 		});

-		const fromDriver = await loadStorageDriver(fromStorageConfig, db(), dbReplica());
-		const toDriver = await loadStorageDriver(toStorageConfig, db(), dbReplica());
+		const fromDriver = await loadStorageDriver(fromStorageConfig, db(), dbSlave());
+		const toDriver = await loadStorageDriver(toStorageConfig, db(), dbSlave());

 		return {
 			fromStorageConfig,
@@ -364,7 +364,7 @@ describe('ItemModel', () => {

 		await msleep(2);

-		const toModels = newModelFactory(db(), dbReplica(), {
+		const toModels = newModelFactory(db(), dbSlave(), {
 			...config(),
 			storageDriver: toStorageConfig,
 		});
@@ -75,8 +75,8 @@ export default class ItemModel extends BaseModel<Item> {

 	private static storageDrivers_: Map<StorageDriverConfig, StorageDriverBase> = new Map();

-	public constructor(db: DbConnection, dbReplica: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
-		super(db, dbReplica, modelFactory, config);
+	public constructor(db: DbConnection, dbSlave: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
+		super(db, dbSlave, modelFactory, config);

 		this.storageDriverConfig_ = config.storageDriver;
 		this.storageDriverConfigFallback_ = config.storageDriverFallback;
@@ -102,7 +102,7 @@ export default class ItemModel extends BaseModel<Item> {
 		let driver = ItemModel.storageDrivers_.get(config);

 		if (!driver) {
-			driver = await loadStorageDriver(config, this.db, this.dbReplica);
+			driver = await loadStorageDriver(config, this.db, this.dbSlave);
 			ItemModel.storageDrivers_.set(config, driver);
 		}

@@ -331,7 +331,7 @@ export default class ItemModel extends BaseModel<Item> {
 			let fromDriver: StorageDriverBase = drivers[item.content_storage_id];

 			if (!fromDriver) {
-				fromDriver = await loadStorageDriver(item.content_storage_id, this.db, this.dbReplica);
+				fromDriver = await loadStorageDriver(item.content_storage_id, this.db, this.dbSlave);
 				drivers[item.content_storage_id] = fromDriver;
 			}

@@ -118,8 +118,8 @@ export default class UserModel extends BaseModel<User> {

 	private ldapConfig_: LdapConfig[];

-	public constructor(db: DbConnection, dbReplica: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
-		super(db, dbReplica, modelFactory, config);
+	public constructor(db: DbConnection, dbSlave: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
+		super(db, dbSlave, modelFactory, config);

 		this.ldapConfig_ = config.ldap;
 	}
@@ -83,107 +83,107 @@ export type NewModelFactoryHandler = (db: DbConnection)=> Models;
 export class Models {

 	private db_: DbConnection;
-	private dbReplica_: DbConnection;
+	private dbSlave_: DbConnection;
 	private config_: Config;

-	public constructor(db: DbConnection, dbReplica_: DbConnection, config: Config) {
+	public constructor(db: DbConnection, dbSlave: DbConnection, config: Config) {
 		this.db_ = db;
-		this.dbReplica_ = dbReplica_;
+		this.dbSlave_ = dbSlave;
 		this.config_ = config;

 		this.newModelFactory = this.newModelFactory.bind(this);
 	}

 	private newModelFactory(db: DbConnection) {
-		return new Models(db, this.dbReplica_, this.config_);
+		return new Models(db, this.dbSlave_, this.config_);
 	}

 	public item() {
-		return new ItemModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new ItemModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public user() {
-		return new UserModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new UserModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public email() {
-		return new EmailModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new EmailModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public userItem() {
-		return new UserItemModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new UserItemModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public token() {
-		return new TokenModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new TokenModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public itemResource() {
-		return new ItemResourceModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new ItemResourceModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public apiClient() {
-		return new ApiClientModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new ApiClientModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public session() {
-		return new SessionModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new SessionModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public change() {
-		return new ChangeModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new ChangeModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public notification() {
-		return new NotificationModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new NotificationModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public share() {
-		return new ShareModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new ShareModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public shareUser() {
-		return new ShareUserModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new ShareUserModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public keyValue() {
-		return new KeyValueModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new KeyValueModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public subscription() {
-		return new SubscriptionModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new SubscriptionModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public userFlag() {
-		return new UserFlagModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new UserFlagModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public event() {
-		return new EventModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new EventModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public lock() {
-		return new LockModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new LockModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public storage() {
-		return new StorageModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new StorageModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public userDeletion() {
-		return new UserDeletionModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new UserDeletionModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public backupItem() {
-		return new BackupItemModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new BackupItemModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 	public taskState() {
-		return new TaskStateModel(this.db_, this.dbReplica_, this.newModelFactory, this.config_);
+		return new TaskStateModel(this.db_, this.dbSlave_, this.newModelFactory, this.config_);
 	}

 }

-export default function newModelFactory(db: DbConnection, dbReplica: DbConnection, config: Config): Models {
-	return new Models(db, dbReplica, config);
+export default function newModelFactory(db: DbConnection, dbSlave: DbConnection, config: Config): Models {
+	return new Models(db, dbSlave, config);
 }
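
Usage-wise, nothing changes for callers except that the factory now takes two connections. A minimal sketch of a call site, under the assumption that connectDb and config are imported from the server's db and config modules as they are in the tests elsewhere in this diff:

// Sketch: wiring the factory with a primary and a slave connection.
const db = await connectDb(config().database);
const dbSlave = config().DB_USE_SLAVE ? await connectDb(config().databaseSlave) : db;

const models = newModelFactory(db, dbSlave, config());

// Reads issued by ChangeModel.delta() now go through the slave connection.
const page = await models.change().delta(userId); // userId is a placeholder for a real user id
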
@@ -1,4 +1,4 @@
-import { afterAllTests, beforeAllDb, beforeEachDb, db, dbReplica, expectThrow, models } from '../../../utils/testing/testUtils';
+import { afterAllTests, beforeAllDb, beforeEachDb, db, dbSlave, expectThrow, models } from '../../../utils/testing/testUtils';
 import { StorageDriverType } from '../../../utils/types';
 import loadStorageDriver from './loadStorageDriver';

@@ -18,13 +18,13 @@ describe('loadStorageDriver', () => {

 	test('should load a driver and assign an ID to it', async () => {
 		{
-			const newDriver = await loadStorageDriver({ type: StorageDriverType.Memory }, db(), dbReplica());
+			const newDriver = await loadStorageDriver({ type: StorageDriverType.Memory }, db(), dbSlave());
 			expect(newDriver.storageId).toBe(1);
 			expect((await models().storage().count())).toBe(1);
 		}

 		{
-			const newDriver = await loadStorageDriver({ type: StorageDriverType.Filesystem, path: '/just/testing' }, db(), dbReplica());
+			const newDriver = await loadStorageDriver({ type: StorageDriverType.Filesystem, path: '/just/testing' }, db(), dbSlave());
 			expect(newDriver.storageId).toBe(2);
 			expect((await models().storage().count())).toBe(2);
 		}
@@ -14,7 +14,7 @@ export interface Options {
 	assignDriverId?: boolean;
 }

-export default async function(config: StorageDriverConfig | number, db: DbConnection, dbReplica: DbConnection, options: Options = null): Promise<StorageDriverBase | null> {
+export default async function(config: StorageDriverConfig | number, db: DbConnection, dbSlave: DbConnection, options: Options = null): Promise<StorageDriverBase | null> {
 	if (!config) return null;

 	options = {
@@ -27,14 +27,14 @@ export default async function(config: StorageDriverConfig | number, db: DbConnec
 	if (typeof config === 'number') {
 		storageId = config;

-		const models = newModelFactory(db, dbReplica, globalConfig());
+		const models = newModelFactory(db, dbSlave, globalConfig());
 		const storage = await models.storage().byId(storageId);
 		if (!storage) throw new Error(`No such storage ID: ${storageId}`);

 		config = parseStorageDriverConnectionString(storage.connection_string);
 	} else {
 		if (options.assignDriverId) {
-			const models = newModelFactory(db, dbReplica, globalConfig());
+			const models = newModelFactory(db, dbSlave, globalConfig());

 			const connectionString = serializeStorageConfig(config);
 			let storage = await models.storage().byConnectionString(connectionString);
@@ -3,7 +3,7 @@
 import config from '../../../config';
 import { Item } from '../../../services/database/types';
 import { CustomErrorCode } from '../../../utils/errors';
-import { createUserAndSession, db, dbReplica, makeNoteSerializedBody, models } from '../../../utils/testing/testUtils';
+import { createUserAndSession, db, dbSlave, makeNoteSerializedBody, models } from '../../../utils/testing/testUtils';
 import { Config, StorageDriverConfig, StorageDriverMode } from '../../../utils/types';
 import newModelFactory from '../../factory';
 import loadStorageDriver from './loadStorageDriver';
@@ -15,7 +15,7 @@ const newTestModels = (driverConfig: StorageDriverConfig, driverConfigFallback:
 		storageDriver: driverConfig,
 		storageDriverFallback: driverConfigFallback,
 	};
-	return newModelFactory(db(), dbReplica(), newConfig);
+	return newModelFactory(db(), dbSlave(), newConfig);
 };

 export function shouldWriteToContentAndReadItBack(driverConfig: StorageDriverConfig) {
@@ -281,7 +281,7 @@ export function shouldUpdateContentStorageIdAfterSwitchingDriver(oldDriverConfig

 export function shouldThrowNotFoundIfNotExist(driverConfig: StorageDriverConfig) {
 	test('should throw not found if item does not exist', async () => {
-		const driver = await loadStorageDriver(driverConfig, db(), dbReplica());
+		const driver = await loadStorageDriver(driverConfig, db(), dbSlave());

 		// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Old code before rule was applied
 		let error: any = null;
@@ -13,6 +13,19 @@ export interface DropDbOptions {
 	ignoreIfNotExists: boolean;
 }

+const getPostgresToolPath = async (name: string) => {
+	const candidates = [
+		'/usr/local/opt/postgresql@16/bin',
+	];
+
+	for (const candidate of candidates) {
+		const p = `${candidate}/${name}`;
+		if (await fs.pathExists(p)) return p;
+	}
+
+	return name;
+};
+
 export async function createDb(config: DatabaseConfig, options: CreateDbOptions = null) {
 	options = {
 		dropIfExists: false,
@@ -22,7 +35,7 @@ export async function createDb(config: DatabaseConfig, options: CreateDbOptions

 	if (config.client === 'pg') {
 		const cmd: string[] = [
-			'createdb',
+			await getPostgresToolPath('createdb'),
 			'--host', config.host,
 			'--port', config.port.toString(),
 			'--username', config.user,
@@ -64,7 +77,7 @@ export async function dropDb(config: DatabaseConfig, options: DropDbOptions = nu

 	if (config.client === 'pg') {
 		const cmd: string[] = [
-			'dropdb',
+			await getPostgresToolPath('dropdb'),
 			'--host', config.host,
 			'--port', config.port.toString(),
 			'--username', config.user,
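
getPostgresToolPath only knows the Homebrew location for Intel macOS and otherwise falls back to the bare tool name, that is, whatever createdb/dropdb resolves to on the PATH. If other environments ever need explicit locations, the candidates array is the place to extend; the extra entries below are assumptions, not part of the commit:

const candidates = [
	'/usr/local/opt/postgresql@16/bin', // Homebrew on Intel macOS (from the diff)
	'/opt/homebrew/opt/postgresql@16/bin', // Homebrew on Apple Silicon (assumed path)
	'/usr/lib/postgresql/16/bin', // Debian/Ubuntu postgresql-16 package (assumed path)
];
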
@@ -6,12 +6,12 @@ import { Context } from '../models/items/storage/StorageDriverBase';
 import { StorageDriverConfig, StorageDriverType } from './types';
 import { uuidgen } from '@joplin/lib/uuid';

-export default async function(connection: string | StorageDriverConfig, db: DbConnection, dbReplica: DbConnection, models: Models): Promise<string> {
+export default async function(connection: string | StorageDriverConfig, db: DbConnection, dbSlave: DbConnection, models: Models): Promise<string> {
 	const storageConfig = typeof connection === 'string' ? parseStorageConnectionString(connection) : connection;

 	if (storageConfig.type === StorageDriverType.Database) return 'Database storage is special and cannot be checked this way. If the connection to the database was successful then the storage driver should work too.';

-	const driver = await loadStorageDriver(storageConfig, db, dbReplica, { assignDriverId: false });
+	const driver = await loadStorageDriver(storageConfig, db, dbSlave, { assignDriverId: false });
 	const itemId = `testingconnection${uuidgen(8)}`;
 	const itemContent = Buffer.from(uuidgen(8));
 	const context: Context = { models };
@@ -32,6 +32,7 @@ import initLib from '@joplin/lib/initLib';
 const packageRootDir = path.dirname(path.dirname(path.dirname(__dirname)));

 let db_: DbConnection = null;
+let dbSlave_: DbConnection = null;

 // require('source-map-support').install();

@@ -69,12 +70,16 @@ function initGlobalLogger() {
 }

 let createdDbPath_: string = null;
+let createdDbSlavePath_: string = null;
 export async function beforeAllDb(unitName: string, createDbOptions: CreateDbOptions = null) {
 	unitName = unitName.replace(/\//g, '_');

 	createdDbPath_ = `${packageRootDir}/db-test-${unitName}.sqlite`;
 	await fs.remove(createdDbPath_);

+	createdDbSlavePath_ = `${packageRootDir}/db-slave-test-${unitName}.sqlite`;
+	await fs.remove(createdDbSlavePath_);
+
 	const tempDir = `${packageRootDir}/temp/test-${unitName}`;
 	await fs.mkdirp(tempDir);

@@ -87,16 +92,25 @@ export async function beforeAllDb(unitName: string, createDbOptions: CreateDbOpt
 	if (process.env.JOPLIN_TESTS_SERVER_DB === 'pg') {
 		await initConfig(Env.Dev, parseEnv({
 			DB_CLIENT: 'pg',
+			DB_USE_SLAVE: '1',
+
 			POSTGRES_DATABASE: unitName,
 			POSTGRES_USER: 'joplin',
 			POSTGRES_PASSWORD: 'joplin',
+
+			SLAVE_POSTGRES_DATABASE: unitName,
+			SLAVE_POSTGRES_USER: 'joplin',
+			SLAVE_POSTGRES_PASSWORD: 'joplin',
+
 			SUPPORT_EMAIL: 'testing@localhost',
 		}), {
 			tempDir: tempDir,
 		});
 	} else {
 		await initConfig(Env.Dev, parseEnv({
+			DB_USE_SLAVE: '1',
 			SQLITE_DATABASE: createdDbPath_,
+			SLAVE_SQLITE_DATABASE: createdDbSlavePath_,
 			SUPPORT_EMAIL: 'testing@localhost',
 		}), {
 			tempDir: tempDir,
@@ -108,6 +122,9 @@ export async function beforeAllDb(unitName: string, createDbOptions: CreateDbOpt
 	await createDb(config().database, { dropIfExists: true, ...createDbOptions });
 	db_ = await connectDb(config().database);

+	await createDb(config().databaseSlave, { dropIfExists: true, ...createDbOptions });
+	dbSlave_ = await connectDb(config().databaseSlave);
+
 	const mustache = new MustacheService(config().viewDir, config().baseUrl);
 	await mustache.loadPartials();

@@ -124,6 +141,11 @@ export async function afterAllTests() {
 		db_ = null;
 	}

+	if (dbSlave_) {
+		await disconnectDb(dbSlave_);
+		dbSlave_ = null;
+	}
+
 	if (tempDir_) {
 		await fs.remove(tempDir_);
 		tempDir_ = null;
@@ -257,12 +279,16 @@ export function db() {
 	return db_;
 }

-export function dbReplica() {
-	return db_;
+export function dbSlave() {
+	return dbSlave_;
 }

+export function dbSlaveSync() {
+
+}
+
 export function models() {
-	return modelFactory(db(), dbReplica(), config());
+	return modelFactory(db(), dbSlave(), config());
 }

 export function parseHtml(html: string): Document {
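
With dbSlave() exported, the commented-out 'should use slave' test in the ChangeModel tests above could eventually be written along these lines. This is only a sketch assembled from the helpers shown in this diff, not code from the commit:

test('should read changes through the slave connection', async () => {
	const { user } = await createUserAndSession(1, true);

	// Build models against the slave connection, mirroring the ItemModel tests above.
	const slaveModels = newModelFactory(db(), dbSlave(), config());
	const changes = (await slaveModels.change().delta(user.id)).items;

	// With no replication in the test setup, only the shape of the result is asserted.
	expect(Array.isArray(changes)).toBe(true);
});
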
@@ -165,7 +165,7 @@ export interface Config extends EnvVariables {
 	accountTypesEnabled: boolean;
 	showErrorStackTraces: boolean;
 	database: DatabaseConfig;
-	databaseReplica: DatabaseConfig;
+	databaseSlave: DatabaseConfig;
 	mailer: MailerConfig;
 	stripe: StripeConfig;
 	supportEmail: string;