1
0
mirror of https://github.com/laurent22/joplin.git synced 2024-11-27 08:21:03 +02:00

Server: Improved storage command

This commit is contained in:
Laurent Cozic 2021-12-02 11:27:22 +00:00
parent 75c67b7d78
commit 122afd6d46
4 changed files with 57 additions and 7 deletions

View File

@ -10,7 +10,7 @@ module.exports = {
testEnvironment: 'node',
slowTestThreshold: 40,
slowTestThreshold: 60,
setupFilesAfterEnv: [`${__dirname}/jest.setup.js`],
};

View File

@ -14,9 +14,10 @@ enum ArgvCommand {
interface Argv {
command: ArgvCommand;
connection: string;
connection?: string;
batchSize?: number;
maxContentSize?: number;
maxProcessedItems?: number;
}
export default class StorageCommand extends BaseCommand {
@ -52,6 +53,10 @@ export default class StorageCommand extends BaseCommand {
type: 'number',
description: 'Max content size',
},
'max-processed-items': {
type: 'number',
description: 'Max number of items to process before stopping',
},
'connection': {
description: 'storage connection string',
type: 'string',
@ -85,11 +90,14 @@ export default class StorageCommand extends BaseCommand {
},
[ArgvCommand.DeleteDatabaseContentColumn]: async () => {
const maxProcessedItems = argv.maxProcessedItems;
logger.info(`Batch size: ${batchSize}`);
await runContext.models.item().deleteDatabaseContentColumn({
batchSize,
logger,
maxProcessedItems,
});
},
};

View File

@ -462,6 +462,41 @@ describe('ItemModel', function() {
expect(await models().item().dbContent(note1.id)).toEqual(Buffer.from(''));
});
// Verifies that deleteDatabaseContentColumn() stops once maxProcessedItems
// items have been processed, leaving the remaining items' content untouched.
test('should delete the database item content - maxProcessedItems handling', async function() {
	// Only exercised against non-SQLite databases; under SQLite the test is
	// skipped (the trivial expect() presumably keeps the test from being
	// reported as assertion-less — confirm against the project's Jest setup).
	if (isSqlite(db())) {
		expect(1).toBe(1);
		return;
	}
	const { user: user1 } = await createUserAndSession(1);
	// One parent item with four children — five items in total.
	await createItemTree3(user1.id, '', '', [
		{
			id: '000000000000000000000000000000F1',
			children: [
				{ id: '00000000000000000000000000000001' },
				{ id: '00000000000000000000000000000002' },
				{ id: '00000000000000000000000000000003' },
				{ id: '00000000000000000000000000000004' },
			],
		},
	]);
	// batchSize 2 with maxProcessedItems 4: two batches (4 items) are
	// processed, then the call returns before touching the fifth item.
	await models().item().deleteDatabaseContentColumn({ batchSize: 2, maxProcessedItems: 4 });
	const itemIds = (await models().item().all()).map(it => it.id);
	const contents = await Promise.all([
		models().item().dbContent(itemIds[0]),
		models().item().dbContent(itemIds[1]),
		models().item().dbContent(itemIds[2]),
		models().item().dbContent(itemIds[3]),
		models().item().dbContent(itemIds[4]),
	]);
	// Exactly 4 of the 5 items should have had their content cleared; which
	// item remains untouched is not deterministic, so only the count is checked.
	const emptyOnes = contents.filter(c => c.toString() === '');
	expect(emptyOnes.length).toBe(4);
});
// test('should stop importing item if it has been deleted', async function() {
// const { user: user1 } = await createUserAndSession(1);

View File

@ -30,6 +30,7 @@ export interface ImportContentToStorageOptions {
// Options for ItemModel.deleteDatabaseContentColumn(), which clears the items
// table `content` column in batches.
export interface DeleteDatabaseContentOptions {
	// Number of rows cleared per batch. Defaults to 1000 in the model.
	batchSize?: number;
	// Destination for progress messages. Defaults to a new Logger in the model.
	logger?: Logger | LoggerWrapper;
	// Stop after about this many items have been processed. Defaults to 0 in
	// the model, meaning no limit.
	maxProcessedItems?: number;
}
export interface SaveFromRawContentItem {
@ -409,20 +410,21 @@ export default class ItemModel extends BaseModel<Item> {
options = {
batchSize: 1000,
logger: new Logger(),
maxProcessedItems: 0,
...options,
};
const itemCount = (await this.db(this.tableName)
.count('id', { as: 'total' })
.where('content', '!=', Buffer.from(''))
.first())['total'];
// const itemCount = (await this.db(this.tableName)
// .count('id', { as: 'total' })
// .where('content', '!=', Buffer.from(''))
// .first())['total'];
let totalDone = 0;
// UPDATE items SET content = '\x' WHERE id IN (SELECT id FROM items WHERE content != '\x' LIMIT 5000);
while (true) {
options.logger.info(`Processing items ${totalDone} / ${itemCount}`);
options.logger.info(`Processing items ${totalDone}`);
const updatedRows = await this
.db(this.tableName)
@ -440,6 +442,11 @@ export default class ItemModel extends BaseModel<Item> {
return;
}
if (options.maxProcessedItems && totalDone + options.batchSize > options.maxProcessedItems) {
options.logger.info(`Processed ${totalDone} items out of requested ${options.maxProcessedItems}`);
return;
}
await msleep(1000);
}
}