Commit d13e7b32c3 (parent a56f104fe8)
@@ -1,5 +1,5 @@
 import BaseModel, { ModelType } from '../BaseModel';
-import { RevisionEntity } from '../services/database/types';
+import { RevisionEntity, StringOrSqlQuery } from '../services/database/types';
 import BaseItem from './BaseItem';
 const DiffMatchPatch = require('diff-match-patch');
 import * as ArrayUtils from '../ArrayUtils';
@@ -305,43 +305,71 @@ export default class Revision extends BaseItem {
 		// and modify that revision into a "merged" one.
 
 		const cutOffDate = Date.now() - ttl;
-		const revisions = await this.modelSelectAll('SELECT * FROM revisions WHERE item_updated_time < ? ORDER BY item_updated_time DESC', [cutOffDate]);
-		const doneItems: Record<string, boolean> = {};
+		const allOldRevisions: RevisionEntity[] = await this.modelSelectAll(
+			'SELECT * FROM revisions WHERE item_updated_time < ? ORDER BY item_updated_time DESC',
+			[cutOffDate],
+		);
 
-		for (const rev of revisions) {
-			const doneKey = `${rev.item_type}_${rev.item_id}`;
-			if (doneItems[doneKey]) continue;
+		const itemIdToOldRevisions = new Map<string, RevisionEntity[]>();
+		for (const rev of allOldRevisions) {
+			const itemId = rev.item_id;
+			if (!itemIdToOldRevisions.has(itemId)) {
+				itemIdToOldRevisions.set(itemId, []);
+			}
+			itemIdToOldRevisions.get(itemId).push(rev);
+		}
 
-			const keptRev = await this.modelSelectOne('SELECT * FROM revisions WHERE item_updated_time >= ? AND item_type = ? AND item_id = ? ORDER BY item_updated_time ASC LIMIT 1', [cutOffDate, rev.item_type, rev.item_id]);
+		const deleteOldRevisionsForItem = async (itemType: ModelType, itemId: string, oldRevisions: RevisionEntity[]) => {
+			const keptRev = await this.modelSelectOne(
+				'SELECT * FROM revisions WHERE item_updated_time >= ? AND item_type = ? AND item_id = ? ORDER BY item_updated_time ASC LIMIT 1',
+				[cutOffDate, itemType, itemId],
+			);
+			const queries: StringOrSqlQuery[] = [];
+			if (!keptRev) {
+				const hasEncrypted = await this.modelSelectOne(
+					'SELECT * FROM revisions WHERE encryption_applied = 1 AND item_updated_time < ? AND item_id = ?',
+					[cutOffDate, itemId],
+				);
+				if (hasEncrypted) {
+					throw new JoplinError('One of the revision to be deleted is encrypted', 'revision_encrypted');
+				}
+			} else {
+				// Note: we don't need to check for encrypted rev here because
+				// mergeDiff will already throw the revision_encrypted exception
+				// if a rev is encrypted.
+				const merged = await this.mergeDiffs(keptRev);
+
+				const titleDiff = this.createTextPatch('', merged.title);
+				const bodyDiff = this.createTextPatch('', merged.body);
+				const metadataDiff = this.createObjectPatch({}, merged.metadata);
+				queries.push({
+					sql: 'UPDATE revisions SET title_diff = ?, body_diff = ?, metadata_diff = ? WHERE id = ?',
+					params: [titleDiff, bodyDiff, metadataDiff, keptRev.id],
+				});
+			}
+
+			await this.batchDelete(oldRevisions.map(item => item.id), { sourceDescription: 'Revision.deleteOldRevisions' });
+			if (queries.length) {
+				await this.db().transactionExecBatch(queries);
+			}
+		};
+
+		for (const [itemId, oldRevisions] of itemIdToOldRevisions.entries()) {
+			if (!oldRevisions.length) {
+				throw new Error('Invalid state: There must be at least one old revision per item to be processed.');
+			}
+
+			const latestOldRevision = oldRevisions[oldRevisions.length - 1];
 
 			try {
-				const deleteQueryCondition = 'item_updated_time < ? AND item_id = ?';
-				const deleteQueryParams = [cutOffDate, rev.item_id];
-				const deleteQuery = { sql: `DELETE FROM revisions WHERE ${deleteQueryCondition}`, params: deleteQueryParams };
-
-				if (!keptRev) {
-					const hasEncrypted = await this.modelSelectOne(`SELECT * FROM revisions WHERE encryption_applied = 1 AND ${deleteQueryCondition}`, deleteQueryParams);
-					if (hasEncrypted) throw new JoplinError('One of the revision to be deleted is encrypted', 'revision_encrypted');
-					await this.db().transactionExecBatch([deleteQuery]);
-				} else {
-					// Note: we don't need to check for encrypted rev here because
-					// mergeDiff will already throw the revision_encrypted exception
-					// if a rev is encrypted.
-					const merged = await this.mergeDiffs(keptRev);
-
-					const queries = [deleteQuery, { sql: 'UPDATE revisions SET title_diff = ?, body_diff = ?, metadata_diff = ? WHERE id = ?', params: [this.createTextPatch('', merged.title), this.createTextPatch('', merged.body), this.createObjectPatch({}, merged.metadata), keptRev.id] }];
-
-					await this.db().transactionExecBatch(queries);
-				}
+				await deleteOldRevisionsForItem(latestOldRevision.item_type, itemId, oldRevisions);
 			} catch (error) {
 				if (error.code === 'revision_encrypted') {
-					this.logger().info(`Aborted deletion of old revisions for item "${rev.item_id}" (rev "${rev.id}") because one of the revisions is still encrypted`, error);
+					this.logger().info(`Aborted deletion of old revisions for item "${itemId}" (latest old rev "${latestOldRevision.id}") because one of the revisions is still encrypted`, error);
 				} else {
 					throw error;
 				}
 			}
-
-			doneItems[doneKey] = true;
 		}
 	}
 
@@ -6,6 +6,32 @@ import Revision from '../models/Revision';
 import BaseModel, { ModelType } from '../BaseModel';
 import RevisionService from '../services/RevisionService';
 import { MarkupLanguage } from '../../renderer';
+import { NoteEntity } from './database/types';
 
+interface CreateTestRevisionOptions {
+	// How long to pause (in milliseconds) between each note modification.
+	// For example, [10, 20] would modify the note twice, with pauses of 10ms and 20ms.
+	delaysBetweenModifications: number[];
+}
+
+const createTestRevisions = async (
+	noteProperties: Partial<NoteEntity>,
+	{ delaysBetweenModifications }: CreateTestRevisionOptions,
+) => {
+	const note = await Note.save({
+		title: 'note',
+		...noteProperties,
+	});
+
+	let counter = 0;
+	for (const delay of delaysBetweenModifications) {
+		jest.advanceTimersByTime(delay);
+		await Note.save({ ...noteProperties, id: note.id, title: `note REV${counter++}` });
+		await revisionService().collectRevisions();
+	}
+
+	return note;
+};
+
 describe('services/RevisionService', () => {
 
@@ -13,6 +39,8 @@ describe('services/RevisionService', () => {
 		await setupDatabaseAndSynchronizer(1);
 		await switchClient(1);
 		Setting.setValue('revisionService.intervalBetweenRevisions', 0);
+
+		jest.useFakeTimers({ advanceTimers: true });
 	});
 
 	it('should create diff and rebuild notes', (async () => {
@@ -185,6 +213,25 @@
 		}
 	}));
 
+	it('should not error on revisions for missing (not downloaded yet/permanently deleted) notes', async () => {
+		Setting.setValue('revisionService.intervalBetweenRevisions', 100);
+
+		const note = await createTestRevisions({
+			share_id: 'test-share-id',
+		}, { delaysBetweenModifications: [200, 400, 600, 8_000] });
+		const getNoteRevisions = () => {
+			return Revision.allByType(BaseModel.TYPE_NOTE, note.id);
+		};
+		expect(await getNoteRevisions()).toHaveLength(4);
+
+		await Note.delete(note.id, { toTrash: false, sourceDescription: 'tests/RevisionService' });
+
+		await revisionService().deleteOldRevisions(4_000);
+
+		// Should leave newer revisions (handle the case where revisions are downloaded before the note).
+		expect(await getNoteRevisions()).toHaveLength(1);
+	});
+
 	it('should handle conflicts', (async () => {
 		const service = new RevisionService();
 
@@ -181,4 +181,53 @@ describe('Synchronizer.revisions', () => {
 		expect((await Revision.all()).length).toBe(0);
 	}));
 
+	it('should delete old revisions remotely when deleted locally', async () => {
+		Setting.setValue('revisionService.intervalBetweenRevisions', 100);
+		jest.useFakeTimers({ advanceTimers: true });
+
+		const note = await Note.save({ title: 'note' });
+		const getNoteRevisions = () => {
+			return Revision.allByType(BaseModel.TYPE_NOTE, note.id);
+		};
+		jest.advanceTimersByTime(200);
+
+		await Note.save({ id: note.id, title: 'note REV0' });
+		jest.advanceTimersByTime(200);
+
+		await revisionService().collectRevisions(); // REV0
+		expect(await getNoteRevisions()).toHaveLength(1);
+
+		jest.advanceTimersByTime(200);
+
+		await Note.save({ id: note.id, title: 'note REV1' });
+		await revisionService().collectRevisions(); // REV1
+		expect(await getNoteRevisions()).toHaveLength(2);
+
+		// Should sync the revisions
+		await synchronizer().start();
+		await switchClient(2);
+		await synchronizer().start();
+
+		expect(await getNoteRevisions()).toHaveLength(2);
+		await revisionService().deleteOldRevisions(100);
+		expect(await getNoteRevisions()).toHaveLength(0);
+
+		await synchronizer().start();
+		expect(await getNoteRevisions()).toHaveLength(0);
+
+		// Syncing a new client should not download the deleted revisions
+		await setupDatabaseAndSynchronizer(3);
+		await switchClient(3);
+		await synchronizer().start();
+		expect(await getNoteRevisions()).toHaveLength(0);
+
+		// After switching back to the original client, syncing should locally delete
+		// the remotely deleted revisions.
+		await switchClient(1);
+		expect(await getNoteRevisions()).toHaveLength(2);
+		await synchronizer().start();
+		expect(await getNoteRevisions()).toHaveLength(0);
+
+		jest.useRealTimers();
+	});
 });