Mirror of https://github.com/laurent22/joplin.git, synced 2024-12-24 10:27:10 +02:00
All: Ensure that shared notebook children are not deleted when shared, unshared and shared again, and a conflict happens
This commit is contained in:
parent eba1d6df56
commit ccf9882452
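
The gist of the change, as a hedged TypeScript sketch (the function names and the numeric change-source values below are illustrative, not the actual Joplin API): deletions that arrive via sync must be applied locally without being tracked, so that they are never echoed back to the sync target on the next sync and cannot wipe a re-shared notebook.

```typescript
// Minimal sketch of the intent, not the real Joplin code.
interface DeleteOptions {
	changeSource?: number;
	// When false, the deletion is not added to the local deletion log that is
	// uploaded to the sync target on the next sync.
	trackDeleted?: boolean;
}

// Stand-in for ItemClass.delete(); the real method lives on the model classes.
async function deleteItem(id: string, options: DeleteOptions = {}): Promise<void> {
	console.info(`deleting ${id}`, options);
}

// Deletion that came *from* the sync target: do not track it.
export async function applyRemoteDeletion(id: string): Promise<void> {
	await deleteItem(id, { changeSource: 2 /* illustrative "sync" source */, trackDeleted: false });
}

// Deletion initiated by the user: keep the default tracking so it is
// propagated to the sync target.
export async function applyLocalDeletion(id: string): Promise<void> {
	await deleteItem(id, { changeSource: 1 /* illustrative "user" source */ });
}
```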
@@ -28,9 +28,15 @@ export enum ModelType {
 export interface DeleteOptions {
 idFieldName?: string;
-trackDeleted?: boolean;
 changeSource?: number;
 deleteChildren?: boolean;
+
+// By default the application tracks item deletions, so that they can be
+// applied to the remote items during synchronisation. However, in some
+// cases, we don't want this. In particular when an item is deleted via
+// sync, we don't need to track the deletion, because the operation doesn't
+// need to applied again on next sync.
+trackDeleted?: boolean;
 }
 
 class BaseModel {
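
For context, a hedged sketch of how a delete path can honour the `trackDeleted` flag documented above; the log shape and helper below are assumptions, not the actual BaseModel/BaseItem implementation:

```typescript
// Assumed stand-in for the "deleted_items" bookkeeping that sync later reads.
const deletedItemsLog: { id: string; changeSource: number }[] = [];

export async function batchDelete(
	ids: string[],
	options: { changeSource?: number; trackDeleted?: boolean } = {},
): Promise<void> {
	// Tracking is on by default; the synchronizer opts out for deletions it
	// applies on behalf of the sync target.
	const trackDeleted = options.trackDeleted !== false;

	// ... the item rows themselves would be removed from the database here ...

	if (trackDeleted) {
		for (const id of ids) {
			deletedItemsLog.push({ id, changeSource: options.changeSource ?? 1 });
		}
	}
}
```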
@@ -20,7 +20,7 @@ import JoplinError from './JoplinError';
 import ShareService from './services/share/ShareService';
 import TaskQueue from './TaskQueue';
 import ItemUploader from './services/synchronizer/ItemUploader';
-import { FileApi } from './file-api';
+import { FileApi, RemoteItem } from './file-api';
 import JoplinDatabase from './JoplinDatabase';
 import { fetchSyncInfo, getActiveMasterKey, localSyncInfo, mergeSyncInfos, saveLocalSyncInfo, SyncInfo, syncInfoEquals, uploadSyncInfo } from './services/synchronizer/syncInfoUtils';
 import { getMasterPassword, setupAndDisableEncryption, setupAndEnableEncryption } from './services/e2ee/utils';
@@ -31,24 +31,6 @@ const { Dirnames } = require('./services/synchronizer/utils/types');
 
 const logger = Logger.create('Synchronizer');
 
-interface RemoteItem {
-id: string;
-path?: string;
-type_?: number;
-isDeleted?: boolean;
-
-// This the time when the file was created on the server. It is used for
-// example for the locking mechanim or any file that's not an actual Joplin
-// item.
-updated_time?: number;
-
-// This is the time that corresponds to the actual Joplin item updated_time
-// value. A note is always uploaded with a delay so the server updated_time
-// value will always be ahead. However for synchronising we need to know the
-// exact Joplin item updated_time value.
-jop_updated_time?: number;
-}
-
 function isCannotSyncError(error: any): boolean {
 if (!error) return false;
 if (['rejectedByTarget', 'fileNotFound'].indexOf(error.code) >= 0) return true;
@@ -550,7 +532,7 @@ export default class Synchronizer {
 if (this.cancelling()) break;
 
 let local = locals[i];
-const ItemClass = BaseItem.itemClass(local);
+const ItemClass: typeof BaseItem = BaseItem.itemClass(local);
 const path = BaseItem.systemPath(local);
 
 // Safety check to avoid infinite loops.
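
The explicit `typeof BaseItem` annotation matters because `BaseItem.itemClass()` is effectively untyped; without it, the `delete()` calls further down would accept any options object. A hedged sketch with stand-in class names:

```typescript
// Stand-in class; the real ones are BaseItem and its subclasses (Note, Folder, ...).
class BaseItemLike {
	public static async delete(_id: string, _options: { trackDeleted?: boolean } = {}): Promise<void> {
		// the real method removes the item and, by default, tracks the deletion
	}

	// The real itemClass() looks up the concrete model class for an item and
	// returns `any`, so the caller loses all static typing.
	public static itemClass(_item: unknown): any {
		return BaseItemLike;
	}
}

const local = { id: 'abc123', type_: 1 };

// With the annotation, the static delete() signature is visible and the
// options object (e.g. { trackDeleted: false }) is type-checked.
const ItemClass: typeof BaseItemLike = BaseItemLike.itemClass(local);
void ItemClass.delete(local.id, { trackDeleted: false });
```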
@@ -744,7 +726,10 @@ export default class Synchronizer {
 const syncTimeQueries = BaseItem.updateSyncTimeQueries(syncTargetId, local, time.unixMs());
 await ItemClass.save(local, { autoTimestamp: false, changeSource: ItemChange.SOURCE_SYNC, nextQueries: syncTimeQueries });
 } else {
-await ItemClass.delete(local.id, { changeSource: ItemChange.SOURCE_SYNC });
+await ItemClass.delete(local.id, {
+changeSource: ItemChange.SOURCE_SYNC,
+trackDeleted: false,
+});
 }
 } else if (action == 'noteConflict') {
 // ------------------------------------------------------------------------------
@@ -797,7 +782,7 @@ export default class Synchronizer {
 if (local.encryption_applied) this.dispatch({ type: 'SYNC_GOT_ENCRYPTED_ITEM' });
 } else {
 // Remote no longer exists (note deleted) so delete local one too
-await ItemClass.delete(local.id, { changeSource: ItemChange.SOURCE_SYNC });
+await ItemClass.delete(local.id, { changeSource: ItemChange.SOURCE_SYNC, trackDeleted: false });
 }
 }
 
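
Both call sites above run when the remote copy of an item no longer exists; in the conflict branch the local edits are first preserved as a conflict note, and only then is the original deleted, now without tracking. A hedged sketch of that flow (the helper names are illustrative, not the Joplin internals):

```typescript
interface LocalNote {
	id: string;
	title: string;
	is_conflict?: number;
}

// Illustrative stand-ins for Note.save() and ItemClass.delete().
async function saveConflictCopy(note: LocalNote): Promise<void> {
	console.info('saved conflict copy', { ...note, id: 'new-id', is_conflict: 1 });
}

async function deleteLocal(id: string, options: { changeSource?: number; trackDeleted?: boolean }): Promise<void> {
	console.info('deleted local item', id, options);
}

// Remote no longer exists but the note was modified locally: keep the edits
// as a conflict, then remove the original without recording the deletion.
export async function handleRemotelyDeletedConflict(local: LocalNote): Promise<void> {
	await saveConflictCopy(local);
	await deleteLocal(local.id, { changeSource: 2 /* sync */, trackDeleted: false });
}
```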
@@ -16,6 +16,30 @@ export interface MultiPutItem {
 body: string;
 }
 
+export interface RemoteItem {
+id: string;
+path?: string;
+type_?: number;
+isDeleted?: boolean;
+
+// This the time when the file was created on the server. It is used for
+// example for the locking mechanim or any file that's not an actual Joplin
+// item.
+updated_time?: number;
+
+// This is the time that corresponds to the actual Joplin item updated_time
+// value. A note is always uploaded with a delay so the server updated_time
+// value will always be ahead. However for synchronising we need to know the
+// exact Joplin item updated_time value.
+jop_updated_time?: number;
+}
+
+export interface PaginatedList {
+items: RemoteItem[];
+hasMore: boolean;
+context: any;
+}
+
 function requestCanBeRepeated(error: any) {
 const errorCode = typeof error === 'object' && error.code ? error.code : null;
 
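
With `RemoteItem` and `PaginatedList` now exported from `file-api`, callers such as the synchronizer and the tests can type remote listings end to end. A hedged sketch of draining a paginated listing, assuming the `hasMore`/`context` contract shown in the interface above:

```typescript
import { FileApi, PaginatedList, RemoteItem } from './file-api';

// Collect every remote item under `path`, following the pagination context
// until the target reports there are no more pages.
export async function listAllRemoteItems(fileApi: FileApi, path = ''): Promise<RemoteItem[]> {
	const output: RemoteItem[] = [];
	let context: any = null;

	while (true) {
		const page: PaginatedList = await fileApi.list(path, { context });
		output.push(...page.items);
		if (!page.hasMore) break;
		context = page.context;
	}

	return output;
}
```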
@@ -226,7 +250,7 @@ class FileApi {
 
 // DRIVER MUST RETURN PATHS RELATIVE TO `path`
 // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
-async list(path = '', options: any = null) {
+public async list(path = '', options: any = null): Promise<PaginatedList> {
 if (!options) options = {};
 if (!('includeHidden' in options)) options.includeHidden = false;
 if (!('context' in options)) options.context = null;
@@ -235,7 +259,7 @@ class FileApi {
 
 logger.debug(`list ${this.baseDir()}`);
 
-const result = await tryAndRepeat(() => this.driver_.list(this.fullPath(path), options), this.requestRepeatCount());
+const result: PaginatedList = await tryAndRepeat(() => this.driver_.list(this.fullPath(path), options), this.requestRepeatCount());
 
 if (!options.includeHidden) {
 const temp = [];
@@ -236,7 +236,7 @@ export default class BaseItem extends BaseModel {
 return ItemClass.delete(id);
 }
 
-static async delete(id: string, options: any = null) {
+static async delete(id: string, options: DeleteOptions = null) {
 return this.batchDelete([id], options);
 }
 
@@ -394,4 +394,49 @@ describe('Synchronizer.basics', function() {
 expect((await Note.all()).length).toBe(11);
 }));
 
+it('should not sync deletions that came via sync even when there is a conflict', (async () => {
+// This test is mainly to simulate sharing, unsharing and sharing a note
+// again. Previously, when doing so, the app would create deleted_items
+// objects on the recipient when the owner unshares. It means that when
+// sharing again, the recipient would apply the deletions and delete
+// everything in the shared notebook.
+//
+// Specifically it was happening when a conflict was generated as a
+// result of the items being deleted.
+//
+// - C1 creates a note and sync
+// - C2 sync and get the note
+// - C2 deletes the note and sync
+// - C1 modify the note, and sync
+//
+// => A conflict is created. The note is deleted and a copy is created
+// in the Conflict folder.
+//
+// After this, we recreate the note on the sync target (simulates the
+// note being shared again), and we check that C2 doesn't attempt to
+// delete that note.
+
+const note = await Note.save({});
+await synchronizerStart();
+const noteSerialized = await fileApi().get(`${note.id}.md`);
+
+await switchClient(2);
+
+await synchronizerStart();
+await Note.delete(note.id);
+await synchronizerStart();
+
+await switchClient(1);
+
+await Note.save({ id: note.id });
+await synchronizerStart();
+expect((await Note.all())[0].is_conflict).toBe(1);
+await fileApi().put(`${note.id}.md`, noteSerialized); // Recreate the note - simulate sharing again.
+await synchronizerStart();
+
+// Check that the client didn't delete the note
+const remotes = (await fileApi().list()).items;
+expect(remotes.find(r => r.path === `${note.id}.md`)).toBeTruthy();
+}));
+
 });
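
An additional assertion that could be appended to the test above, stated with a hedge: assuming a `BaseItem.deletedItems()`-style accessor for the locally tracked deletions (the helper and field names are assumptions, not confirmed by this diff), the conflicted note must not be queued for remote deletion after the final sync:

```typescript
// Hypothetical extra assertion for the test above; deletedItems() and the
// item_id field are assumptions about the shape of the local deletion log.
const trackedDeletions = await BaseItem.deletedItems(syncTargetId());
expect(trackedDeletions.find((d: any) => d.item_id === note.id)).toBeFalsy();
```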