Mirror of https://github.com/laurent22/joplin.git

Compare commits


4 Commits

Author SHA1 Message Date
Laurent Cozic e81427a1f2 Merge branch 'dev' into sync_batch_upload 2021-06-18 11:50:41 +01:00
Laurent Cozic 3b9c02e92d Server: Add support for uploading multiple items in one request 2021-06-18 11:50:06 +01:00
Laurent Cozic d73eab6f82 Fixed tests 2021-06-17 18:32:52 +01:00
Laurent Cozic 958e9163b6 All: Batch upload during initial sync 2021-06-17 12:45:34 +01:00
10 changed files with 302 additions and 93 deletions

View File

@@ -19,6 +19,7 @@ import EncryptionService from './services/EncryptionService';
import JoplinError from './JoplinError';
import ShareService from './services/share/ShareService';
import TaskQueue from './TaskQueue';
import { preUploadItems, serializeAndUploadItem } from './services/synchronizer/uploadUtils';
const { sprintf } = require('sprintf-js');
const { Dirnames } = require('./services/synchronizer/utils/types');
@@ -389,6 +390,12 @@ export default class Synchronizer {
// correctly so as to share/unshare the right items.
await Folder.updateAllShareIds();
const uploadQueue = new TaskQueue('syncUpload', this.logger());
const uploadItem = (path: string, content: any) => {
return this.apiCall('put', path, content);
};
let errorToThrow = null;
let syncLock = null;
@@ -440,6 +447,8 @@ export default class Synchronizer {
const result = await BaseItem.itemsThatNeedSync(syncTargetId);
const locals = result.items;
await preUploadItems(uploadItem, uploadQueue, result.items.filter((it: any) => result.neverSyncedItemIds.includes(it.id)));
for (let i = 0; i < locals.length; i++) {
if (this.cancelling()) break;
@@ -588,8 +597,7 @@ export default class Synchronizer {
let canSync = true;
try {
if (this.testingHooks_.indexOf('notesRejectedByTarget') >= 0 && local.type_ === BaseModel.TYPE_NOTE) throw new JoplinError('Testing rejectedByTarget', 'rejectedByTarget');
const content = await ItemClass.serializeForSync(local);
await this.apiCall('put', path, content);
await serializeAndUploadItem(uploadItem, uploadQueue, ItemClass, path, local);
} catch (error) {
if (error && error.code === 'rejectedByTarget') {
await handleCannotSyncItem(ItemClass, syncTargetId, local, error.message);

View File

@@ -0,0 +1,30 @@
import { ModelType } from '../../BaseModel';
import BaseItem, { ItemThatNeedSync } from '../../models/BaseItem';
import TaskQueue from '../../TaskQueue';
type UploadItem = (path: string, content: any)=> Promise<any>;
export async function serializeAndUploadItem(uploadItem: UploadItem, uploadQueue: TaskQueue, ItemClass: any, path: string, local: ItemThatNeedSync) {
if (uploadQueue && uploadQueue.taskExists(path)) {
return uploadQueue.taskResult(path);
}
const content = await ItemClass.serializeForSync(local);
return uploadItem(path, content);
}
export async function preUploadItems(uploadItem: UploadItem, uploadQueue: TaskQueue, items: ItemThatNeedSync[]) {
for (const local of items) {
// For resources, additional logic is necessary - in particular the blob
// should be uploaded before the metadata, so we can't batch process.
if (local.type_ === ModelType.Resource) continue;
const ItemClass = BaseItem.itemClass(local);
const path = BaseItem.systemPath(local);
uploadQueue.push(path, async () => {
await serializeAndUploadItem(uploadItem, null, ItemClass, path, local);
});
}
await uploadQueue.waitForAll();
}
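
Taken together, the two helpers act as a read-through cache: preUploadItems warms the queue concurrently with everything that can be batch-processed, and when the main sync loop later reaches the same path, serializeAndUploadItem returns the queued result instead of serializing and uploading again. Below is a minimal sketch of that flow, using a hypothetical in-memory stand-in for TaskQueue (the real class in lib/TaskQueue is more involved, with concurrency limits and logging):

// Illustration only: a hypothetical stand-in for lib/TaskQueue.
class FakeTaskQueue {
	private results: Map<string, Promise<any>> = new Map();
	public push(id: string, fn: ()=> Promise<any>) {
		this.results.set(id, fn());
	}
	public taskExists(id: string) {
		return this.results.has(id);
	}
	public taskResult(id: string) {
		return this.results.get(id);
	}
	public async waitForAll() {
		await Promise.all(this.results.values());
	}
}

async function demo() {
	const queue = new FakeTaskQueue();
	let uploadCount = 0;
	const uploadItem = async (path: string, _content: any) => {
		uploadCount++;
		return { path };
	};
	// Phase 1: the pre-upload pass queues the item.
	queue.push('note1.md', () => uploadItem('note1.md', 'serialized body'));
	await queue.waitForAll();
	// Phase 2: the per-item loop finds the queued task and reuses its
	// result, so the item is not uploaded a second time.
	if (queue.taskExists('note1.md')) await queue.taskResult('note1.md');
	console.info(uploadCount); // 1
}

void demo();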

View File

@@ -12,6 +12,18 @@ const mimeUtils = require('@joplin/lib/mime-utils.js').mime;
// Converts "root:/myfile.txt:" to "myfile.txt"
const extractNameRegex = /^root:\/(.*):$/;
export interface SaveFromRawContentItem {
name: string;
body: Buffer;
}
export interface SaveFromRawContentResultItem {
item: Item;
error: any;
}
export type SaveFromRawContentResult = Record<string, SaveFromRawContentResultItem>;
export interface PaginatedItems extends PaginatedResults {
items: Item[];
}
@@ -282,62 +294,122 @@ export default class ItemModel extends BaseModel<Item> {
return this.itemToJoplinItem(raw);
}
public async saveFromRawContent(user: User, name: string, buffer: Buffer, options: ItemSaveOption = null): Promise<Item> {
public async saveFromRawContent(user: User, rawContentItems: SaveFromRawContentItem[], options: ItemSaveOption = null): Promise<SaveFromRawContentResult> {
options = options || {};
const existingItem = await this.loadByName(user.id, name);
// In this function, we first process the input items, which may be
// serialized Joplin items or raw buffers (for resources), and convert
// them to database items. Once that's done, those db items are saved in
// batch at the end.
const isJoplinItem = isJoplinItemName(name);
let isNote = false;
const item: Item = {
name,
};
let joplinItem: any = null;
let resourceIds: string[] = [];
if (isJoplinItem) {
joplinItem = await unserializeJoplinItem(buffer.toString());
isNote = joplinItem.type_ === ModelType.Note;
resourceIds = isNote ? linkedResourceIds(joplinItem.body) : [];
item.jop_id = joplinItem.id;
item.jop_parent_id = joplinItem.parent_id || '';
item.jop_type = joplinItem.type_;
item.jop_encryption_applied = joplinItem.encryption_applied || 0;
item.jop_share_id = joplinItem.share_id || '';
const joplinItemToSave = { ...joplinItem };
delete joplinItemToSave.id;
delete joplinItemToSave.parent_id;
delete joplinItemToSave.share_id;
delete joplinItemToSave.type_;
delete joplinItemToSave.encryption_applied;
item.content = Buffer.from(JSON.stringify(joplinItemToSave));
} else {
item.content = buffer;
interface ItemToProcess {
item: Item;
error: Error;
resourceIds?: string[];
isNote?: boolean;
joplinItem?: any;
}
if (existingItem) item.id = existingItem.id;
const existingItems = await this.loadByNames(user.id, rawContentItems.map(i => i.name));
const itemsToProcess: Record<string, ItemToProcess> = {};
if (options.shareId) item.jop_share_id = options.shareId;
for (const rawItem of rawContentItems) {
try {
const isJoplinItem = isJoplinItemName(rawItem.name);
let isNote = false;
await this.models().user().checkMaxItemSizeLimit(user, buffer, item, joplinItem);
const item: Item = {
name: rawItem.name,
};
return this.withTransaction<Item>(async () => {
const savedItem = await this.saveForUser(user.id, item);
let joplinItem: any = null;
if (isNote) {
await this.models().itemResource().deleteByItemId(savedItem.id);
await this.models().itemResource().addResourceIds(savedItem.id, resourceIds);
let resourceIds: string[] = [];
if (isJoplinItem) {
joplinItem = await unserializeJoplinItem(rawItem.body.toString());
isNote = joplinItem.type_ === ModelType.Note;
resourceIds = isNote ? linkedResourceIds(joplinItem.body) : [];
item.jop_id = joplinItem.id;
item.jop_parent_id = joplinItem.parent_id || '';
item.jop_type = joplinItem.type_;
item.jop_encryption_applied = joplinItem.encryption_applied || 0;
item.jop_share_id = joplinItem.share_id || '';
const joplinItemToSave = { ...joplinItem };
delete joplinItemToSave.id;
delete joplinItemToSave.parent_id;
delete joplinItemToSave.share_id;
delete joplinItemToSave.type_;
delete joplinItemToSave.encryption_applied;
item.content = Buffer.from(JSON.stringify(joplinItemToSave));
} else {
item.content = rawItem.body;
}
const existingItem = existingItems.find(i => i.name === rawItem.name);
if (existingItem) item.id = existingItem.id;
if (options.shareId) item.jop_share_id = options.shareId;
await this.models().user().checkMaxItemSizeLimit(user, rawItem.body, item, joplinItem);
itemsToProcess[rawItem.name] = {
item: item,
error: null,
resourceIds,
isNote,
joplinItem,
};
} catch (error) {
itemsToProcess[rawItem.name] = {
item: null,
error: error,
};
}
}
return savedItem;
const output: SaveFromRawContentResult = {};
await this.withTransaction(async () => {
for (const name of Object.keys(itemsToProcess)) {
const o = itemsToProcess[name];
if (o.error) {
output[name] = {
item: null,
error: o.error,
};
continue;
}
const itemToSave = o.item;
try {
const savedItem = await this.saveForUser(user.id, itemToSave);
if (o.isNote) {
await this.models().itemResource().deleteByItemId(savedItem.id);
await this.models().itemResource().addResourceIds(savedItem.id, o.resourceIds);
}
output[name] = {
item: savedItem,
error: null,
};
} catch (error) {
output[name] = {
item: null,
error: error,
};
}
}
});
return output;
}
protected async validate(item: Item, options: ValidateOptions = {}): Promise<Item> {
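
Because the result is keyed by item name, callers can handle partial failure per item rather than all-or-nothing. A minimal consumer sketch, assuming the same models() helper used in the server tests and two already-serialized note bodies (hypothetical variables):

// Sketch only: `models`, `user` and the serialized bodies come from the
// surrounding server code.
const result = await models().item().saveFromRawContent(user, [
	{ name: 'note1.md', body: Buffer.from(serializedNote1) },
	{ name: 'note2.md', body: Buffer.from(serializedNote2) },
]);

for (const [name, r] of Object.entries(result)) {
	if (r.error) {
		console.warn(`Could not save ${name}: ${r.error.message}`);
	} else {
		console.info(`Saved ${name} as item ${r.item.id}`);
	}
}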

View File

@@ -0,0 +1,19 @@
import { SubPath } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { RouteType } from '../../utils/types';
import { AppContext } from '../../utils/types';
import { putItemContents } from './items';
import { PaginatedResults } from '../../models/utils/pagination';
const router = new Router(RouteType.Api);
router.put('api/batch_items', async (path: SubPath, ctx: AppContext) => {
const output: PaginatedResults = {
items: await putItemContents(path, ctx, true) as any,
has_more: false,
};
return output;
});
export default router;
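
The request body mirrors the SaveFromRawContentItem shape, and the response wraps the per-item result map in a PaginatedResults envelope. A sketch of the expected payload, based on the tests further down:

// Payload shape accepted by PUT api/batch_items (see the tests below):
const payload = {
	items: [
		{ name: '00000000000000000000000000000001.md', body: '...serialized note...' },
		{ name: '00000000000000000000000000000002.md', body: '...serialized note...' },
	],
};
// The response's `items` maps each name to { item, error }, and
// `has_more` is always false.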

View File

@@ -3,10 +3,11 @@ import { NoteEntity } from '@joplin/lib/services/database/types';
import { ModelType } from '@joplin/lib/BaseModel';
import { deleteApi, getApi, putApi } from '../../utils/testing/apiUtils';
import { Item } from '../../db';
import { PaginatedItems } from '../../models/ItemModel';
import { PaginatedItems, SaveFromRawContentResult } from '../../models/ItemModel';
import { shareFolderWithUser } from '../../utils/testing/shareApiUtils';
import { resourceBlobPath } from '../../utils/joplinUtils';
import { ErrorForbidden, ErrorPayloadTooLarge } from '../../utils/errors';
import { PaginatedResults } from '../../models/utils/pagination';
describe('api_items', function() {
@@ -149,6 +150,56 @@ describe('api_items', function() {
expect(result.name).toBe(`${noteId}.md`);
});
test('should batch upload items', async function() {
const { session: session1 } = await createUserAndSession(1, false);
const result: PaginatedResults = await putApi(session1.id, 'batch_items', {
items: [
{
name: '00000000000000000000000000000001.md',
body: makeNoteSerializedBody({ id: '00000000000000000000000000000001' }),
},
{
name: '00000000000000000000000000000002.md',
body: makeNoteSerializedBody({ id: '00000000000000000000000000000002' }),
},
],
});
expect(Object.keys(result.items).length).toBe(2);
expect(Object.keys(result.items).sort()).toEqual(['00000000000000000000000000000001.md', '00000000000000000000000000000002.md']);
});
test('should report errors when batch uploading', async function() {
const { user: user1, session: session1 } = await createUserAndSession(1, false);
const note1 = makeNoteSerializedBody({ id: '00000000000000000000000000000001' });
await models().user().save({ id: user1.id, max_item_size: note1.length });
const result: PaginatedResults = await putApi(session1.id, 'batch_items', {
items: [
{
name: '00000000000000000000000000000001.md',
body: note1,
},
{
name: '00000000000000000000000000000002.md',
body: makeNoteSerializedBody({ id: '00000000000000000000000000000002', body: 'too large' }),
},
],
});
const items: SaveFromRawContentResult = result.items as any;
expect(Object.keys(items).length).toBe(2);
expect(Object.keys(items).sort()).toEqual(['00000000000000000000000000000001.md', '00000000000000000000000000000002.md']);
expect(items['00000000000000000000000000000001.md'].item).toBeTruthy();
expect(items['00000000000000000000000000000001.md'].error).toBeFalsy();
expect(items['00000000000000000000000000000002.md'].item).toBeFalsy();
expect(items['00000000000000000000000000000002.md'].error.httpCode).toBe(ErrorPayloadTooLarge.httpCode);
});
test('should list children', async function() {
const { session } = await createUserAndSession(1, true);

View File

@@ -6,13 +6,63 @@ import { RouteType } from '../../utils/types';
import { AppContext } from '../../utils/types';
import * as fs from 'fs-extra';
import { ErrorForbidden, ErrorMethodNotAllowed, ErrorNotFound } from '../../utils/errors';
import ItemModel, { ItemSaveOption } from '../../models/ItemModel';
import ItemModel, { ItemSaveOption, SaveFromRawContentItem } from '../../models/ItemModel';
import { requestDeltaPagination, requestPagination } from '../../models/utils/pagination';
import { AclAction } from '../../models/BaseModel';
import { safeRemove } from '../../utils/fileUtils';
const router = new Router(RouteType.Api);
export async function putItemContents(path: SubPath, ctx: AppContext, isBatch: boolean) {
if (!ctx.owner.can_upload) throw new ErrorForbidden('Uploading content is disabled');
const parsedBody = await formParse(ctx.req);
const bodyFields = parsedBody.fields;
const saveOptions: ItemSaveOption = {};
let items: SaveFromRawContentItem[] = [];
if (isBatch) {
items = bodyFields.items.map((item: any) => {
return {
name: item.name,
body: item.body ? Buffer.from(item.body, 'utf8') : Buffer.alloc(0),
};
});
} else {
const filePath = parsedBody?.files?.file ? parsedBody.files.file.path : null;
try {
const buffer = filePath ? await fs.readFile(filePath) : Buffer.alloc(0);
// This endpoint can optionally set the associated jop_share_id field. It
// is only useful when uploading resource blobs (under the .resource folder)
// since they can't have metadata. Note that Folder and Resource items all
// include the "share_id" property, so it doesn't need to be set via
// query parameter.
if (ctx.query['share_id']) {
saveOptions.shareId = ctx.query['share_id'];
await ctx.models.item().checkIfAllowed(ctx.owner, AclAction.Create, { jop_share_id: saveOptions.shareId });
}
items = [
{
name: ctx.models.item().pathToName(path.id),
body: buffer,
},
];
} finally {
if (filePath) await safeRemove(filePath);
}
}
const output = await ctx.models.item().saveFromRawContent(ctx.owner, items, saveOptions);
for (const [name] of Object.entries(output)) {
if (output[name].item) output[name].item = ctx.models.item().toApiOutput(output[name].item) as Item;
}
return output;
}
// Note about access control:
//
// - All these calls are scoped to a user, which is derived from the session
@@ -66,36 +116,10 @@ router.get('api/items/:id/content', async (path: SubPath, ctx: AppContext) => {
});
router.put('api/items/:id/content', async (path: SubPath, ctx: AppContext) => {
if (!ctx.owner.can_upload) throw new ErrorForbidden('Uploading content is disabled');
const itemModel = ctx.models.item();
const name = itemModel.pathToName(path.id);
const parsedBody = await formParse(ctx.req);
const filePath = parsedBody?.files?.file ? parsedBody.files.file.path : null;
let outputItem: Item = null;
try {
const buffer = filePath ? await fs.readFile(filePath) : Buffer.alloc(0);
const saveOptions: ItemSaveOption = {};
// This end point can optionally set the associated jop_share_id field. It
// is only useful when uploading resource blob (under .resource folder)
// since they can't have metadata. Note, Folder and Resource items all
// include the "share_id" field property so it doesn't need to be set via
// query parameter.
if (ctx.query['share_id']) {
saveOptions.shareId = ctx.query['share_id'];
await itemModel.checkIfAllowed(ctx.owner, AclAction.Create, { jop_share_id: saveOptions.shareId });
}
const item = await itemModel.saveFromRawContent(ctx.owner, name, buffer, saveOptions);
outputItem = itemModel.toApiOutput(item) as Item;
} finally {
if (filePath) await safeRemove(filePath);
}
return outputItem;
const results = await putItemContents(path, ctx, false);
const result = results[Object.keys(results)[0]];
if (result.error) throw result.error;
return result.item;
});
router.get('api/items/:id/delta', async (_path: SubPath, ctx: AppContext) => {
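
As the comment in putItemContents notes, share_id only matters for resource blobs under the .resource folder, which carry no metadata of their own. A hypothetical client-side call against the single-item route (the base URL, variables, and auth header are illustrative; only the route shape and the multipart "file" field come from this diff):

// Hypothetical sketch using Node 18+ global fetch/FormData/Blob.
const form = new FormData();
form.append('file', new Blob([blobBytes]), 'blob');

await fetch(`${baseUrl}/api/items/root:/.resource/${resourceId}:/content?share_id=${shareId}`, {
	method: 'PUT',
	// Session auth header, assumed from Joplin Server API conventions.
	headers: { 'X-API-AUTH': sessionId },
	body: form,
});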

View File

@@ -3,6 +3,7 @@ import { Routers } from '../utils/routeUtils';
import apiBatch from './api/batch';
import apiDebug from './api/debug';
import apiEvents from './api/events';
import apiBatchItems from './api/batch_items';
import apiItems from './api/items';
import apiPing from './api/ping';
import apiSessions from './api/sessions';
@@ -27,6 +28,7 @@ import defaultRoute from './default';
const routes: Routers = {
'api/batch': apiBatch,
'api/batch_items': apiBatchItems,
'api/debug': apiDebug,
'api/events': apiEvents,
'api/items': apiItems,

View File

@@ -119,7 +119,7 @@ export default class MustacheService {
globalParams = {
...this.defaultLayoutOptions,
...globalParams,
userDisplayName: this.userDisplayName(globalParams.owner),
userDisplayName: this.userDisplayName(globalParams ? globalParams.owner : null),
};
const contentHtml = Mustache.render(

View File

@@ -60,7 +60,8 @@ async function createItemTree3(sessionId: Uuid, userId: Uuid, parentFolderId: st
}
}
const newItem = await models().item().saveFromRawContent(user, `${jopItem.id}.md`, Buffer.from(serializedBody));
const result = await models().item().saveFromRawContent(user, [{ name: `${jopItem.id}.md`, body: Buffer.from(serializedBody) }]);
const newItem = result[`${jopItem.id}.md`].item;
if (isFolder && jopItem.children.length) await createItemTree3(sessionId, userId, newItem.jop_id, shareId, jopItem.children);
}
}

View File

@@ -275,19 +275,20 @@ export async function createItemTree(userId: Uuid, parentFolderId: string, tree:
}
}
export async function createItemTree2(userId: Uuid, parentFolderId: string, tree: any[]): Promise<void> {
const itemModel = models().item();
const user = await models().user().load(userId);
// export async function createItemTree2(userId: Uuid, parentFolderId: string, tree: any[]): Promise<void> {
// const itemModel = models().item();
// const user = await models().user().load(userId);
for (const jopItem of tree) {
const isFolder = !!jopItem.children;
const serializedBody = isFolder ?
makeFolderSerializedBody({ ...jopItem, parent_id: parentFolderId }) :
makeNoteSerializedBody({ ...jopItem, parent_id: parentFolderId });
const newItem = await itemModel.saveFromRawContent(user, `${jopItem.id}.md`, Buffer.from(serializedBody));
if (isFolder && jopItem.children.length) await createItemTree2(userId, newItem.jop_id, jopItem.children);
}
}
// for (const jopItem of tree) {
// const isFolder = !!jopItem.children;
// const serializedBody = isFolder ?
// makeFolderSerializedBody({ ...jopItem, parent_id: parentFolderId }) :
// makeNoteSerializedBody({ ...jopItem, parent_id: parentFolderId });
// const result = await itemModel.saveFromRawContent(user, [{ name: `${jopItem.id}.md`, body: Buffer.from(serializedBody) }]);
// const newItem = result[`${jopItem.id}.md`].item;
// if (isFolder && jopItem.children.length) await createItemTree2(userId, newItem.jop_id, jopItem.children);
// }
// }
export async function createItemTree3(userId: Uuid, parentFolderId: string, shareId: Uuid, tree: any[]): Promise<void> {
const itemModel = models().item();
@@ -298,7 +299,8 @@ export async function createItemTree3(userId: Uuid, parentFolderId: string, shar
const serializedBody = isFolder ?
makeFolderSerializedBody({ ...jopItem, parent_id: parentFolderId, share_id: shareId }) :
makeNoteSerializedBody({ ...jopItem, parent_id: parentFolderId, share_id: shareId });
const newItem = await itemModel.saveFromRawContent(user, `${jopItem.id}.md`, Buffer.from(serializedBody));
const result = await itemModel.saveFromRawContent(user, [{ name: `${jopItem.id}.md`, body: Buffer.from(serializedBody) }]);
const newItem = result[`${jopItem.id}.md`].item;
if (isFolder && jopItem.children.length) await createItemTree3(userId, newItem.jop_id, shareId, jopItem.children);
}
}