1
0
mirror of https://github.com/laurent22/joplin.git synced 2026-04-18 19:42:23 +02:00

Compare commits

..

1 Commits

Author SHA1 Message Date
Laurent Cozic 4a5a125360 update 2026-04-17 08:26:32 +01:00
2 changed files with 45 additions and 34 deletions
+1 -1
View File
@@ -320,7 +320,7 @@ export async function createResourcesFromPaths(mediaFiles: DownloadedMediaFile[]
const resource = await shim.createResourceFromPath(mediaFile.path);
return { ...mediaFile, resource };
} catch (error) {
logger.info(`Cannot create resource for ${mediaFile.originalUrl}`, error);
logger.warn(`Cannot create resource for ${mediaFile.originalUrl}`, error);
return { ...mediaFile, resource: null };
}
};
+44 -33
View File
@@ -723,50 +723,55 @@ export default class ItemModel extends BaseModel<Item> {
const itemToSave = { ...o.item };
const content = itemToSave.content;
delete itemToSave.content;
itemToSave.content_storage_id = (await this.storageDriver()).storageId;
itemToSave.content_size = content ? content.byteLength : 0;
// Here we save the item row and content, and we want to
// make sure that either both are saved or none of them.
// The savepoint wraps the entire operation so that any
// error (including unique constraint violations) is
// rolled back cleanly without aborting the outer
// transaction.
// TODO: When an item is uploaded multiple times
// simultaneously there could be a race condition, where the
// content would not match the db row (for example, the
// content_size would differ).
//
// Possible solutions:
//
// - Row-level lock on items.id, and release once the
// content is saved.
// - Or external lock - eg. Redis.
const savePoint = await this.setSavePoint();
try {
const content = itemToSave.content;
delete itemToSave.content;
itemToSave.content_storage_id = (await this.storageDriver()).storageId;
itemToSave.content_size = content ? content.byteLength : 0;
// Here we save the item row and content, and we want to
// make sure that either both are saved or none of them.
// This is done by setting up a save point before saving the
// row, and rolling back if the content cannot be saved.
//
// Normally, since we are in a transaction, throwing an
// error should work, but since we catch all errors within
// this block it doesn't work.
// TODO: When an item is uploaded multiple times
// simultaneously there could be a race condition, where the
// content would not match the db row (for example, the
// content_size would differ).
//
// Possible solutions:
//
// - Row-level lock on items.id, and release once the
// content is saved.
// - Or external lock - eg. Redis.
const savePoint = await this.setSavePoint();
const savedItem = await this.saveForUser(user.id, itemToSave);
await this.storageDriverWrite(savedItem.id, content, { models: this.models() });
try {
await this.storageDriverWrite(savedItem.id, content, { models: this.models() });
await this.releaseSavePoint(savePoint);
} catch (error) {
await this.rollbackSavePoint(savePoint);
throw error;
}
if (o.isNote) {
await this.models().itemResource().deleteByItemId(savedItem.id);
await this.models().itemResource().addResourceIds(savedItem.id, o.resourceIds);
}
await this.releaseSavePoint(savePoint);
output[name] = {
item: savedItem,
error: null,
};
} catch (error) {
await this.rollbackSavePoint(savePoint);
output[name] = {
item: null,
error: error,
@@ -1021,10 +1026,16 @@ export default class ItemModel extends BaseModel<Item> {
// but it would be nice to get to the bottom of this bug.
public processOrphanedItems = async () => {
await this.withTransaction(async () => {
// Find items that have no corresponding entry in user_items.
// NOT EXISTS is used instead of LEFT JOIN for performance as it
// allows Postgres to short-circuit on the first match per item.
const orphanedItems: Item[] = await this.db(this.tableName)
.select(['items.id', 'items.owner_id'])
.leftJoin('user_items', 'user_items.item_id', 'items.id')
.whereNull('user_items.user_id');
.whereNotExists(
this.db('user_items')
.select(this.db.raw('1'))
.whereRaw('user_items.item_id = items.id'),
);
const userIds: string[] = orphanedItems.map(i => i.owner_id);
const users = await this.models().user().loadByIds(userIds, { fields: ['id'] });