import { ModelType } from '../../BaseModel';
import { FileApi, MultiPutItem } from '../../file-api';
import JoplinError from '../../JoplinError';
import Logger from '@joplin/utils/Logger';
import BaseItem from '../../models/BaseItem';
import { BaseItemEntity } from '../database/types';
import { ApiCallFunction } from './utils/types';

const logger = Logger.create('ItemUploader');

interface BatchItem extends MultiPutItem {
	localItemUpdatedTime: number;
}
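
// Uploads items to the sync target. When the target supports the "multiPut"
// call, items can first be pre-uploaded in batches; serializeAndUploadItem()
// then skips any item that has already been successfully pre-uploaded.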
export default class ItemUploader {

	private api_: FileApi;
	private apiCall_: ApiCallFunction;
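
	// Server responses for pre-uploaded items, keyed by item path, along with
	// the updated_time each item had when it was pre-uploaded. These are used
	// to detect whether an item changed between pre-upload and regular
	// processing.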
	private preUploadedItems_: Record<string, any> = {};
	private preUploadedItemUpdatedTimes_: Record<string, number> = {};
	private maxBatchSize_ = 1 * 1024 * 1024; // 1MB

	public constructor(api: FileApi, apiCall: ApiCallFunction) {
		this.api_ = api;
		this.apiCall_ = apiCall;
	}
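
	// Maximum size in bytes of a "multiPut" batch. Exposed via a getter and
	// setter so that callers can adjust it.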
	public get maxBatchSize() {
		return this.maxBatchSize_;
	}

	public set maxBatchSize(v: number) {
		this.maxBatchSize_ = v;
	}
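
	// Serializes and uploads a single item. If the item has already been
	// pre-uploaded and has not changed since, the upload is skipped. If the
	// pre-upload failed for this item, the associated error is rethrown here.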
	public async serializeAndUploadItem(ItemClass: any, path: string, local: BaseItemEntity) {
		const preUploadItem = this.preUploadedItems_[path];
		if (preUploadItem) {
			if (this.preUploadedItemUpdatedTimes_[path] !== local.updated_time) {
				// Normally this should be rare as it can only happen if the
				// item has been changed between the moment it was pre-uploaded
				// and the moment it is processed by the synchronizer. It could
				// happen, for example, when a note is being edited at that
				// exact moment. In that case, we proceed with the regular
				// upload.
				logger.warn(`Pre-uploaded item updated_time has changed. It is going to be re-uploaded: ${path} (From ${this.preUploadedItemUpdatedTimes_[path]} to ${local.updated_time})`);
			} else {
				const error = preUploadItem.error;
				if (error) throw new JoplinError(error.message ? error.message : 'Unknown pre-upload error', error.code);
				return;
			}
		}

		const content = await ItemClass.serializeForSync(local);
		await this.apiCall_('put', path, content);
	}
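
	// Pre-uploads the given items in batches using the "multiPut" call, when
	// the sync target supports it, so that serializeAndUploadItem() can later
	// skip the items that were successfully uploaded here.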
	public async preUploadItems(items: BaseItemEntity[]) {
		if (!this.api_.supportsMultiPut) return;

		const itemsToUpload: BatchItem[] = [];

		for (const local of items) {
			// For resources, additional logic is necessary - in particular the blob
			// should be uploaded before the metadata, so we can't batch process.
			if (local.type_ === ModelType.Resource) continue;

			const ItemClass = BaseItem.itemClass(local);
			itemsToUpload.push({
				name: BaseItem.systemPath(local),
				body: await ItemClass.serializeForSync(local),
				localItemUpdatedTime: local.updated_time,
			});
		}

		let batchSize = 0;
		let currentBatch: BatchItem[] = [];
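
		// Uploads one batch, recording for each item the updated_time it had
		// at upload time, and merging the per-item server responses into
		// preUploadedItems_.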
		const uploadBatch = async (batch: BatchItem[]) => {
			for (const batchItem of batch) {
				this.preUploadedItemUpdatedTimes_[batchItem.name] = batchItem.localItemUpdatedTime;
			}

			const response = await this.apiCall_('multiPut', batch);

			this.preUploadedItems_ = {
				...this.preUploadedItems_,
				...response.items,
			};
		};
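
		// Greedily pack the serialized items into batches of at most
		// maxBatchSize bytes.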
		while (itemsToUpload.length) {
			const itemToUpload = itemsToUpload.pop();
			const itemSize = itemToUpload.name.length + itemToUpload.body.length;

			// Although it should be rare, if the item itself is above the
			// batch max size, we skip it. In that case it will be uploaded the
			// regular way when the synchronizer calls `serializeAndUploadItem()`
			if (itemSize > this.maxBatchSize) continue;
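
			// If adding this item would push the current batch over the size
			// limit, upload the current batch first, then start a new one
			// with this item.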
			if (batchSize + itemSize > this.maxBatchSize) {
				await uploadBatch(currentBatch);
				batchSize = itemSize;
				currentBatch = [itemToUpload];
			} else {
				batchSize += itemSize;
				currentBatch.push(itemToUpload);
			}
		}

		if (currentBatch.length) await uploadBatch(currentBatch);
	}
}