const { time } = require('lib/time-utils');
const BaseItem = require('lib/models/BaseItem.js');
const Alarm = require('lib/models/Alarm').default;
const Folder = require('lib/models/Folder.js');
const Note = require('lib/models/Note.js');
const BaseModel = require('lib/BaseModel.js');
const DecryptionWorker = require('lib/services/DecryptionWorker');
const ResourceFetcher = require('lib/services/ResourceFetcher');
const Resource = require('lib/models/Resource');
const { _ } = require('lib/locale');
const { toTitleCase } = require('lib/string-utils.js');

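// Builds the synchronisation status report: per-type sync counts, items that
// could not be synced or decrypted, attachment download status, folder note
// counts and upcoming alarms. Can also export a basic item list as CSV.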
class ReportService {
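	// Escapes a single CSV cell, wrapping it in double quotes when required.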
	csvEscapeCell(cell) {
		cell = this.csvValueToString(cell);
		// Double quotes are escaped by doubling them; the /g flag escapes every occurrence, not just the first.
		const output = cell.replace(/"/g, '""');
		if (this.csvCellRequiresQuotes(cell, ',')) {
			return `"${output}"`;
		}
		return output;
	}

	csvCellRequiresQuotes(cell, delimiter) {
		if (cell.indexOf('\n') >= 0) return true;
		if (cell.indexOf('"') >= 0) return true;
		if (cell.indexOf(delimiter) >= 0) return true;
		return false;
	}

	csvValueToString(v) {
		if (v === undefined || v === null) return '';
		return v.toString();
	}

	csvCreateLine(row) {
		for (let i = 0; i < row.length; i++) {
			row[i] = this.csvEscapeCell(row[i]);
		}
		return row.join(',');
	}

	csvCreate(rows) {
		const output = [];
		for (let i = 0; i < rows.length; i++) {
			output.push(this.csvCreateLine(rows[i]));
		}
		return output.join('\n');
	}

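	// Lists every syncable item with its type, id, update time and sync time,
	// either as an array of rows or as a CSV string.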
	async basicItemList(option = null) {
		if (!option) option = {};
		if (!option.format) option.format = 'array';

		const itemTypes = BaseItem.syncItemTypes();
		const output = [];
		output.push(['type', 'id', 'updated_time', 'sync_time', 'is_conflict']);

		for (let i = 0; i < itemTypes.length; i++) {
			const itemType = itemTypes[i];
			const ItemClass = BaseItem.getClassByItemType(itemType);
			const items = await ItemClass.modelSelectAll(`SELECT items.id, items.updated_time, sync_items.sync_time FROM ${ItemClass.tableName()} items JOIN sync_items ON sync_items.item_id = items.id`);

			for (let j = 0; j < items.length; j++) {
				const item = items[j];
				const row = [itemType, item.id, item.updated_time, item.sync_time];
				row.push('is_conflict' in item ? item.is_conflict : '');
				output.push(row);
			}
		}

		return option.format === 'csv' ? this.csvCreate(output) : output;
	}

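	// Returns synced vs total counts for each item type, plus overall totals,
	// the number of conflicted notes and the number of remote items to delete.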
	async syncStatus(syncTarget) {
		const output = {
			items: {},
			total: {},
		};

		let itemCount = 0;
		let syncedCount = 0;
		for (let i = 0; i < BaseItem.syncItemDefinitions_.length; i++) {
			const d = BaseItem.syncItemDefinitions_[i];
			const ItemClass = BaseItem.getClass(d.className);
			const o = {
				total: await ItemClass.count(),
				synced: await ItemClass.syncedCount(syncTarget),
			};
			output.items[d.className] = o;
			itemCount += o.total;
			syncedCount += o.synced;
		}

		const conflictedCount = await Note.conflictedCount();

		output.total = {
			total: itemCount - conflictedCount,
			synced: syncedCount,
		};

		output.toDelete = {
			total: await BaseItem.deletedItemCount(syncTarget),
		};

		output.conflicted = {
			total: await Note.conflictedCount(),
		};

		output.items['Note'].total -= output.conflicted.total;

		return output;
	}

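	// Builds the full status report as a list of sections, each with a title
	// and a body made of strings or retryable entries.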
	async status(syncTarget) {
		const r = await this.syncStatus(syncTarget);
		const sections = [];
		let section = null;

		const disabledItems = await BaseItem.syncDisabledItems(syncTarget);

		if (disabledItems.length) {
			section = { title: _('Items that cannot be synchronised'), body: [] };
			section.body.push(_('These items will remain on the device but will not be uploaded to the sync target. In order to find these items, either search for the title or the ID (which is displayed in brackets above).'));
			section.body.push('');

			for (let i = 0; i < disabledItems.length; i++) {
				const row = disabledItems[i];
				if (row.location === BaseItem.SYNC_ITEM_LOCATION_LOCAL) {
					section.body.push(_('%s (%s) could not be uploaded: %s', row.item.title, row.item.id, row.syncInfo.sync_disabled_reason));
				} else {
					section.body.push(_('Item "%s" could not be downloaded: %s', row.syncInfo.item_id, row.syncInfo.sync_disabled_reason));
				}
			}

			sections.push(section);
		}

		const decryptionDisabledItems = await DecryptionWorker.instance().decryptionDisabledItems();

		if (decryptionDisabledItems.length) {
			section = { title: _('Items that cannot be decrypted'), body: [], name: 'failedDecryption', canRetryAll: false, retryAllHandler: null };
			section.body.push(_('Joplin failed to decrypt these items multiple times, possibly because they are corrupted or too large. These items will remain on the device but Joplin will no longer attempt to decrypt them.'));
			section.body.push('');

			for (let i = 0; i < decryptionDisabledItems.length; i++) {
				const row = decryptionDisabledItems[i];
				section.body.push({
					text: _('%s: %s', toTitleCase(BaseModel.modelTypeToName(row.type_)), row.id),
					canRetry: true,
					canRetryType: 'e2ee',
					retryHandler: async () => {
						await DecryptionWorker.instance().clearDisabledItem(row.type_, row.id);
						DecryptionWorker.instance().scheduleStart();
					},
				});
			}

			const retryHandlers = [];

			for (let i = 0; i < section.body.length; i++) {
				if (section.body[i].canRetry) {
					retryHandlers.push(section.body[i].retryHandler);
				}
			}

			if (retryHandlers.length > 1) {
				section.canRetryAll = true;
				section.retryAllHandler = async () => {
					for (const retryHandler of retryHandlers) {
						await retryHandler();
					}
				};
			}

			sections.push(section);
		}

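		// Attachment download and decryption counts, broken down by fetch status.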
		{
			section = { title: _('Attachments'), body: [], name: 'resources' };
			const statuses = [Resource.FETCH_STATUS_IDLE, Resource.FETCH_STATUS_STARTED, Resource.FETCH_STATUS_DONE, Resource.FETCH_STATUS_ERROR];

			for (const status of statuses) {
				if (status === Resource.FETCH_STATUS_DONE) {
					const downloadedButEncryptedBlobCount = await Resource.downloadedButEncryptedBlobCount();
					const downloadedCount = await Resource.downloadStatusCounts(Resource.FETCH_STATUS_DONE);
					section.body.push(_('%s: %d', _('Downloaded and decrypted'), downloadedCount - downloadedButEncryptedBlobCount));
					section.body.push(_('%s: %d', _('Downloaded and encrypted'), downloadedButEncryptedBlobCount));
				} else {
					const count = await Resource.downloadStatusCounts(status);
					section.body.push(_('%s: %d', Resource.fetchStatusToLabel(status), count));
				}
			}

			sections.push(section);
		}

		const resourceErrorFetchStatuses = await Resource.errorFetchStatuses();

		if (resourceErrorFetchStatuses.length) {
			section = { title: _('Attachments that could not be downloaded'), body: [], name: 'failedResourceDownload' };

			for (let i = 0; i < resourceErrorFetchStatuses.length; i++) {
				const row = resourceErrorFetchStatuses[i];
				section.body.push({
					text: _('%s (%s): %s', row.resource_title, row.resource_id, row.fetch_error),
					canRetry: true,
					canRetryType: 'resourceDownload',
					retryHandler: async () => {
						await Resource.resetErrorStatus(row.resource_id);
						ResourceFetcher.instance().autoAddResources();
					},
				});
			}

			sections.push(section);
		}

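		// Per-type sync progress, followed by totals, conflicts and pending deletions.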
		section = { title: _('Sync status (synced items / total items)'), body: [] };

		for (const n in r.items) {
			if (!r.items.hasOwnProperty(n)) continue;
			section.body.push(_('%s: %d/%d', n, r.items[n].synced, r.items[n].total));
		}

		section.body.push(_('Total: %d/%d', r.total.synced, r.total.total));
		section.body.push('');
		section.body.push(_('Conflicted: %d', r.conflicted.total));
		section.body.push(_('To delete: %d', r.toDelete.total));

		sections.push(section);

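		// Note count per folder, sorted by folder title.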
		section = { title: _('Folders'), body: [] };

		const folders = await Folder.all({
			order: { by: 'title', dir: 'ASC' },
			caseInsensitive: true,
		});

		for (let i = 0; i < folders.length; i++) {
			section.body.push(_('%s: %d notes', folders[i].title, await Folder.noteCount(folders[i].id)));
		}

		sections.push(section);

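		// Upcoming alarms and the notes they are attached to.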
		const alarms = await Alarm.allDue();
		if (alarms.length) {
			section = { title: _('Coming alarms'), body: [] };

			for (let i = 0; i < alarms.length; i++) {
				const alarm = alarms[i];
				const note = await Note.load(alarm.note_id);
				section.body.push(_('On %s: %s', time.formatMsToLocal(alarm.trigger_time), note.title));
			}

			sections.push(section);
		}

		return sections;
	}
}

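// Usage sketch, assuming the caller already knows the sync target id:
//
//   const service = new ReportService();
//   const sections = await service.status(syncTargetId);
//   for (const section of sections) {
//       console.info(section.title);
//   }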
module.exports = { ReportService };