import ItemChange from '../models/ItemChange';
import Note from '../models/Note';
import Folder from '../models/Folder';
import Setting from '../models/Setting';
import Revision from '../models/Revision';
import BaseModel from '../BaseModel';
import ItemChangeUtils from './ItemChangeUtils';
import shim from '../shim';
import BaseService from './BaseService';
import { _ } from '../locale';
import { ItemChangeEntity, NoteEntity, RevisionEntity } from './database/types';
import Logger from '@joplin/utils/Logger';
import { MarkupLanguage } from '../../renderer';
const { substrWithEllipsis } = require('../string-utils');
const { sprintf } = require('sprintf-js');
const { wrapError } = require('../errorUtils');

const logger = Logger.create('RevisionService');

export default class RevisionService extends BaseService {

	public static instance_: RevisionService;

	// An "old note" is one that was created before the revision service existed. These
	// notes never benefited from revisions, so the first time they are modified a copy of
	// the original note is saved. The goal is to have at least one revision in case the note
	// is deleted or modified as a result of a bug or user mistake.
	private isOldNotesCache_: any = {};
	private maintenanceCalls_: any[] = [];
	private maintenanceTimer1_: any = null;
	private maintenanceTimer2_: any = null;
	private isCollecting_ = false;
	public isRunningInBackground_ = false;

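	// Returns the shared singleton instance, creating it on first access.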
	public static instance() {
		if (this.instance_) return this.instance_;
		this.instance_ = new RevisionService();
		return this.instance_;
	}

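	// Notes last modified before this timestamp are considered "old notes".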
	public oldNoteCutOffDate_() {
		return Date.now() - Setting.value('revisionService.oldNoteInterval');
	}

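	// Checks whether the note was last modified before the cut-off date,
	// caching the result per note ID.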
	public async isOldNote(noteId: string) {
		if (noteId in this.isOldNotesCache_) return this.isOldNotesCache_[noteId];

		const isOld = await Note.noteIsOlderThan(noteId, this.oldNoteCutOffDate_());
		this.isOldNotesCache_[noteId] = isOld;
		return isOld;
	}

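	// Returns the note fields that are versioned as metadata: everything except
	// the title, body, and a few internal fields (timestamps, encryption and
	// conflict flags).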
	private noteMetadata_(note: NoteEntity) {
		const excludedFields = ['type_', 'title', 'body', 'created_time', 'updated_time', 'encryption_applied', 'encryption_cipher_text', 'is_conflict'];

		const md: any = {};
		for (const k in note) {
			if (excludedFields.indexOf(k) >= 0) continue;
			md[k] = (note as any)[k];
		}

		if (note.user_updated_time === note.updated_time) delete md.user_updated_time;
		if (note.user_created_time === note.created_time) delete md.user_created_time;

		return md;
	}

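	// Creates a revision for the given note, storing diffs of the title, body
	// and metadata against the parent revision (or against an empty note if
	// there is no parent). Returns null if the previous revision is too recent
	// (less than 'revisionService.intervalBetweenRevisions' ago) or if the
	// resulting revision would be empty.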
	public async createNoteRevision_(note: NoteEntity, parentRevId: string = null): Promise<RevisionEntity> {
		try {
			const parentRev = parentRevId ? await Revision.load(parentRevId) : await Revision.latestRevision(BaseModel.TYPE_NOTE, note.id);

			const output: RevisionEntity = {
				parent_id: '',
				item_type: BaseModel.TYPE_NOTE,
				item_id: note.id,
				item_updated_time: note.updated_time,
			};

			const noteMd = this.noteMetadata_(note);
			const noteTitle = note.title ? note.title : '';
			const noteBody = note.body ? note.body : '';

			if (!parentRev) {
				output.title_diff = Revision.createTextPatch('', noteTitle);
				output.body_diff = Revision.createTextPatch('', noteBody);
				output.metadata_diff = Revision.createObjectPatch({}, noteMd);
			} else {
				if (Date.now() - parentRev.updated_time < Setting.value('revisionService.intervalBetweenRevisions')) return null;

				const merged = await Revision.mergeDiffs(parentRev);
				output.parent_id = parentRev.id;
				output.title_diff = Revision.createTextPatch(merged.title, noteTitle);
				output.body_diff = Revision.createTextPatch(merged.body, noteBody);
				output.metadata_diff = Revision.createObjectPatch(merged.metadata, noteMd);
			}

			if (Revision.isEmptyRevision(output)) return null;

			return Revision.save(output);
		} catch (error) {
			const newError = wrapError(`Could not create revision for note: ${note.id}`, error);
			throw newError;
		}
	}

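	// Scans the pending item changes and creates revisions for the notes that
	// have been updated or deleted since the last run. Changes coming from sync
	// or decryption are skipped (see the synchronizer test units for why).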
	public async collectRevisions() {
		if (this.isCollecting_) return;

		this.isCollecting_ = true;

		await ItemChange.waitForAllSaved();

		const doneNoteIds = [];

		try {
			while (true) {
				// See the synchronizer test units for why changes coming
				// from sync are skipped.
				const changes: ItemChangeEntity[] = await ItemChange.modelSelectAll(
					`
					SELECT id, item_id, type, before_change_item
					FROM item_changes
					WHERE item_type = ?
					AND source != ?
					AND source != ?
					AND id > ?
					ORDER BY id ASC
					LIMIT 10
					`,
					[BaseModel.TYPE_NOTE, ItemChange.SOURCE_SYNC, ItemChange.SOURCE_DECRYPTION, Setting.value('revisionService.lastProcessedChangeId')],
				);

				if (!changes.length) break;

				const noteIds = changes.map((a) => a.item_id);

				const notes = await Note.modelSelectAll(`SELECT * FROM notes WHERE is_conflict = 0 AND encryption_applied = 0 AND id IN ("${noteIds.join('","')}")`);

				for (let i = 0; i < changes.length; i++) {
					const change = changes[i];
					const noteId = change.item_id;

					try {
						if (change.type === ItemChange.TYPE_UPDATE && doneNoteIds.indexOf(noteId) < 0) {
							const note = BaseModel.byId(notes, noteId);
							const oldNote = change.before_change_item ? JSON.parse(change.before_change_item) : null;

							if (note) {
								if (oldNote && oldNote.updated_time < this.oldNoteCutOffDate_()) {
									// This is where we save the original version of this old note.
									const rev = await this.createNoteRevision_(oldNote);
									if (rev) logger.debug(sprintf('collectRevisions: Saved revision %s (old note)', rev.id));
								}

								const rev = await this.createNoteRevision_(note);
								if (rev) logger.debug(sprintf('collectRevisions: Saved revision %s (Last rev was more than %d ms ago)', rev.id, Setting.value('revisionService.intervalBetweenRevisions')));
								doneNoteIds.push(noteId);
								this.isOldNotesCache_[noteId] = false;
							}
						}

						if (change.type === ItemChange.TYPE_DELETE && !!change.before_change_item) {
							const note = JSON.parse(change.before_change_item);
							const revExists = await Revision.revisionExists(BaseModel.TYPE_NOTE, note.id, note.updated_time);
							if (!revExists) {
								const rev = await this.createNoteRevision_(note);
								if (rev) logger.debug(sprintf('collectRevisions: Saved revision %s (for deleted note)', rev.id));
							}
							doneNoteIds.push(noteId);
						}
					} catch (error) {
						if (error.code === 'revision_encrypted') {
							throw error;
						} else {
							// If a revision cannot be created, we continue processing the
							// other changes. A rare bug in diff-match-patch can cause
							// revision creation to fail in some cases, and it is better to
							// keep processing the other changes than to stop here; the
							// alternative would be to stop and fix the bug, but in the
							// meantime no revisions would be generated at all.
							// The drawback is that once a change has been skipped it will
							// never be processed again, because its ID will be in the past
							// (before revisionService.lastProcessedChangeId).
							//
							// https://github.com/laurent22/joplin/issues/5531
							logger.error(`collectRevisions: Processing one of the changes for note ${noteId} failed. Other changes will still be processed. Error was:`, error);
							logger.error('collectRevisions: Change was:', change);
						}
					}

					Setting.setValue('revisionService.lastProcessedChangeId', change.id);
				}
			}
		} catch (error) {
			if (error.code === 'revision_encrypted') {
				// One or more revisions are encrypted - stop processing for now;
				// these revisions will be processed the next time the revision
				// collector runs.
				logger.info('collectRevisions: One or more revisions were encrypted. Processing was stopped but will resume later once the revisions have been decrypted.', error);
			} else {
				// This should not happen anymore because the error is handled in
				// the loop above.
				logger.error('collectRevisions:', error);
			}
		}

		await Setting.saveAll();
		await ItemChangeUtils.deleteProcessedChanges();

		this.isCollecting_ = false;

		logger.info(`collectRevisions: Created revisions for ${doneNoteIds.length} notes`);
	}

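	// Permanently deletes revisions older than the given TTL (in milliseconds).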
	public async deleteOldRevisions(ttl: number) {
		return Revision.deleteOldRevisions(ttl);
	}

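	// Rebuilds the full note (title, body and metadata) as it was at the given
	// revision index, by merging the chain of diffs up to that revision.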
	public async revisionNote(revisions: RevisionEntity[], index: number) {
		if (index < 0 || index >= revisions.length) throw new Error(`Invalid revision index: ${index}`);

		const rev = revisions[index];
		const merged = await Revision.mergeDiffs(rev, revisions);

		const output: NoteEntity = {
			title: merged.title,
			body: merged.body,
			...merged.metadata,
		};
		output.updated_time = output.user_updated_time;
		output.created_time = output.user_created_time;
		(output as any).type_ = BaseModel.TYPE_NOTE;
		if (!('markup_language' in output)) output.markup_language = MarkupLanguage.Markdown;

		return output;
	}

	public restoreFolderTitle() {
		return _('Restored Notes');
	}

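	// Returns the "Restored Notes" notebook, creating it if it does not exist yet.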
	public async restoreFolder() {
		let folder = await Folder.loadByTitle(this.restoreFolderTitle());
		if (!folder) {
			folder = await Folder.save({ title: this.restoreFolderTitle() });
		}
		return folder;
	}

	// reverseRevIndex = 0 means restoring the latest version. reverseRevIndex =
	// 1 means the version before that, etc.
	public async restoreNoteById(noteId: string, reverseRevIndex: number): Promise<NoteEntity> {
		const revisions = await Revision.allByType(BaseModel.TYPE_NOTE, noteId);
		if (!revisions.length) throw new Error(`No revision for note "${noteId}"`);

		const revIndex = revisions.length - 1 - reverseRevIndex;

		const note = await this.revisionNote(revisions, revIndex);
		return this.importRevisionNote(note);
	}

	public restoreSuccessMessage(note: NoteEntity): string {
		return _('The note "%s" has been successfully restored to the notebook "%s".', substrWithEllipsis(note.title, 0, 32), this.restoreFolderTitle());
	}

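	// Saves the restored note as a new note in the "Restored Notes" notebook,
	// stripping the fields that must not be copied (ID, timestamps, encryption data).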
	public async importRevisionNote(note: NoteEntity): Promise<NoteEntity> {
		const toImport = { ...note };
		delete toImport.id;
		delete toImport.updated_time;
		delete toImport.created_time;
		delete toImport.encryption_applied;
		delete toImport.encryption_cipher_text;

		const folder = await this.restoreFolder();

		toImport.parent_id = folder.id;

		return Note.save(toImport);
	}

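	// Runs one maintenance pass: collects pending revisions and deletes expired
	// ones. If the service is disabled, it only marks the pending changes as
	// processed and deletes expired revisions.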
	public async maintenance() {
		this.maintenanceCalls_.push(true);
		try {
			const startTime = Date.now();
			logger.info('maintenance: Starting...');

			if (!Setting.value('revisionService.enabled')) {
				logger.info('maintenance: Service is disabled');
				// We pretend to have processed all the latest changes so that they
				// can be cleaned up later on by ItemChangeUtils.deleteProcessedChanges().
				Setting.setValue('revisionService.lastProcessedChangeId', await ItemChange.lastChangeId());
				await this.deleteOldRevisions(Setting.value('revisionService.ttlDays') * 24 * 60 * 60 * 1000);
			} else {
				logger.info('maintenance: Service is enabled');
				await this.collectRevisions();
				await this.deleteOldRevisions(Setting.value('revisionService.ttlDays') * 24 * 60 * 60 * 1000);

				logger.info(`maintenance: Done in ${Date.now() - startTime}ms`);
			}
		} catch (error) {
			logger.error('maintenance:', error);
		} finally {
			this.maintenanceCalls_.pop();
		}
	}

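	// Starts the background maintenance loop: a first pass runs a few seconds
	// after startup, then one pass every `collectRevisionInterval` milliseconds
	// (10 minutes by default).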
	public runInBackground(collectRevisionInterval: number = null) {
		if (this.isRunningInBackground_) return;
		this.isRunningInBackground_ = true;

		if (collectRevisionInterval === null) collectRevisionInterval = 1000 * 60 * 10;

		logger.info(`runInBackground: Starting background service with revision collection interval ${collectRevisionInterval}`);

		this.maintenanceTimer1_ = shim.setTimeout(() => {
			void this.maintenance();
		}, 1000 * 4);

		this.maintenanceTimer2_ = shim.setInterval(() => {
			void this.maintenance();
		}, collectRevisionInterval);
	}

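	// Clears the background timers and resolves once any maintenance pass that
	// is currently running has completed.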
	public async cancelTimers() {
		if (this.maintenanceTimer1_) {
			shim.clearTimeout(this.maintenanceTimer1_);
			this.maintenanceTimer1_ = null;
		}
		if (this.maintenanceTimer2_) {
			shim.clearInterval(this.maintenanceTimer2_);
			this.maintenanceTimer2_ = null;
		}

		return new Promise((resolve) => {
			const iid = shim.setInterval(() => {
				if (!this.maintenanceCalls_.length) {
					shim.clearInterval(iid);
					resolve(null);
				}
			}, 100);
		});
	}
}
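
// Typical usage, as a rough sketch (the exact wiring happens in the application
// entry points, which are not shown here):
//
//     const service = RevisionService.instance();
//     service.runInBackground(); // first pass after ~4 s, then every 10 minutes
//     // ...later, e.g. on shutdown or in tests:
//     await service.cancelTimers();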