const { Logger } = require('lib/logger.js');
const ItemChange = require('lib/models/ItemChange');
const Note = require('lib/models/Note');
const Folder = require('lib/models/Folder');
const Setting = require('lib/models/Setting');
const Revision = require('lib/models/Revision');
const BaseModel = require('lib/BaseModel');
const ItemChangeUtils = require('lib/services/ItemChangeUtils');
const { shim } = require('lib/shim');
const BaseService = require('lib/services/BaseService');
const { _ } = require('lib/locale.js');
const ArrayUtils = require('lib/ArrayUtils.js');

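// RevisionService keeps the note history up to date. It processes the pending rows in the
// item_changes table, saves a diff-based Revision whenever a note has been updated or deleted,
// and can rebuild and restore a note from any of its saved revisions.
//
// Typical wiring (a sketch only - the actual call sites live in the application entry points):
//
//     RevisionService.instance().runInBackground(); // collect revisions every 10 minutes
//
//     // Given a list of a note's revisions (loaded via the Revision model) and an index:
//     const note = await RevisionService.instance().revisionNote(revisions, index);
//     await RevisionService.instance().importRevisionNote(note); // copy into "Restored Notes"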
class RevisionService extends BaseService {

	constructor() {
		super();

		// An "old note" is one that has been created before the revision service existed. These
		// notes never benefited from revisions so the first time they are modified, a copy of
		// the original note is saved. The goal is to have at least one revision in case the note
		// is deleted or modified as a result of a bug or user mistake.
		this.isOldNotesCache_ = {};

		if (!Setting.value('revisionService.installedTime')) Setting.setValue('revisionService.installedTime', Date.now());
	}

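	// Timestamp (in ms) recorded the first time the revision service ran on this profile.
	// Notes that predate it are treated as "old notes" (see the constructor comment).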
	installedTime() {
		return Setting.value('revisionService.installedTime');
	}

	static instance() {
		if (this.instance_) return this.instance_;
		this.instance_ = new RevisionService();
		return this.instance_;
	}

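	// Tells whether the given note is an "old note", i.e. one that predates the installation
	// of the revision service. Results are cached per note ID.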
	async isOldNote(noteId) {
		if (noteId in this.isOldNotesCache_) return this.isOldNotesCache_[noteId];

		const r = await Note.noteIsOlderThan(noteId, this.installedTime());
		this.isOldNotesCache_[noteId] = r;
		return r;
	}

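	// Returns the note properties that should be tracked in the revision metadata: everything
	// except the title, body and the other fields listed in excludedFields. The user_* timestamps
	// are dropped when they are identical to the regular timestamps.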
	noteMetadata_(note) {
		const excludedFields = ['type_', 'title', 'body', 'created_time', 'updated_time', 'encryption_applied', 'encryption_cipher_text', 'is_conflict'];
		const md = {};
		for (let k in note) {
			if (excludedFields.indexOf(k) >= 0) continue;
			md[k] = note[k];
		}

		if (note.user_updated_time === note.updated_time) delete md.user_updated_time;
		if (note.user_created_time === note.created_time) delete md.user_created_time;

		return md;
	}

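	// A revision is empty when its title and body diffs are empty and its metadata diff contains
	// no new and no deleted properties. Empty revisions are not saved (see createNoteRevision_).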
	isEmptyRevision_(rev) {
		if (!!rev.title_diff) return false;
		if (!!rev.body_diff) return false;

		const md = JSON.parse(rev.metadata_diff);
		if (md.new && md.new.length) return false;
		if (md.deleted && md.deleted.length) return false;

		return true;
	}

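	// Creates a revision for the given note by diffing it against its parent revision, or against
	// the latest existing revision when no parent ID is given (an empty base is used for the very
	// first revision). Returns the saved revision, or null if nothing changed.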
	async createNoteRevision_(note, parentRevId = null) {
		const parentRev = parentRevId ? await Revision.load(parentRevId) : await Revision.latestRevision(BaseModel.TYPE_NOTE, note.id);

		const output = {
			parent_id: '',
			item_type: BaseModel.TYPE_NOTE,
			item_id: note.id,
			item_updated_time: note.updated_time,
		};

		const noteMd = this.noteMetadata_(note);
		const noteTitle = note.title ? note.title : '';
		const noteBody = note.body ? note.body : '';

		if (!parentRev) {
			output.title_diff = Revision.createTextPatch('', noteTitle);
			output.body_diff = Revision.createTextPatch('', noteBody);
			output.metadata_diff = Revision.createObjectPatch({}, noteMd);
		} else {
			const merged = await Revision.mergeDiffs(parentRev);

			output.parent_id = parentRev.id;
			output.title_diff = Revision.createTextPatch(merged.title, noteTitle);
			output.body_diff = Revision.createTextPatch(merged.body, noteBody);
			output.metadata_diff = Revision.createObjectPatch(merged.metadata, noteMd);
		}

		if (this.isEmptyRevision_(output)) return null;

		return Revision.save(output);
	}

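	// Processes the pending note changes from the item_changes table and creates revisions for
	// them: for notes that were last modified before the service was installed, the pre-change
	// version is saved first; then a revision is created for each updated note, and for each
	// deleted note that does not already have a revision matching its last update time.
	// Changes coming from sync are skipped (see comment below), and the ID of the last processed
	// change is persisted so the next run resumes from there.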
	async collectRevisions() {
		if (this.isCollecting_) return;

		this.isCollecting_ = true;

		await ItemChange.waitForAllSaved();

		const doneNoteIds = [];

		try {
			while (true) {
				// See synchronizer test units to see why changes coming
				// from sync are skipped.
				const changes = await ItemChange.modelSelectAll(`
					SELECT id, item_id, type, before_change_item
					FROM item_changes
					WHERE item_type = ?
					AND source != ?
					AND id > ?
					ORDER BY id ASC
					LIMIT 10
				`, [BaseModel.TYPE_NOTE, ItemChange.SOURCE_SYNC, Setting.value('revisionService.lastProcessedChangeId')]);

				if (!changes.length) break;

				const noteIds = changes.map(a => a.item_id);

				const notes = await Note.modelSelectAll('SELECT * FROM notes WHERE is_conflict = 0 AND encryption_applied = 0 AND id IN ("' + noteIds.join('","') + '")');

				for (let i = 0; i < changes.length; i++) {
					const change = changes[i];
					const noteId = change.item_id;

					if (change.type === ItemChange.TYPE_UPDATE && doneNoteIds.indexOf(noteId) < 0) {
						const note = BaseModel.byId(notes, noteId);
						const oldNote = change.before_change_item ? JSON.parse(change.before_change_item) : null;

						if (note) {
							if (oldNote && oldNote.updated_time < this.installedTime()) {
								// This is where we save the original version of this old note
								await this.createNoteRevision_(oldNote);
							}

							await this.createNoteRevision_(note);
							doneNoteIds.push(noteId);
							this.isOldNotesCache_[noteId] = false;
						}
					}

					if (change.type === ItemChange.TYPE_DELETE && !!change.before_change_item) {
						const note = JSON.parse(change.before_change_item);
						const revExists = await Revision.revisionExists(BaseModel.TYPE_NOTE, note.id, note.updated_time);
						if (!revExists) await this.createNoteRevision_(note);
						doneNoteIds.push(noteId);
					}

					Setting.setValue('revisionService.lastProcessedChangeId', change.id);
				}
			}
		} catch (error) {
			if (error.code === 'revision_encrypted') {
				// One or more revisions are encrypted - stop processing for now
				// and these revisions will be processed next time the revision
				// collector runs.
				this.logger().info('RevisionService::collectRevisions: One or more revisions were encrypted. Processing was stopped but will resume later once the revisions have been decrypted.', error);
			} else {
				this.logger().error('RevisionService::collectRevisions:', error);
			}
		}

		await Setting.saveAll();
		await ItemChangeUtils.deleteProcessedChanges();

		this.isCollecting_ = false;

		this.logger().info('RevisionService::collectRevisions: Created revisions for ' + doneNoteIds.length + ' notes');
	}

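	// Permanently deletes revisions older than the given TTL, in milliseconds.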
	async deleteOldRevisions(ttl) {
		return Revision.deleteOldRevisions(ttl);
	}

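	// Rebuilds the full note (title, body and metadata) as it was at revisions[index] by merging
	// the diffs up to that point. The result is returned as a note object; it is not saved.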
	async revisionNote(revisions, index) {
		if (index < 0 || index >= revisions.length) throw new Error('Invalid revision index: ' + index);

		const rev = revisions[index];
		const merged = await Revision.mergeDiffs(rev, revisions);

		const output = Object.assign({
			title: merged.title,
			body: merged.body,
		}, merged.metadata);
		output.updated_time = output.user_updated_time;
		output.created_time = output.user_created_time;
		output.type_ = BaseModel.TYPE_NOTE;

		return output;
	}

	restoreFolderTitle() {
		return _('Restored Notes');
	}

	async restoreFolder() {
		let folder = await Folder.loadByTitle(this.restoreFolderTitle());
		if (!folder) {
			folder = await Folder.save({ title: this.restoreFolderTitle() });
		}
		return folder;
	}

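	// Saves a copy of the given revision note into the restore folder ("Restored Notes"). IDs,
	// timestamps and encryption fields are stripped so that a brand new note is created rather
	// than the original one being overwritten.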
	async importRevisionNote(note) {
		const toImport = Object.assign({}, note);
		delete toImport.id;
		delete toImport.updated_time;
		delete toImport.created_time;
		delete toImport.encryption_applied;
		delete toImport.encryption_cipher_text;

		const folder = await this.restoreFolder();

		toImport.parent_id = folder.id;

		await Note.save(toImport);
	}

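	// Collects new revisions when the service is enabled and deletes revisions older than the
	// configured TTL. Called periodically by runInBackground().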
	async maintenance() {
		const startTime = Date.now();
		this.logger().info('RevisionService::maintenance: Starting...');

		if (!Setting.value('revisionService.enabled')) {
			this.logger().info('RevisionService::maintenance: Service is disabled');
			// Mark all the latest changes as processed so that they can be cleaned up
			// later on by ItemChangeUtils.deleteProcessedChanges().
			Setting.setValue('revisionService.lastProcessedChangeId', await ItemChange.lastChangeId());
			await this.deleteOldRevisions(Setting.value('revisionService.ttlDays') * 24 * 60 * 60 * 1000);
		} else {
			this.logger().info('RevisionService::maintenance: Service is enabled');
			await this.collectRevisions();
			await this.deleteOldRevisions(Setting.value('revisionService.ttlDays') * 24 * 60 * 60 * 1000);
		}

		this.logger().info('RevisionService::maintenance: Done in ' + (Date.now() - startTime) + 'ms');
	}

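	// Starts the background collection loop: a first maintenance() run a few seconds after
	// startup, then one every collectRevisionInterval ms (10 minutes by default).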
	runInBackground(collectRevisionInterval = null) {
		if (this.isRunningInBackground_) return;
		this.isRunningInBackground_ = true;

		if (collectRevisionInterval === null) collectRevisionInterval = 1000 * 60 * 10;

		this.logger().info('RevisionService::runInBackground: Starting background service with revision collection interval ' + collectRevisionInterval);

		setTimeout(() => {
			this.maintenance();
		}, 1000 * 4);

		shim.setInterval(() => {
			this.maintenance();
		}, collectRevisionInterval);
	}
}

module.exports = RevisionService;