import { ModuleType, FileSystemItem, ImportModuleOutputFormat, Module, ImportOptions, ExportOptions, ImportExportResult, defaultImportExportModule } from './types';
import InteropService_Importer_Custom from './InteropService_Importer_Custom';
import InteropService_Exporter_Custom from './InteropService_Exporter_Custom';
import shim from 'lib/shim';
import { _ } from 'lib/locale';
const BaseItem = require('lib/models/BaseItem.js');
const BaseModel = require('lib/BaseModel.js');
const Resource = require('lib/models/Resource.js');
const Folder = require('lib/models/Folder.js');
const NoteTag = require('lib/models/NoteTag.js');
const Note = require('lib/models/Note.js');
const ArrayUtils = require('lib/ArrayUtils');
const { sprintf } = require('sprintf-js');
const { fileExtension } = require('lib/path-utils');
const { toTitleCase } = require('lib/string-utils');
const EventEmitter = require('events');

export default class InteropService {

	private defaultModules_:Module[];
	private userModules_:Module[] = [];
	private eventEmitter_:any = null;

	private static instance_:InteropService;

	public static instance():InteropService {
		if (!this.instance_) this.instance_ = new InteropService();
		return this.instance_;
	}
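
	// Typical access pattern (illustrative, not part of the original file): the
	// service is used as a singleton, and 'modulesChanged' is emitted whenever
	// registerModule() below adds a module.
	//
	//     const service = InteropService.instance();
	//     service.on('modulesChanged', () => { /* e.g. refresh import/export menus */ });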

	constructor() {
		this.eventEmitter_ = new EventEmitter();
	}

	on(eventName:string, callback:Function) {
		return this.eventEmitter_.on(eventName, callback);
	}

	off(eventName:string, callback:Function) {
		return this.eventEmitter_.removeListener(eventName, callback);
	}

	modules() {
		if (!this.defaultModules_) {
			const importModules:Module[] = [
				{
					...defaultImportExportModule(ModuleType.Importer),
					format: 'jex',
					fileExtensions: ['jex'],
					sources: [FileSystemItem.File],
					description: _('Joplin Export File'),
				},
				{
					...defaultImportExportModule(ModuleType.Importer),
					format: 'md',
					fileExtensions: ['md', 'markdown', 'txt'],
					sources: [FileSystemItem.File, FileSystemItem.Directory],
					isNoteArchive: false, // Tells whether the file can contain multiple notes (eg. Enex or Jex format)
					description: _('Markdown'),
				},
				{
					...defaultImportExportModule(ModuleType.Importer),
					format: 'raw',
					sources: [FileSystemItem.Directory],
					description: _('Joplin Export Directory'),
				},
				{
					...defaultImportExportModule(ModuleType.Importer),
					format: 'enex',
					fileExtensions: ['enex'],
					sources: [FileSystemItem.File],
					description: _('Evernote Export File (as Markdown)'),
					importerClass: 'InteropService_Importer_EnexToMd',
					isDefault: true,
				},
				{
					...defaultImportExportModule(ModuleType.Importer),
					format: 'enex',
					fileExtensions: ['enex'],
					sources: [FileSystemItem.File],
					description: _('Evernote Export File (as HTML)'),
					// TODO: Consider doing this the same way as the multiple `md` importers are handled
					importerClass: 'InteropService_Importer_EnexToHtml',
					outputFormat: ImportModuleOutputFormat.Html,
				},
			];

			const exportModules:Module[] = [
				{
					...defaultImportExportModule(ModuleType.Exporter),
					format: 'jex',
					fileExtensions: ['jex'],
					target: FileSystemItem.File,
					description: _('Joplin Export File'),
				},
				{
					...defaultImportExportModule(ModuleType.Exporter),
					format: 'raw',
					target: FileSystemItem.Directory,
					description: _('Joplin Export Directory'),
				},
				{
					...defaultImportExportModule(ModuleType.Exporter),
					format: 'md',
					target: FileSystemItem.Directory,
					description: _('Markdown'),
				},
				{
					...defaultImportExportModule(ModuleType.Exporter),
					format: 'html',
					fileExtensions: ['html', 'htm'],
					target: FileSystemItem.File,
					isNoteArchive: false,
					description: _('HTML File'),
				},
				{
					...defaultImportExportModule(ModuleType.Exporter),
					format: 'html',
					target: FileSystemItem.Directory,
					description: _('HTML Directory'),
				},
			];

			this.defaultModules_ = importModules.concat(exportModules);
		}

		return this.defaultModules_.concat(this.userModules_);
	}
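
	// Example (illustrative, not from the original code): a plugin-style caller
	// could register an extra module here. The fields follow the same Module
	// shape as the default modules above; modules flagged `isCustom` are later
	// instantiated through InteropService_Importer_Custom / InteropService_Exporter_Custom.
	//
	//     InteropService.instance().registerModule({
	//         ...defaultImportExportModule(ModuleType.Exporter),
	//         format: 'txt',
	//         description: 'Plain Text Directory',
	//         target: FileSystemItem.Directory,
	//         isCustom: true,
	//     });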
	public registerModule(module:Module) {
		module = {
			...defaultImportExportModule(module.type),
			...module,
		};

		this.userModules_.push(module);

		this.eventEmitter_.emit('modulesChanged');
	}

	// Find the module that matches the given type ("importer" or "exporter")
	// and the given format. Some formats can have multiple associated importers
	// or exporters, such as ENEX. In this case, the one marked as "isDefault"
	// is returned. This is useful to auto-detect the module based on the format.
	// For more precise matching, newModuleFromPath_ should be used.
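	//
	// Illustrative example (based on the default modules above, not part of the
	// original comment): "enex" has two importers, so with no extra hints the
	// one flagged `isDefault` (Evernote as Markdown) wins, while passing an
	// output format selects the HTML variant:
	//
	//     findModuleByFormat_(ModuleType.Importer, 'enex')
	//         // -> importerClass 'InteropService_Importer_EnexToMd'
	//     findModuleByFormat_(ModuleType.Importer, 'enex', null, ImportModuleOutputFormat.Html)
	//         // -> importerClass 'InteropService_Importer_EnexToHtml'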
	findModuleByFormat_(type:ModuleType, format:string, target:FileSystemItem = null, outputFormat:ImportModuleOutputFormat = null) {
		const modules = this.modules();
		const matches = [];

		for (let i = 0; i < modules.length; i++) {
			const m = modules[i];
			if (m.format === format && m.type === type) {
				if (!target && !outputFormat) {
					matches.push(m);
				} else if (target && target === m.target) {
					matches.push(m);
				} else if (outputFormat && outputFormat === m.outputFormat) {
					matches.push(m);
				}
			}
		}

		const output = matches.find(m => !!m.isDefault);
		if (output) return output;

		return matches.length ? matches[0] : null;
	}

	private modulePath(module:Module) {
		let className = '';
		if (module.type === ModuleType.Importer) {
			className = module.importerClass || `InteropService_Importer_${toTitleCase(module.format)}`;
		} else {
			className = `InteropService_Exporter_${toTitleCase(module.format)}`;
		}
		return `lib/services/interop/${className}`;
	}

	private newModuleFromCustomFactory(module:Module) {
		if (module.type === ModuleType.Importer) {
			return new InteropService_Importer_Custom(module);
		} else {
			return new InteropService_Exporter_Custom(module);
		}
	}

	/**
	 * NOTE TO FUTURE SELF: It might make sense to simply move all the existing
	 * formatters to the `newModuleFromPath_` approach, so that there's only one way
	 * to do this mapping. This isn't a priority right now (per the convo in:
	 * https://github.com/laurent22/joplin/pull/1795#discussion_r322379121) but
	 * we can do it if it ever becomes necessary.
	 */
	newModuleByFormat_(type:ModuleType, format:string, outputFormat:ImportModuleOutputFormat = ImportModuleOutputFormat.Markdown) {
		const moduleMetadata = this.findModuleByFormat_(type, format, null, outputFormat);
		if (!moduleMetadata) throw new Error(_('Cannot load "%s" module for format "%s" and output "%s"', type, format, outputFormat));

		let output = null;

		if (moduleMetadata.isCustom) {
			output = this.newModuleFromCustomFactory(moduleMetadata);
		} else {
			const ModuleClass = require(this.modulePath(moduleMetadata)).default;
			output = new ModuleClass();
		}

		output.setMetadata(moduleMetadata);

		return output;
	}

	/**
	 * The existing `newModuleByFormat_` fn would load by the input format. This
	 * was fine when there was a 1-1 mapping of input formats to output formats,
	 * but now that we have 2 possible outputs for an `enex` input, we need to be
	 * explicit with which importer we want to use.
	 *
	 * https://github.com/laurent22/joplin/pull/1795#pullrequestreview-281574417
	 */
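	// Example (illustrative, not from the original code): forcing the HTML
	// variant of the ENEX importer by passing an explicit `modulePath`. The path
	// shown mirrors what modulePath() above would build; actual callers may
	// supply their own path.
	//
	//     this.newModuleFromPath_(ModuleType.Importer, {
	//         format: 'enex',
	//         modulePath: 'lib/services/interop/InteropService_Importer_EnexToHtml',
	//     });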
	newModuleFromPath_(type:ModuleType, options:any) {
		let modulePath = options && options.modulePath ? options.modulePath : '';

		if (!modulePath) {
			const moduleMetadata = this.findModuleByFormat_(type, options.format, options.target);
			if (!moduleMetadata) throw new Error(_('Cannot load "%s" module for format "%s" and target "%s"', type, options.format, options.target));
			modulePath = this.modulePath(moduleMetadata);
		}

		const moduleMetadata = this.findModuleByFormat_(type, options.format, options.target);

		let output = null;

		if (moduleMetadata.isCustom) {
			output = this.newModuleFromCustomFactory(moduleMetadata);
		} else {
			const ModuleClass = require(modulePath).default;
			output = new ModuleClass();
		}

		output.setMetadata({ options, ...moduleMetadata });

		return output;
	}

	moduleByFileExtension_(type:ModuleType, ext:string) {
		ext = ext.toLowerCase();

		const modules = this.modules();

		for (let i = 0; i < modules.length; i++) {
			const m = modules[i];
			if (type !== m.type) continue;
			if (m.fileExtensions && m.fileExtensions.indexOf(ext) >= 0) return m;
		}

		return null;
	}
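
	// Minimal usage sketch (illustrative; the path, folder ID and choice of
	// options are made up): import an ENEX file into a specific notebook,
	// keeping the notes as HTML rather than Markdown.
	//
	//     await InteropService.instance().import({
	//         path: '/path/to/notes.enex',
	//         format: 'enex',
	//         destinationFolderId: 'some-folder-id',
	//         outputFormat: ImportModuleOutputFormat.Html,
	//     });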
2020-10-09 19:35:46 +02:00
async import ( options :ImportOptions ) : Promise < ImportExportResult > {
2019-07-29 15:43:53 +02:00
if ( ! ( await shim . fsDriver ( ) . exists ( options . path ) ) ) throw new Error ( _ ( 'Cannot find "%s".' , options . path ) ) ;
2018-02-26 21:16:01 +02:00
2020-10-09 19:35:46 +02:00
options = {
format : 'auto' ,
destinationFolderId : null ,
destinationFolder : null ,
. . . options ,
} ;
2018-02-25 19:01:16 +02:00
2018-03-09 22:59:12 +02:00
if ( options . format === 'auto' ) {
2020-10-09 19:35:46 +02:00
const module = this . module ByFileExtension_ ( ModuleType . Importer , fileExtension ( options . path ) ) ;
2018-03-09 22:59:12 +02:00
if ( ! module ) throw new Error ( _ ( 'Please specify import format for %s' , options . path ) ) ;
2019-07-30 09:35:42 +02:00
// eslint-disable-next-line require-atomic-updates
2018-02-27 22:04:38 +02:00
options . format = module .format ;
2018-02-25 19:01:16 +02:00
}
if ( options . destinationFolderId ) {
const folder = await Folder . load ( options . destinationFolderId ) ;
2018-02-27 22:04:38 +02:00
if ( ! folder ) throw new Error ( _ ( 'Cannot find "%s".' , options . destinationFolderId ) ) ;
2019-07-30 09:35:42 +02:00
// eslint-disable-next-line require-atomic-updates
2018-02-26 20:43:50 +02:00
options . destinationFolder = folder ;
2018-02-25 19:01:16 +02:00
}
2020-10-09 19:35:46 +02:00
let result :ImportExportResult = { warnings : [ ] } ;
2018-02-25 19:01:16 +02:00
2019-09-23 23:18:30 +02:00
let importer = null ;
if ( options . module Path ) {
2020-10-09 19:35:46 +02:00
importer = this . newModuleFromPath_ ( ModuleType . Importer , options ) ;
2019-09-23 23:18:30 +02:00
} else {
2020-10-09 19:35:46 +02:00
importer = this . newModuleByFormat_ ( ModuleType . Importer , options . format , options . outputFormat ) ;
2019-09-23 23:18:30 +02:00
}
2018-02-26 20:43:50 +02:00
await importer . init ( options . path , options ) ;
result = await importer . exec ( result ) ;
2018-02-25 19:01:16 +02:00
return result ;
2018-02-23 21:32:19 +02:00
}
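
	// Minimal usage sketch (illustrative; the IDs and path are made up): export
	// two notebooks, including their sub-notebooks, notes and resources, to a
	// JEX file.
	//
	//     await InteropService.instance().export({
	//         format: 'jex',
	//         path: '/path/to/backup.jex',
	//         sourceFolderIds: ['folder-id-1', 'folder-id-2'],
	//     });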
	async export(options:ExportOptions):Promise<ImportExportResult> {
		options = {
			format: 'jex',
			...options,
		};

		const exportPath = options.path ? options.path : null;
		let sourceFolderIds = options.sourceFolderIds ? options.sourceFolderIds : [];
		const sourceNoteIds = options.sourceNoteIds ? options.sourceNoteIds : [];
		const result:ImportExportResult = { warnings: [] };
		const itemsToExport:any[] = [];

		const queueExportItem = (itemType:number, itemOrId:any) => {
			itemsToExport.push({
				type: itemType,
				itemOrId: itemOrId,
			});
		};

		const exportedNoteIds = [];
		let resourceIds:string[] = [];

		// Recursively get all the folders that have valid parents
		const folderIds = await Folder.childrenIds('', true);

		let fullSourceFolderIds = sourceFolderIds.slice();
		for (let i = 0; i < sourceFolderIds.length; i++) {
			const id = sourceFolderIds[i];
			const childrenIds = await Folder.childrenIds(id);
			fullSourceFolderIds = fullSourceFolderIds.concat(childrenIds);
		}
		sourceFolderIds = fullSourceFolderIds;

		for (let folderIndex = 0; folderIndex < folderIds.length; folderIndex++) {
			const folderId = folderIds[folderIndex];
			if (sourceFolderIds.length && sourceFolderIds.indexOf(folderId) < 0) continue;

			if (!sourceNoteIds.length) await queueExportItem(BaseModel.TYPE_FOLDER, folderId);

			const noteIds = await Folder.noteIds(folderId);

			for (let noteIndex = 0; noteIndex < noteIds.length; noteIndex++) {
				const noteId = noteIds[noteIndex];
				if (sourceNoteIds.length && sourceNoteIds.indexOf(noteId) < 0) continue;
				const note = await Note.load(noteId);
				await queueExportItem(BaseModel.TYPE_NOTE, note);
				exportedNoteIds.push(noteId);

				const rids = await Note.linkedResourceIds(note.body);
				resourceIds = resourceIds.concat(rids);
			}
		}

		resourceIds = ArrayUtils.unique(resourceIds);

		for (let i = 0; i < resourceIds.length; i++) {
			await queueExportItem(BaseModel.TYPE_RESOURCE, resourceIds[i]);
		}

		const noteTags = await NoteTag.all();

		const exportedTagIds = [];

		for (let i = 0; i < noteTags.length; i++) {
			const noteTag = noteTags[i];
			if (exportedNoteIds.indexOf(noteTag.note_id) < 0) continue;
			await queueExportItem(BaseModel.TYPE_NOTE_TAG, noteTag.id);
			exportedTagIds.push(noteTag.tag_id);
		}

		for (let i = 0; i < exportedTagIds.length; i++) {
			await queueExportItem(BaseModel.TYPE_TAG, exportedTagIds[i]);
		}

		const exporter = this.newModuleFromPath_(ModuleType.Exporter, options);
		await exporter.init(exportPath, options);

		const typeOrder = [BaseModel.TYPE_FOLDER, BaseModel.TYPE_RESOURCE, BaseModel.TYPE_NOTE, BaseModel.TYPE_TAG, BaseModel.TYPE_NOTE_TAG];
		const context:any = {
			resourcePaths: {},
		};

		for (let typeOrderIndex = 0; typeOrderIndex < typeOrder.length; typeOrderIndex++) {
			const type = typeOrder[typeOrderIndex];

			await exporter.prepareForProcessingItemType(type, itemsToExport);

			for (let i = 0; i < itemsToExport.length; i++) {
				const itemType = itemsToExport[i].type;

				if (itemType !== type) continue;

				const ItemClass = BaseItem.getClassByItemType(itemType);
				const itemOrId = itemsToExport[i].itemOrId;
				const item = typeof itemOrId === 'object' ? itemOrId : await ItemClass.load(itemOrId);

				if (!item) {
					if (itemType === BaseModel.TYPE_RESOURCE) {
						result.warnings.push(sprintf('A resource that does not exist is referenced in a note. The resource was skipped. Resource ID: %s', itemOrId));
					} else {
						result.warnings.push(sprintf('Cannot find item with type "%s" and ID %s. Item was skipped.', ItemClass.tableName(), JSON.stringify(itemOrId)));
					}
					continue;
				}

				if (item.encryption_applied || item.encryption_blob_encrypted) {
					result.warnings.push(sprintf('This item is currently encrypted: %s "%s" (%s) and was not exported. You may wait for it to be decrypted and try again.', BaseModel.modelTypeToName(itemType), item.title ? item.title : item.id, item.id));
					continue;
				}

				try {
					if (itemType == BaseModel.TYPE_RESOURCE) {
						const resourcePath = Resource.fullPath(item);
						context.resourcePaths[item.id] = resourcePath;
						exporter.updateContext(context);
						await exporter.processResource(item, resourcePath);
					}

					await exporter.processItem(itemType, item);
				} catch (error) {
					console.error(error);
					result.warnings.push(error.message);
				}
			}
		}

		await exporter.close();

		return result;
	}
}