mirror of
https://github.com/laurent22/joplin.git
synced 2025-03-03 15:32:30 +02:00
Server: Add support for sharing notes via a link
This commit is contained in:
parent
4a847a096b
commit
ccbc329cbf
237
.eslintignore
237
.eslintignore
@ -97,6 +97,9 @@ packages/app-cli/tests/Synchronizer.resources.js.map
|
||||
packages/app-cli/tests/Synchronizer.revisions.d.ts
|
||||
packages/app-cli/tests/Synchronizer.revisions.js
|
||||
packages/app-cli/tests/Synchronizer.revisions.js.map
|
||||
packages/app-cli/tests/Synchronizer.sharing.d.ts
|
||||
packages/app-cli/tests/Synchronizer.sharing.js
|
||||
packages/app-cli/tests/Synchronizer.sharing.js.map
|
||||
packages/app-cli/tests/Synchronizer.tags.d.ts
|
||||
packages/app-cli/tests/Synchronizer.tags.js
|
||||
packages/app-cli/tests/Synchronizer.tags.js.map
|
||||
@ -784,15 +787,18 @@ packages/lib/BaseApplication.js.map
|
||||
packages/lib/BaseModel.d.ts
|
||||
packages/lib/BaseModel.js
|
||||
packages/lib/BaseModel.js.map
|
||||
packages/lib/BaseSyncTarget.d.ts
|
||||
packages/lib/BaseSyncTarget.js
|
||||
packages/lib/BaseSyncTarget.js.map
|
||||
packages/lib/InMemoryCache.d.ts
|
||||
packages/lib/InMemoryCache.js
|
||||
packages/lib/InMemoryCache.js.map
|
||||
packages/lib/JoplinDatabase.d.ts
|
||||
packages/lib/JoplinDatabase.js
|
||||
packages/lib/JoplinDatabase.js.map
|
||||
packages/lib/JoplinServerApi.d.ts
|
||||
packages/lib/JoplinServerApi.js
|
||||
packages/lib/JoplinServerApi.js.map
|
||||
packages/lib/JoplinServerApi2.d.ts
|
||||
packages/lib/JoplinServerApi2.js
|
||||
packages/lib/JoplinServerApi2.js.map
|
||||
packages/lib/Logger.d.ts
|
||||
packages/lib/Logger.js
|
||||
packages/lib/Logger.js.map
|
||||
@ -817,6 +823,9 @@ packages/lib/commands/historyForward.js.map
|
||||
packages/lib/commands/synchronize.d.ts
|
||||
packages/lib/commands/synchronize.js
|
||||
packages/lib/commands/synchronize.js.map
|
||||
packages/lib/database.d.ts
|
||||
packages/lib/database.js
|
||||
packages/lib/database.js.map
|
||||
packages/lib/dummy.test.d.ts
|
||||
packages/lib/dummy.test.js
|
||||
packages/lib/dummy.test.js.map
|
||||
@ -829,6 +838,9 @@ packages/lib/eventManager.js.map
|
||||
packages/lib/file-api-driver-joplinServer.d.ts
|
||||
packages/lib/file-api-driver-joplinServer.js
|
||||
packages/lib/file-api-driver-joplinServer.js.map
|
||||
packages/lib/file-api.d.ts
|
||||
packages/lib/file-api.js
|
||||
packages/lib/file-api.js.map
|
||||
packages/lib/fs-driver-base.d.ts
|
||||
packages/lib/fs-driver-base.js
|
||||
packages/lib/fs-driver-base.js.map
|
||||
@ -916,6 +928,9 @@ packages/lib/path-utils.js.map
|
||||
packages/lib/reducer.d.ts
|
||||
packages/lib/reducer.js
|
||||
packages/lib/reducer.js.map
|
||||
packages/lib/registry.d.ts
|
||||
packages/lib/registry.js
|
||||
packages/lib/registry.js.map
|
||||
packages/lib/services/AlarmService.d.ts
|
||||
packages/lib/services/AlarmService.js
|
||||
packages/lib/services/AlarmService.js.map
|
||||
@ -1432,219 +1447,9 @@ packages/renderer/pathUtils.js.map
|
||||
packages/renderer/utils.d.ts
|
||||
packages/renderer/utils.js
|
||||
packages/renderer/utils.js.map
|
||||
packages/server/src/app.d.ts
|
||||
packages/server/src/app.js
|
||||
packages/server/src/app.js.map
|
||||
packages/server/src/config.d.ts
|
||||
packages/server/src/config.js
|
||||
packages/server/src/config.js.map
|
||||
packages/server/src/db.d.ts
|
||||
packages/server/src/db.js
|
||||
packages/server/src/db.js.map
|
||||
packages/server/src/middleware/notificationHandler.d.ts
|
||||
packages/server/src/middleware/notificationHandler.js
|
||||
packages/server/src/middleware/notificationHandler.js.map
|
||||
packages/server/src/middleware/notificationHandler.test.d.ts
|
||||
packages/server/src/middleware/notificationHandler.test.js
|
||||
packages/server/src/middleware/notificationHandler.test.js.map
|
||||
packages/server/src/middleware/ownerHandler.d.ts
|
||||
packages/server/src/middleware/ownerHandler.js
|
||||
packages/server/src/middleware/ownerHandler.js.map
|
||||
packages/server/src/middleware/ownerHandler.test.d.ts
|
||||
packages/server/src/middleware/ownerHandler.test.js
|
||||
packages/server/src/middleware/ownerHandler.test.js.map
|
||||
packages/server/src/middleware/routeHandler.d.ts
|
||||
packages/server/src/middleware/routeHandler.js
|
||||
packages/server/src/middleware/routeHandler.js.map
|
||||
packages/server/src/migrations/20190913171451_create.d.ts
|
||||
packages/server/src/migrations/20190913171451_create.js
|
||||
packages/server/src/migrations/20190913171451_create.js.map
|
||||
packages/server/src/migrations/20203012152842_notifications.d.ts
|
||||
packages/server/src/migrations/20203012152842_notifications.js
|
||||
packages/server/src/migrations/20203012152842_notifications.js.map
|
||||
packages/server/src/models/ApiClientModel.d.ts
|
||||
packages/server/src/models/ApiClientModel.js
|
||||
packages/server/src/models/ApiClientModel.js.map
|
||||
packages/server/src/models/BaseModel.d.ts
|
||||
packages/server/src/models/BaseModel.js
|
||||
packages/server/src/models/BaseModel.js.map
|
||||
packages/server/src/models/ChangeModel.d.ts
|
||||
packages/server/src/models/ChangeModel.js
|
||||
packages/server/src/models/ChangeModel.js.map
|
||||
packages/server/src/models/ChangeModel.test.d.ts
|
||||
packages/server/src/models/ChangeModel.test.js
|
||||
packages/server/src/models/ChangeModel.test.js.map
|
||||
packages/server/src/models/FileModel.d.ts
|
||||
packages/server/src/models/FileModel.js
|
||||
packages/server/src/models/FileModel.js.map
|
||||
packages/server/src/models/FileModel.test.d.ts
|
||||
packages/server/src/models/FileModel.test.js
|
||||
packages/server/src/models/FileModel.test.js.map
|
||||
packages/server/src/models/NotificationModel.d.ts
|
||||
packages/server/src/models/NotificationModel.js
|
||||
packages/server/src/models/NotificationModel.js.map
|
||||
packages/server/src/models/NotificationModel.test.d.ts
|
||||
packages/server/src/models/NotificationModel.test.js
|
||||
packages/server/src/models/NotificationModel.test.js.map
|
||||
packages/server/src/models/PermissionModel.d.ts
|
||||
packages/server/src/models/PermissionModel.js
|
||||
packages/server/src/models/PermissionModel.js.map
|
||||
packages/server/src/models/SessionModel.d.ts
|
||||
packages/server/src/models/SessionModel.js
|
||||
packages/server/src/models/SessionModel.js.map
|
||||
packages/server/src/models/UserModel.d.ts
|
||||
packages/server/src/models/UserModel.js
|
||||
packages/server/src/models/UserModel.js.map
|
||||
packages/server/src/models/UserModel.test.d.ts
|
||||
packages/server/src/models/UserModel.test.js
|
||||
packages/server/src/models/UserModel.test.js.map
|
||||
packages/server/src/models/factory.d.ts
|
||||
packages/server/src/models/factory.js
|
||||
packages/server/src/models/factory.js.map
|
||||
packages/server/src/models/utils/pagination.d.ts
|
||||
packages/server/src/models/utils/pagination.js
|
||||
packages/server/src/models/utils/pagination.js.map
|
||||
packages/server/src/models/utils/pagination.test.d.ts
|
||||
packages/server/src/models/utils/pagination.test.js
|
||||
packages/server/src/models/utils/pagination.test.js.map
|
||||
packages/server/src/routes/api/files.d.ts
|
||||
packages/server/src/routes/api/files.js
|
||||
packages/server/src/routes/api/files.js.map
|
||||
packages/server/src/routes/api/files.test.d.ts
|
||||
packages/server/src/routes/api/files.test.js
|
||||
packages/server/src/routes/api/files.test.js.map
|
||||
packages/server/src/routes/api/ping.d.ts
|
||||
packages/server/src/routes/api/ping.js
|
||||
packages/server/src/routes/api/ping.js.map
|
||||
packages/server/src/routes/api/ping.test.d.ts
|
||||
packages/server/src/routes/api/ping.test.js
|
||||
packages/server/src/routes/api/ping.test.js.map
|
||||
packages/server/src/routes/api/sessions.d.ts
|
||||
packages/server/src/routes/api/sessions.js
|
||||
packages/server/src/routes/api/sessions.js.map
|
||||
packages/server/src/routes/api/sessions.test.d.ts
|
||||
packages/server/src/routes/api/sessions.test.js
|
||||
packages/server/src/routes/api/sessions.test.js.map
|
||||
packages/server/src/routes/default.d.ts
|
||||
packages/server/src/routes/default.js
|
||||
packages/server/src/routes/default.js.map
|
||||
packages/server/src/routes/index/files.d.ts
|
||||
packages/server/src/routes/index/files.js
|
||||
packages/server/src/routes/index/files.js.map
|
||||
packages/server/src/routes/index/home.d.ts
|
||||
packages/server/src/routes/index/home.js
|
||||
packages/server/src/routes/index/home.js.map
|
||||
packages/server/src/routes/index/home.test.d.ts
|
||||
packages/server/src/routes/index/home.test.js
|
||||
packages/server/src/routes/index/home.test.js.map
|
||||
packages/server/src/routes/index/login.d.ts
|
||||
packages/server/src/routes/index/login.js
|
||||
packages/server/src/routes/index/login.js.map
|
||||
packages/server/src/routes/index/login.test.d.ts
|
||||
packages/server/src/routes/index/login.test.js
|
||||
packages/server/src/routes/index/login.test.js.map
|
||||
packages/server/src/routes/index/logout.d.ts
|
||||
packages/server/src/routes/index/logout.js
|
||||
packages/server/src/routes/index/logout.js.map
|
||||
packages/server/src/routes/index/logout.test.d.ts
|
||||
packages/server/src/routes/index/logout.test.js
|
||||
packages/server/src/routes/index/logout.test.js.map
|
||||
packages/server/src/routes/index/notifications.d.ts
|
||||
packages/server/src/routes/index/notifications.js
|
||||
packages/server/src/routes/index/notifications.js.map
|
||||
packages/server/src/routes/index/notifications.test.d.ts
|
||||
packages/server/src/routes/index/notifications.test.js
|
||||
packages/server/src/routes/index/notifications.test.js.map
|
||||
packages/server/src/routes/index/users.d.ts
|
||||
packages/server/src/routes/index/users.js
|
||||
packages/server/src/routes/index/users.js.map
|
||||
packages/server/src/routes/index/users.test.d.ts
|
||||
packages/server/src/routes/index/users.test.js
|
||||
packages/server/src/routes/index/users.test.js.map
|
||||
packages/server/src/routes/oauth2/authorize.d.ts
|
||||
packages/server/src/routes/oauth2/authorize.js
|
||||
packages/server/src/routes/oauth2/authorize.js.map
|
||||
packages/server/src/routes/routes.d.ts
|
||||
packages/server/src/routes/routes.js
|
||||
packages/server/src/routes/routes.js.map
|
||||
packages/server/src/services/MustacheService.d.ts
|
||||
packages/server/src/services/MustacheService.js
|
||||
packages/server/src/services/MustacheService.js.map
|
||||
packages/server/src/tools/db-migrate.d.ts
|
||||
packages/server/src/tools/db-migrate.js
|
||||
packages/server/src/tools/db-migrate.js.map
|
||||
packages/server/src/tools/dbTools.d.ts
|
||||
packages/server/src/tools/dbTools.js
|
||||
packages/server/src/tools/dbTools.js.map
|
||||
packages/server/src/tools/generate-types.d.ts
|
||||
packages/server/src/tools/generate-types.js
|
||||
packages/server/src/tools/generate-types.js.map
|
||||
packages/server/src/utils/Router.d.ts
|
||||
packages/server/src/utils/Router.js
|
||||
packages/server/src/utils/Router.js.map
|
||||
packages/server/src/utils/TransactionHandler.d.ts
|
||||
packages/server/src/utils/TransactionHandler.js
|
||||
packages/server/src/utils/TransactionHandler.js.map
|
||||
packages/server/src/utils/auth.d.ts
|
||||
packages/server/src/utils/auth.js
|
||||
packages/server/src/utils/auth.js.map
|
||||
packages/server/src/utils/base64.d.ts
|
||||
packages/server/src/utils/base64.js
|
||||
packages/server/src/utils/base64.js.map
|
||||
packages/server/src/utils/cache.d.ts
|
||||
packages/server/src/utils/cache.js
|
||||
packages/server/src/utils/cache.js.map
|
||||
packages/server/src/utils/defaultView.d.ts
|
||||
packages/server/src/utils/defaultView.js
|
||||
packages/server/src/utils/defaultView.js.map
|
||||
packages/server/src/utils/errors.d.ts
|
||||
packages/server/src/utils/errors.js
|
||||
packages/server/src/utils/errors.js.map
|
||||
packages/server/src/utils/htmlUtils.d.ts
|
||||
packages/server/src/utils/htmlUtils.js
|
||||
packages/server/src/utils/htmlUtils.js.map
|
||||
packages/server/src/utils/koaIf.d.ts
|
||||
packages/server/src/utils/koaIf.js
|
||||
packages/server/src/utils/koaIf.js.map
|
||||
packages/server/src/utils/requestUtils.d.ts
|
||||
packages/server/src/utils/requestUtils.js
|
||||
packages/server/src/utils/requestUtils.js.map
|
||||
packages/server/src/utils/routeUtils.d.ts
|
||||
packages/server/src/utils/routeUtils.js
|
||||
packages/server/src/utils/routeUtils.js.map
|
||||
packages/server/src/utils/routeUtils.test.d.ts
|
||||
packages/server/src/utils/routeUtils.test.js
|
||||
packages/server/src/utils/routeUtils.test.js.map
|
||||
packages/server/src/utils/testing/apiUtils.d.ts
|
||||
packages/server/src/utils/testing/apiUtils.js
|
||||
packages/server/src/utils/testing/apiUtils.js.map
|
||||
packages/server/src/utils/testing/koa/FakeCookies.d.ts
|
||||
packages/server/src/utils/testing/koa/FakeCookies.js
|
||||
packages/server/src/utils/testing/koa/FakeCookies.js.map
|
||||
packages/server/src/utils/testing/koa/FakeRequest.d.ts
|
||||
packages/server/src/utils/testing/koa/FakeRequest.js
|
||||
packages/server/src/utils/testing/koa/FakeRequest.js.map
|
||||
packages/server/src/utils/testing/koa/FakeResponse.d.ts
|
||||
packages/server/src/utils/testing/koa/FakeResponse.js
|
||||
packages/server/src/utils/testing/koa/FakeResponse.js.map
|
||||
packages/server/src/utils/testing/testRouters.d.ts
|
||||
packages/server/src/utils/testing/testRouters.js
|
||||
packages/server/src/utils/testing/testRouters.js.map
|
||||
packages/server/src/utils/testing/testUtils.d.ts
|
||||
packages/server/src/utils/testing/testUtils.js
|
||||
packages/server/src/utils/testing/testUtils.js.map
|
||||
packages/server/src/utils/time.d.ts
|
||||
packages/server/src/utils/time.js
|
||||
packages/server/src/utils/time.js.map
|
||||
packages/server/src/utils/types.d.ts
|
||||
packages/server/src/utils/types.js
|
||||
packages/server/src/utils/types.js.map
|
||||
packages/server/src/utils/urlUtils.d.ts
|
||||
packages/server/src/utils/urlUtils.js
|
||||
packages/server/src/utils/urlUtils.js.map
|
||||
packages/server/src/utils/uuidgen.d.ts
|
||||
packages/server/src/utils/uuidgen.js
|
||||
packages/server/src/utils/uuidgen.js.map
|
||||
packages/tools/generate-database-types.d.ts
|
||||
packages/tools/generate-database-types.js
|
||||
packages/tools/generate-database-types.js.map
|
||||
packages/tools/lerna-add.d.ts
|
||||
packages/tools/lerna-add.js
|
||||
packages/tools/lerna-add.js.map
|
||||
|
237
.gitignore
vendored
237
.gitignore
vendored
@ -84,6 +84,9 @@ packages/app-cli/tests/Synchronizer.resources.js.map
|
||||
packages/app-cli/tests/Synchronizer.revisions.d.ts
|
||||
packages/app-cli/tests/Synchronizer.revisions.js
|
||||
packages/app-cli/tests/Synchronizer.revisions.js.map
|
||||
packages/app-cli/tests/Synchronizer.sharing.d.ts
|
||||
packages/app-cli/tests/Synchronizer.sharing.js
|
||||
packages/app-cli/tests/Synchronizer.sharing.js.map
|
||||
packages/app-cli/tests/Synchronizer.tags.d.ts
|
||||
packages/app-cli/tests/Synchronizer.tags.js
|
||||
packages/app-cli/tests/Synchronizer.tags.js.map
|
||||
@ -771,15 +774,18 @@ packages/lib/BaseApplication.js.map
|
||||
packages/lib/BaseModel.d.ts
|
||||
packages/lib/BaseModel.js
|
||||
packages/lib/BaseModel.js.map
|
||||
packages/lib/BaseSyncTarget.d.ts
|
||||
packages/lib/BaseSyncTarget.js
|
||||
packages/lib/BaseSyncTarget.js.map
|
||||
packages/lib/InMemoryCache.d.ts
|
||||
packages/lib/InMemoryCache.js
|
||||
packages/lib/InMemoryCache.js.map
|
||||
packages/lib/JoplinDatabase.d.ts
|
||||
packages/lib/JoplinDatabase.js
|
||||
packages/lib/JoplinDatabase.js.map
|
||||
packages/lib/JoplinServerApi.d.ts
|
||||
packages/lib/JoplinServerApi.js
|
||||
packages/lib/JoplinServerApi.js.map
|
||||
packages/lib/JoplinServerApi2.d.ts
|
||||
packages/lib/JoplinServerApi2.js
|
||||
packages/lib/JoplinServerApi2.js.map
|
||||
packages/lib/Logger.d.ts
|
||||
packages/lib/Logger.js
|
||||
packages/lib/Logger.js.map
|
||||
@ -804,6 +810,9 @@ packages/lib/commands/historyForward.js.map
|
||||
packages/lib/commands/synchronize.d.ts
|
||||
packages/lib/commands/synchronize.js
|
||||
packages/lib/commands/synchronize.js.map
|
||||
packages/lib/database.d.ts
|
||||
packages/lib/database.js
|
||||
packages/lib/database.js.map
|
||||
packages/lib/dummy.test.d.ts
|
||||
packages/lib/dummy.test.js
|
||||
packages/lib/dummy.test.js.map
|
||||
@ -816,6 +825,9 @@ packages/lib/eventManager.js.map
|
||||
packages/lib/file-api-driver-joplinServer.d.ts
|
||||
packages/lib/file-api-driver-joplinServer.js
|
||||
packages/lib/file-api-driver-joplinServer.js.map
|
||||
packages/lib/file-api.d.ts
|
||||
packages/lib/file-api.js
|
||||
packages/lib/file-api.js.map
|
||||
packages/lib/fs-driver-base.d.ts
|
||||
packages/lib/fs-driver-base.js
|
||||
packages/lib/fs-driver-base.js.map
|
||||
@ -903,6 +915,9 @@ packages/lib/path-utils.js.map
|
||||
packages/lib/reducer.d.ts
|
||||
packages/lib/reducer.js
|
||||
packages/lib/reducer.js.map
|
||||
packages/lib/registry.d.ts
|
||||
packages/lib/registry.js
|
||||
packages/lib/registry.js.map
|
||||
packages/lib/services/AlarmService.d.ts
|
||||
packages/lib/services/AlarmService.js
|
||||
packages/lib/services/AlarmService.js.map
|
||||
@ -1419,219 +1434,9 @@ packages/renderer/pathUtils.js.map
|
||||
packages/renderer/utils.d.ts
|
||||
packages/renderer/utils.js
|
||||
packages/renderer/utils.js.map
|
||||
packages/server/src/app.d.ts
|
||||
packages/server/src/app.js
|
||||
packages/server/src/app.js.map
|
||||
packages/server/src/config.d.ts
|
||||
packages/server/src/config.js
|
||||
packages/server/src/config.js.map
|
||||
packages/server/src/db.d.ts
|
||||
packages/server/src/db.js
|
||||
packages/server/src/db.js.map
|
||||
packages/server/src/middleware/notificationHandler.d.ts
|
||||
packages/server/src/middleware/notificationHandler.js
|
||||
packages/server/src/middleware/notificationHandler.js.map
|
||||
packages/server/src/middleware/notificationHandler.test.d.ts
|
||||
packages/server/src/middleware/notificationHandler.test.js
|
||||
packages/server/src/middleware/notificationHandler.test.js.map
|
||||
packages/server/src/middleware/ownerHandler.d.ts
|
||||
packages/server/src/middleware/ownerHandler.js
|
||||
packages/server/src/middleware/ownerHandler.js.map
|
||||
packages/server/src/middleware/ownerHandler.test.d.ts
|
||||
packages/server/src/middleware/ownerHandler.test.js
|
||||
packages/server/src/middleware/ownerHandler.test.js.map
|
||||
packages/server/src/middleware/routeHandler.d.ts
|
||||
packages/server/src/middleware/routeHandler.js
|
||||
packages/server/src/middleware/routeHandler.js.map
|
||||
packages/server/src/migrations/20190913171451_create.d.ts
|
||||
packages/server/src/migrations/20190913171451_create.js
|
||||
packages/server/src/migrations/20190913171451_create.js.map
|
||||
packages/server/src/migrations/20203012152842_notifications.d.ts
|
||||
packages/server/src/migrations/20203012152842_notifications.js
|
||||
packages/server/src/migrations/20203012152842_notifications.js.map
|
||||
packages/server/src/models/ApiClientModel.d.ts
|
||||
packages/server/src/models/ApiClientModel.js
|
||||
packages/server/src/models/ApiClientModel.js.map
|
||||
packages/server/src/models/BaseModel.d.ts
|
||||
packages/server/src/models/BaseModel.js
|
||||
packages/server/src/models/BaseModel.js.map
|
||||
packages/server/src/models/ChangeModel.d.ts
|
||||
packages/server/src/models/ChangeModel.js
|
||||
packages/server/src/models/ChangeModel.js.map
|
||||
packages/server/src/models/ChangeModel.test.d.ts
|
||||
packages/server/src/models/ChangeModel.test.js
|
||||
packages/server/src/models/ChangeModel.test.js.map
|
||||
packages/server/src/models/FileModel.d.ts
|
||||
packages/server/src/models/FileModel.js
|
||||
packages/server/src/models/FileModel.js.map
|
||||
packages/server/src/models/FileModel.test.d.ts
|
||||
packages/server/src/models/FileModel.test.js
|
||||
packages/server/src/models/FileModel.test.js.map
|
||||
packages/server/src/models/NotificationModel.d.ts
|
||||
packages/server/src/models/NotificationModel.js
|
||||
packages/server/src/models/NotificationModel.js.map
|
||||
packages/server/src/models/NotificationModel.test.d.ts
|
||||
packages/server/src/models/NotificationModel.test.js
|
||||
packages/server/src/models/NotificationModel.test.js.map
|
||||
packages/server/src/models/PermissionModel.d.ts
|
||||
packages/server/src/models/PermissionModel.js
|
||||
packages/server/src/models/PermissionModel.js.map
|
||||
packages/server/src/models/SessionModel.d.ts
|
||||
packages/server/src/models/SessionModel.js
|
||||
packages/server/src/models/SessionModel.js.map
|
||||
packages/server/src/models/UserModel.d.ts
|
||||
packages/server/src/models/UserModel.js
|
||||
packages/server/src/models/UserModel.js.map
|
||||
packages/server/src/models/UserModel.test.d.ts
|
||||
packages/server/src/models/UserModel.test.js
|
||||
packages/server/src/models/UserModel.test.js.map
|
||||
packages/server/src/models/factory.d.ts
|
||||
packages/server/src/models/factory.js
|
||||
packages/server/src/models/factory.js.map
|
||||
packages/server/src/models/utils/pagination.d.ts
|
||||
packages/server/src/models/utils/pagination.js
|
||||
packages/server/src/models/utils/pagination.js.map
|
||||
packages/server/src/models/utils/pagination.test.d.ts
|
||||
packages/server/src/models/utils/pagination.test.js
|
||||
packages/server/src/models/utils/pagination.test.js.map
|
||||
packages/server/src/routes/api/files.d.ts
|
||||
packages/server/src/routes/api/files.js
|
||||
packages/server/src/routes/api/files.js.map
|
||||
packages/server/src/routes/api/files.test.d.ts
|
||||
packages/server/src/routes/api/files.test.js
|
||||
packages/server/src/routes/api/files.test.js.map
|
||||
packages/server/src/routes/api/ping.d.ts
|
||||
packages/server/src/routes/api/ping.js
|
||||
packages/server/src/routes/api/ping.js.map
|
||||
packages/server/src/routes/api/ping.test.d.ts
|
||||
packages/server/src/routes/api/ping.test.js
|
||||
packages/server/src/routes/api/ping.test.js.map
|
||||
packages/server/src/routes/api/sessions.d.ts
|
||||
packages/server/src/routes/api/sessions.js
|
||||
packages/server/src/routes/api/sessions.js.map
|
||||
packages/server/src/routes/api/sessions.test.d.ts
|
||||
packages/server/src/routes/api/sessions.test.js
|
||||
packages/server/src/routes/api/sessions.test.js.map
|
||||
packages/server/src/routes/default.d.ts
|
||||
packages/server/src/routes/default.js
|
||||
packages/server/src/routes/default.js.map
|
||||
packages/server/src/routes/index/files.d.ts
|
||||
packages/server/src/routes/index/files.js
|
||||
packages/server/src/routes/index/files.js.map
|
||||
packages/server/src/routes/index/home.d.ts
|
||||
packages/server/src/routes/index/home.js
|
||||
packages/server/src/routes/index/home.js.map
|
||||
packages/server/src/routes/index/home.test.d.ts
|
||||
packages/server/src/routes/index/home.test.js
|
||||
packages/server/src/routes/index/home.test.js.map
|
||||
packages/server/src/routes/index/login.d.ts
|
||||
packages/server/src/routes/index/login.js
|
||||
packages/server/src/routes/index/login.js.map
|
||||
packages/server/src/routes/index/login.test.d.ts
|
||||
packages/server/src/routes/index/login.test.js
|
||||
packages/server/src/routes/index/login.test.js.map
|
||||
packages/server/src/routes/index/logout.d.ts
|
||||
packages/server/src/routes/index/logout.js
|
||||
packages/server/src/routes/index/logout.js.map
|
||||
packages/server/src/routes/index/logout.test.d.ts
|
||||
packages/server/src/routes/index/logout.test.js
|
||||
packages/server/src/routes/index/logout.test.js.map
|
||||
packages/server/src/routes/index/notifications.d.ts
|
||||
packages/server/src/routes/index/notifications.js
|
||||
packages/server/src/routes/index/notifications.js.map
|
||||
packages/server/src/routes/index/notifications.test.d.ts
|
||||
packages/server/src/routes/index/notifications.test.js
|
||||
packages/server/src/routes/index/notifications.test.js.map
|
||||
packages/server/src/routes/index/users.d.ts
|
||||
packages/server/src/routes/index/users.js
|
||||
packages/server/src/routes/index/users.js.map
|
||||
packages/server/src/routes/index/users.test.d.ts
|
||||
packages/server/src/routes/index/users.test.js
|
||||
packages/server/src/routes/index/users.test.js.map
|
||||
packages/server/src/routes/oauth2/authorize.d.ts
|
||||
packages/server/src/routes/oauth2/authorize.js
|
||||
packages/server/src/routes/oauth2/authorize.js.map
|
||||
packages/server/src/routes/routes.d.ts
|
||||
packages/server/src/routes/routes.js
|
||||
packages/server/src/routes/routes.js.map
|
||||
packages/server/src/services/MustacheService.d.ts
|
||||
packages/server/src/services/MustacheService.js
|
||||
packages/server/src/services/MustacheService.js.map
|
||||
packages/server/src/tools/db-migrate.d.ts
|
||||
packages/server/src/tools/db-migrate.js
|
||||
packages/server/src/tools/db-migrate.js.map
|
||||
packages/server/src/tools/dbTools.d.ts
|
||||
packages/server/src/tools/dbTools.js
|
||||
packages/server/src/tools/dbTools.js.map
|
||||
packages/server/src/tools/generate-types.d.ts
|
||||
packages/server/src/tools/generate-types.js
|
||||
packages/server/src/tools/generate-types.js.map
|
||||
packages/server/src/utils/Router.d.ts
|
||||
packages/server/src/utils/Router.js
|
||||
packages/server/src/utils/Router.js.map
|
||||
packages/server/src/utils/TransactionHandler.d.ts
|
||||
packages/server/src/utils/TransactionHandler.js
|
||||
packages/server/src/utils/TransactionHandler.js.map
|
||||
packages/server/src/utils/auth.d.ts
|
||||
packages/server/src/utils/auth.js
|
||||
packages/server/src/utils/auth.js.map
|
||||
packages/server/src/utils/base64.d.ts
|
||||
packages/server/src/utils/base64.js
|
||||
packages/server/src/utils/base64.js.map
|
||||
packages/server/src/utils/cache.d.ts
|
||||
packages/server/src/utils/cache.js
|
||||
packages/server/src/utils/cache.js.map
|
||||
packages/server/src/utils/defaultView.d.ts
|
||||
packages/server/src/utils/defaultView.js
|
||||
packages/server/src/utils/defaultView.js.map
|
||||
packages/server/src/utils/errors.d.ts
|
||||
packages/server/src/utils/errors.js
|
||||
packages/server/src/utils/errors.js.map
|
||||
packages/server/src/utils/htmlUtils.d.ts
|
||||
packages/server/src/utils/htmlUtils.js
|
||||
packages/server/src/utils/htmlUtils.js.map
|
||||
packages/server/src/utils/koaIf.d.ts
|
||||
packages/server/src/utils/koaIf.js
|
||||
packages/server/src/utils/koaIf.js.map
|
||||
packages/server/src/utils/requestUtils.d.ts
|
||||
packages/server/src/utils/requestUtils.js
|
||||
packages/server/src/utils/requestUtils.js.map
|
||||
packages/server/src/utils/routeUtils.d.ts
|
||||
packages/server/src/utils/routeUtils.js
|
||||
packages/server/src/utils/routeUtils.js.map
|
||||
packages/server/src/utils/routeUtils.test.d.ts
|
||||
packages/server/src/utils/routeUtils.test.js
|
||||
packages/server/src/utils/routeUtils.test.js.map
|
||||
packages/server/src/utils/testing/apiUtils.d.ts
|
||||
packages/server/src/utils/testing/apiUtils.js
|
||||
packages/server/src/utils/testing/apiUtils.js.map
|
||||
packages/server/src/utils/testing/koa/FakeCookies.d.ts
|
||||
packages/server/src/utils/testing/koa/FakeCookies.js
|
||||
packages/server/src/utils/testing/koa/FakeCookies.js.map
|
||||
packages/server/src/utils/testing/koa/FakeRequest.d.ts
|
||||
packages/server/src/utils/testing/koa/FakeRequest.js
|
||||
packages/server/src/utils/testing/koa/FakeRequest.js.map
|
||||
packages/server/src/utils/testing/koa/FakeResponse.d.ts
|
||||
packages/server/src/utils/testing/koa/FakeResponse.js
|
||||
packages/server/src/utils/testing/koa/FakeResponse.js.map
|
||||
packages/server/src/utils/testing/testRouters.d.ts
|
||||
packages/server/src/utils/testing/testRouters.js
|
||||
packages/server/src/utils/testing/testRouters.js.map
|
||||
packages/server/src/utils/testing/testUtils.d.ts
|
||||
packages/server/src/utils/testing/testUtils.js
|
||||
packages/server/src/utils/testing/testUtils.js.map
|
||||
packages/server/src/utils/time.d.ts
|
||||
packages/server/src/utils/time.js
|
||||
packages/server/src/utils/time.js.map
|
||||
packages/server/src/utils/types.d.ts
|
||||
packages/server/src/utils/types.js
|
||||
packages/server/src/utils/types.js.map
|
||||
packages/server/src/utils/urlUtils.d.ts
|
||||
packages/server/src/utils/urlUtils.js
|
||||
packages/server/src/utils/urlUtils.js.map
|
||||
packages/server/src/utils/uuidgen.d.ts
|
||||
packages/server/src/utils/uuidgen.js
|
||||
packages/server/src/utils/uuidgen.js.map
|
||||
packages/tools/generate-database-types.d.ts
|
||||
packages/tools/generate-database-types.js
|
||||
packages/tools/generate-database-types.js.map
|
||||
packages/tools/lerna-add.d.ts
|
||||
packages/tools/lerna-add.js
|
||||
packages/tools/lerna-add.js.map
|
||||
|
@ -117,6 +117,7 @@ The Web Clipper is a browser extension that allows you to save web pages and scr
|
||||
- [Search Sorting spec](https://github.com/laurent22/joplin/blob/dev/readme/spec/search_sorting.md)
|
||||
- [Server: File URL Format](https://github.com/laurent22/joplin/blob/dev/readme/spec/server_file_url_format.md)
|
||||
- [Server: Delta Sync](https://github.com/laurent22/joplin/blob/dev/readme/spec/server_delta_sync.md)
|
||||
- [Server: Sharing](https://github.com/laurent22/joplin/blob/dev/readme/spec/server_sharing.md)
|
||||
|
||||
- Google Summer of Code 2020
|
||||
|
||||
|
@ -4,7 +4,7 @@ const fs = require('fs-extra');
|
||||
const Logger = require('@joplin/lib/Logger').default;
|
||||
const { dirname } = require('@joplin/lib/path-utils');
|
||||
const { DatabaseDriverNode } = require('@joplin/lib/database-driver-node.js');
|
||||
const { JoplinDatabase } = require('@joplin/lib/joplin-database.js');
|
||||
const JoplinDatabase = require('@joplin/lib/JoplinDatabase').default;
|
||||
const BaseModel = require('@joplin/lib/BaseModel').default;
|
||||
const Folder = require('@joplin/lib/models/Folder').default;
|
||||
const Note = require('@joplin/lib/models/Note').default;
|
||||
|
@ -4,7 +4,7 @@ const BaseModel = require('@joplin/lib/BaseModel').default;
|
||||
const { toTitleCase } = require('@joplin/lib/string-utils.js');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
const markdownUtils = require('@joplin/lib/markdownUtils').default;
|
||||
const { Database } = require('@joplin/lib/database.js');
|
||||
const Database = require('@joplin/lib/database').default;
|
||||
const shim = require('@joplin/lib/shim').default;
|
||||
|
||||
class Command extends BaseCommand {
|
||||
|
@ -2,7 +2,7 @@ const { BaseCommand } = require('./base-command.js');
|
||||
const { app } = require('./app.js');
|
||||
const { _ } = require('@joplin/lib/locale');
|
||||
const BaseModel = require('@joplin/lib/BaseModel').default;
|
||||
const { Database } = require('@joplin/lib/database.js');
|
||||
const Database = require('@joplin/lib/database').default;
|
||||
const Note = require('@joplin/lib/models/Note').default;
|
||||
|
||||
class Command extends BaseCommand {
|
||||
|
39
packages/app-cli/tests/Synchronizer.sharing.ts
Normal file
39
packages/app-cli/tests/Synchronizer.sharing.ts
Normal file
@ -0,0 +1,39 @@
|
||||
import { afterAllCleanUp, synchronizerStart, setupDatabaseAndSynchronizer, switchClient } from './test-utils';
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
import BaseItem from '@joplin/lib/models/BaseItem';
|
||||
import shim from '@joplin/lib/shim';
|
||||
import Resource from '@joplin/lib/models/Resource';
|
||||
|
||||
describe('Synchronizer.sharing', function() {
|
||||
|
||||
beforeEach(async (done) => {
|
||||
await setupDatabaseAndSynchronizer(1);
|
||||
await setupDatabaseAndSynchronizer(2);
|
||||
await switchClient(1);
|
||||
done();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await afterAllCleanUp();
|
||||
});
|
||||
|
||||
it('should mark link resources as shared before syncing', (async () => {
|
||||
let note1 = await Note.save({ title: 'note1' });
|
||||
note1 = await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
const resourceId1 = (await Note.linkedResourceIds(note1.body))[0];
|
||||
|
||||
const note2 = await Note.save({ title: 'note2' });
|
||||
await shim.attachFileToNote(note2, `${__dirname}/../tests/support/photo.jpg`);
|
||||
|
||||
expect((await Resource.sharedResourceIds()).length).toBe(0);
|
||||
|
||||
await BaseItem.updateShareStatus(note1, true);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const sharedResourceIds = await Resource.sharedResourceIds();
|
||||
expect(sharedResourceIds.length).toBe(1);
|
||||
expect(sharedResourceIds[0]).toBe(resourceId1);
|
||||
}));
|
||||
|
||||
});
|
@ -6,7 +6,7 @@ const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synch
|
||||
const Folder = require('@joplin/lib/models/Folder').default;
|
||||
const Note = require('@joplin/lib/models/Note').default;
|
||||
const Tag = require('@joplin/lib/models/Tag').default;
|
||||
const { Database } = require('@joplin/lib/database.js');
|
||||
const Database = require('@joplin/lib/database').default;
|
||||
const Setting = require('@joplin/lib/models/Setting').default;
|
||||
const BaseItem = require('@joplin/lib/models/BaseItem').default;
|
||||
const BaseModel = require('@joplin/lib/BaseModel').default;
|
||||
|
@ -18,9 +18,9 @@ import PluginService from '@joplin/lib/services/plugins/PluginService';
|
||||
import FileApiDriverJoplinServer from '@joplin/lib/file-api-driver-joplinServer';
|
||||
import OneDriveApi from '@joplin/lib/onedrive-api';
|
||||
import SyncTargetOneDrive from '@joplin/lib/SyncTargetOneDrive';
|
||||
import JoplinDatabase from '@joplin/lib/JoplinDatabase';
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const { JoplinDatabase } = require('@joplin/lib/joplin-database.js');
|
||||
const { DatabaseDriverNode } = require('@joplin/lib/database-driver-node.js');
|
||||
import Folder from '@joplin/lib/models/Folder';
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
@ -52,7 +52,7 @@ import RevisionService from '@joplin/lib/services/RevisionService';
|
||||
import ResourceFetcher from '@joplin/lib/services/ResourceFetcher';
|
||||
const WebDavApi = require('@joplin/lib/WebDavApi');
|
||||
const DropboxApi = require('@joplin/lib/DropboxApi');
|
||||
import JoplinServerApi from '@joplin/lib/JoplinServerApi2';
|
||||
import JoplinServerApi from '@joplin/lib/JoplinServerApi';
|
||||
const { loadKeychainServiceAndSettings } = require('@joplin/lib/services/SettingUtils');
|
||||
const md5 = require('md5');
|
||||
const S3 = require('aws-sdk/clients/s3');
|
||||
@ -402,7 +402,7 @@ async function setupDatabaseAndSynchronizer(id: number, options: any = null) {
|
||||
await fileApi().clearRoot();
|
||||
}
|
||||
|
||||
function db(id: number = null) {
|
||||
function db(id: number = null): JoplinDatabase {
|
||||
if (id === null) id = currentClient_;
|
||||
return databases_[id];
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ import MasterKey from '@joplin/lib/models/MasterKey';
|
||||
import Folder from '@joplin/lib/models/Folder';
|
||||
const fs = require('fs-extra');
|
||||
import Tag from '@joplin/lib/models/Tag';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
const packageInfo = require('./packageInfo.js');
|
||||
import DecryptionWorker from '@joplin/lib/services/DecryptionWorker';
|
||||
const ClipperServer = require('@joplin/lib/ClipperServer');
|
||||
@ -704,7 +704,7 @@ class Application extends BaseApplication {
|
||||
if (Setting.value('env') === 'dev') {
|
||||
void AlarmService.updateAllNotifications();
|
||||
} else {
|
||||
reg.scheduleSync(1000).then(() => {
|
||||
void reg.scheduleSync(1000).then(() => {
|
||||
// Wait for the first sync before updating the notifications, since synchronisation
|
||||
// might change the notifications.
|
||||
void AlarmService.updateAllNotifications();
|
||||
|
@ -42,7 +42,7 @@ class ConfigScreenComponent extends React.Component<any, any> {
|
||||
|
||||
this.sidebar_selectionChange = this.sidebar_selectionChange.bind(this);
|
||||
this.checkSyncConfig_ = this.checkSyncConfig_.bind(this);
|
||||
this.checkNextcloudAppButton_click = this.checkNextcloudAppButton_click.bind(this);
|
||||
// this.checkNextcloudAppButton_click = this.checkNextcloudAppButton_click.bind(this);
|
||||
this.showLogButton_click = this.showLogButton_click.bind(this);
|
||||
this.nextcloudAppHelpLink_click = this.nextcloudAppHelpLink_click.bind(this);
|
||||
this.onCancelClick = this.onCancelClick.bind(this);
|
||||
@ -57,10 +57,10 @@ class ConfigScreenComponent extends React.Component<any, any> {
|
||||
await shared.checkSyncConfig(this, this.state.settings);
|
||||
}
|
||||
|
||||
async checkNextcloudAppButton_click() {
|
||||
this.setState({ showNextcloudAppLog: true });
|
||||
await shared.checkNextcloudApp(this, this.state.settings);
|
||||
}
|
||||
// async checkNextcloudAppButton_click() {
|
||||
// this.setState({ showNextcloudAppLog: true });
|
||||
// await shared.checkNextcloudApp(this, this.state.settings);
|
||||
// }
|
||||
|
||||
showLogButton_click() {
|
||||
this.setState({ showNextcloudAppLog: true });
|
||||
@ -203,48 +203,48 @@ class ConfigScreenComponent extends React.Component<any, any> {
|
||||
);
|
||||
}
|
||||
|
||||
if (syncTargetMd.name === 'nextcloud') {
|
||||
const syncTarget = settings['sync.5.syncTargets'][settings['sync.5.path']];
|
||||
// if (syncTargetMd.name === 'nextcloud') {
|
||||
// const syncTarget = settings['sync.5.syncTargets'][settings['sync.5.path']];
|
||||
|
||||
let status = _('Unknown');
|
||||
let errorMessage = null;
|
||||
// let status = _('Unknown');
|
||||
// let errorMessage = null;
|
||||
|
||||
if (this.state.checkNextcloudAppResult === 'checking') {
|
||||
status = _('Checking...');
|
||||
} else if (syncTarget) {
|
||||
if (syncTarget.uuid) status = _('OK');
|
||||
if (syncTarget.error) {
|
||||
status = _('Error');
|
||||
errorMessage = syncTarget.error;
|
||||
}
|
||||
}
|
||||
// if (this.state.checkNextcloudAppResult === 'checking') {
|
||||
// status = _('Checking...');
|
||||
// } else if (syncTarget) {
|
||||
// if (syncTarget.uuid) status = _('OK');
|
||||
// if (syncTarget.error) {
|
||||
// status = _('Error');
|
||||
// errorMessage = syncTarget.error;
|
||||
// }
|
||||
// }
|
||||
|
||||
const statusComp = !errorMessage || this.state.checkNextcloudAppResult === 'checking' || !this.state.showNextcloudAppLog ? null : (
|
||||
<div style={statusStyle}>
|
||||
<p style={theme.textStyle}>{_('The Joplin Nextcloud App is either not installed or misconfigured. Please see the full error message below:')}</p>
|
||||
<pre>{errorMessage}</pre>
|
||||
</div>
|
||||
);
|
||||
// const statusComp = !errorMessage || this.state.checkNextcloudAppResult === 'checking' || !this.state.showNextcloudAppLog ? null : (
|
||||
// <div style={statusStyle}>
|
||||
// <p style={theme.textStyle}>{_('The Joplin Nextcloud App is either not installed or misconfigured. Please see the full error message below:')}</p>
|
||||
// <pre>{errorMessage}</pre>
|
||||
// </div>
|
||||
// );
|
||||
|
||||
const showLogButton = !errorMessage || this.state.showNextcloudAppLog ? null : (
|
||||
<a style={theme.urlStyle} href="#" onClick={this.showLogButton_click}>[{_('Show Log')}]</a>
|
||||
);
|
||||
// const showLogButton = !errorMessage || this.state.showNextcloudAppLog ? null : (
|
||||
// <a style={theme.urlStyle} href="#" onClick={this.showLogButton_click}>[{_('Show Log')}]</a>
|
||||
// );
|
||||
|
||||
const appStatusStyle = Object.assign({}, theme.textStyle, { fontWeight: 'bold' });
|
||||
// const appStatusStyle = Object.assign({}, theme.textStyle, { fontWeight: 'bold' });
|
||||
|
||||
settingComps.push(
|
||||
<div key="nextcloud_app_check" style={this.rowStyle_}>
|
||||
<span style={theme.textStyle}>Beta: {_('Joplin Nextcloud App status:')} </span><span style={appStatusStyle}>{status}</span>
|
||||
|
||||
{showLogButton}
|
||||
|
||||
<Button level={ButtonLevel.Secondary} style={{ display: 'inline-block' }} title={_('Check Status')} disabled={this.state.checkNextcloudAppResult === 'checking'} onClick={this.checkNextcloudAppButton_click}/>
|
||||
|
||||
<a style={theme.urlStyle} href="#" onClick={this.nextcloudAppHelpLink_click}>[{_('Help')}]</a>
|
||||
{statusComp}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
// settingComps.push(
|
||||
// <div key="nextcloud_app_check" style={this.rowStyle_}>
|
||||
// <span style={theme.textStyle}>Beta: {_('Joplin Nextcloud App status:')} </span><span style={appStatusStyle}>{status}</span>
|
||||
//
|
||||
// {showLogButton}
|
||||
//
|
||||
// <Button level={ButtonLevel.Secondary} style={{ display: 'inline-block' }} title={_('Check Status')} disabled={this.state.checkNextcloudAppResult === 'checking'} onClick={this.checkNextcloudAppButton_click}/>
|
||||
//
|
||||
// <a style={theme.urlStyle} href="#" onClick={this.nextcloudAppHelpLink_click}>[{_('Help')}]</a>
|
||||
// {statusComp}
|
||||
// </div>
|
||||
// );
|
||||
// }
|
||||
}
|
||||
|
||||
let advancedSettingsButton = null;
|
||||
|
@ -19,7 +19,7 @@ import stateToWhenClauseContext from '../services/commands/stateToWhenClauseCont
|
||||
import bridge from '../services/bridge';
|
||||
|
||||
const { connect } = require('react-redux');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
const packageInfo = require('../packageInfo.js');
|
||||
const { clipboard } = require('electron');
|
||||
const Menu = bridge().Menu;
|
||||
|
@ -30,7 +30,7 @@ const { clipboard } = require('electron');
|
||||
const shared = require('@joplin/lib/components/shared/note-screen-shared.js');
|
||||
const Menu = bridge().Menu;
|
||||
const MenuItem = bridge().MenuItem;
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
const menuUtils = new MenuUtils(CommandService.instance());
|
||||
|
||||
@ -371,7 +371,7 @@ function CodeMirror(props: NoteBodyEditorProps, ref: any) {
|
||||
/* These must be important to prevent the codemirror defaults from taking over*/
|
||||
.CodeMirror {
|
||||
font-family: monospace;
|
||||
font-size: ${theme.editorFontSize}px;
|
||||
font-size: ${props.fontSize}px;
|
||||
height: 100% !important;
|
||||
width: 100% !important;
|
||||
color: inherit !important;
|
||||
|
@ -31,7 +31,7 @@ import Setting from '@joplin/lib/models/Setting';
|
||||
|
||||
// import eventManager from '@joplin/lib/eventManager';
|
||||
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
// Based on http://pypl.github.io/PYPL.html
|
||||
const topLanguages = [
|
||||
|
@ -2,7 +2,7 @@ import { NoteBodyEditorProps } from '../../../utils/types';
|
||||
const { buildStyle } = require('@joplin/lib/theme');
|
||||
|
||||
export default function styles(props: NoteBodyEditorProps) {
|
||||
return buildStyle('CodeMirror', props.themeId, (theme: any) => {
|
||||
return buildStyle(['CodeMirror', props.fontSize], props.themeId, (theme: any) => {
|
||||
return {
|
||||
root: {
|
||||
position: 'relative',
|
||||
@ -49,8 +49,8 @@ export default function styles(props: NoteBodyEditorProps) {
|
||||
flex: 1,
|
||||
overflowY: 'hidden',
|
||||
paddingTop: 0,
|
||||
lineHeight: `${theme.textAreaLineHeight}px`,
|
||||
fontSize: `${theme.editorFontSize}px`,
|
||||
lineHeight: `${Math.round(17 * props.fontSize / 12)}px`,
|
||||
fontSize: `${props.fontSize}px`,
|
||||
color: theme.color,
|
||||
backgroundColor: theme.backgroundColor,
|
||||
codeMirrorTheme: theme.codeMirrorTheme, // Defined in theme.js
|
||||
|
@ -5,7 +5,7 @@ import { extname } from 'path';
|
||||
import shim from '@joplin/lib/shim';
|
||||
import uuid from '@joplin/lib/uuid';
|
||||
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
export default function useExternalPlugins(CodeMirror: any, plugins: PluginStates) {
|
||||
|
||||
|
@ -3,7 +3,7 @@ import CommandService from '@joplin/lib/services/CommandService';
|
||||
import KeymapService, { KeymapItem } from '@joplin/lib/services/KeymapService';
|
||||
import { EditorCommand } from '../../../utils/types';
|
||||
import shim from '@joplin/lib/shim';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
export default function useKeymap(CodeMirror: any) {
|
||||
|
||||
|
@ -16,7 +16,7 @@ import shim from '@joplin/lib/shim';
|
||||
|
||||
const { MarkupToHtml } = require('@joplin/renderer');
|
||||
const taboverride = require('taboverride');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
import BaseItem from '@joplin/lib/models/BaseItem';
|
||||
const { themeStyle } = require('@joplin/lib/theme');
|
||||
const { clipboard } = require('electron');
|
||||
|
@ -34,7 +34,7 @@ import ExternalEditWatcher from '@joplin/lib/services/ExternalEditWatcher';
|
||||
const { themeStyle } = require('@joplin/lib/theme');
|
||||
const { substrWithEllipsis } = require('@joplin/lib/string-utils');
|
||||
const NoteSearchBar = require('../NoteSearchBar.min.js');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
import Folder from '@joplin/lib/models/Folder';
|
||||
const bridge = require('electron').remote.require('./bridge').default;
|
||||
@ -399,6 +399,7 @@ function NoteEditor(props: NoteEditorProps) {
|
||||
onDrop: onDrop,
|
||||
noteToolbarButtonInfos: props.toolbarButtonInfos,
|
||||
plugins: props.plugins,
|
||||
fontSize: Setting.value('style.editor.fontSize'),
|
||||
};
|
||||
|
||||
let editor = null;
|
||||
|
@ -5,7 +5,7 @@ import BaseModel from '@joplin/lib/BaseModel';
|
||||
import Resource from '@joplin/lib/models/Resource';
|
||||
const bridge = require('electron').remote.require('./bridge').default;
|
||||
import ResourceFetcher from '@joplin/lib/services/ResourceFetcher';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
const joplinRendererUtils = require('@joplin/renderer').utils;
|
||||
const { clipboard } = require('electron');
|
||||
const mimeUtils = require('@joplin/lib/mime-utils.js').mime;
|
||||
|
@ -64,6 +64,7 @@ export interface NoteBodyEditorProps {
|
||||
onDrop: Function;
|
||||
noteToolbarButtonInfos: ToolbarButtonInfo[];
|
||||
plugins: PluginStates;
|
||||
fontSize: number;
|
||||
}
|
||||
|
||||
export interface FormNote {
|
||||
|
@ -10,7 +10,7 @@ import ResourceEditWatcher from '@joplin/lib/services/ResourceEditWatcher/index'
|
||||
|
||||
const { MarkupToHtml } = require('@joplin/renderer');
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
import ResourceFetcher from '@joplin/lib/services/ResourceFetcher';
|
||||
import DecryptionWorker from '@joplin/lib/services/DecryptionWorker';
|
||||
|
||||
|
@ -12,7 +12,7 @@ const bridge = require('electron').remote.require('./bridge').default;
|
||||
const { urlDecode } = require('@joplin/lib/string-utils');
|
||||
const urlUtils = require('@joplin/lib/urlUtils');
|
||||
import ResourceFetcher from '@joplin/lib/services/ResourceFetcher';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
export default function useMessageHandler(scrollWhenReady: any, setScrollWhenReady: Function, editorRef: any, setLocalSearchResultCount: Function, dispatch: Function, formNote: FormNote) {
|
||||
return useCallback(async (event: any) => {
|
||||
|
@ -2,7 +2,7 @@ import { useEffect } from 'react';
|
||||
import { FormNote, ScrollOptionTypes } from './types';
|
||||
import CommandService, { CommandDeclaration, CommandRuntime, CommandContext } from '@joplin/lib/services/CommandService';
|
||||
import time from '@joplin/lib/time';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
const commandsWithDependencies = [
|
||||
require('../commands/showLocalSearch'),
|
||||
|
@ -1,7 +1,7 @@
|
||||
import PostMessageService, { MessageResponse, ResponderComponentType } from '@joplin/lib/services/PostMessageService';
|
||||
import * as React from 'react';
|
||||
const { connect } = require('react-redux');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
interface Props {
|
||||
onDomReady: Function;
|
||||
|
@ -3,7 +3,7 @@ import ButtonBar from './ConfigScreen/ButtonBar';
|
||||
import { _ } from '@joplin/lib/locale';
|
||||
|
||||
const { connect } = require('react-redux');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
import Setting from '@joplin/lib/models/Setting';
|
||||
const bridge = require('electron').remote.require('./bridge').default;
|
||||
const { themeStyle } = require('@joplin/lib/theme');
|
||||
@ -44,7 +44,7 @@ class OneDriveLoginScreenComponent extends React.Component<any, any> {
|
||||
if (!auth) {
|
||||
log(_('Authentication was not completed (did not receive an authentication token).'));
|
||||
} else {
|
||||
reg.scheduleSync(0);
|
||||
void reg.scheduleSync(0);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,14 +1,15 @@
|
||||
import * as React from 'react';
|
||||
import { useState, useEffect } from 'react';
|
||||
import JoplinServerApi from '@joplin/lib/JoplinServerApi';
|
||||
|
||||
import { _, _n } from '@joplin/lib/locale';
|
||||
const { themeStyle, buildStyle } = require('@joplin/lib/theme');
|
||||
const DialogButtonRow = require('./DialogButtonRow.min');
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
import Setting from '@joplin/lib/models/Setting';
|
||||
import BaseItem from '@joplin/lib/models/BaseItem';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import SyncTargetJoplinServer from '@joplin/lib/SyncTargetJoplinServer';
|
||||
|
||||
const { themeStyle, buildStyle } = require('@joplin/lib/theme');
|
||||
const DialogButtonRow = require('./DialogButtonRow.min');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
const { clipboard } = require('electron');
|
||||
|
||||
interface ShareNoteDialogProps {
|
||||
@ -82,17 +83,22 @@ export default function ShareNoteDialog(props: ShareNoteDialogProps) {
|
||||
void fetchNotes();
|
||||
}, [props.noteIds]);
|
||||
|
||||
const appApi = async () => {
|
||||
return reg.syncTargetNextcloud().appApi();
|
||||
const fileApi = async () => {
|
||||
const syncTarget = reg.syncTarget() as SyncTargetJoplinServer;
|
||||
return syncTarget.fileApi();
|
||||
};
|
||||
|
||||
const joplinServerApi = async (): Promise<JoplinServerApi> => {
|
||||
return (await fileApi()).driver().api();
|
||||
};
|
||||
|
||||
const buttonRow_click = () => {
|
||||
props.onClose();
|
||||
};
|
||||
|
||||
const copyLinksToClipboard = (shares: SharesMap) => {
|
||||
const copyLinksToClipboard = (api: JoplinServerApi, shares: SharesMap) => {
|
||||
const links = [];
|
||||
for (const n in shares) links.push(shares[n]._url);
|
||||
for (const n in shares) links.push(api.shareUrl(shares[n]));
|
||||
clipboard.writeText(links.join('\n'));
|
||||
};
|
||||
|
||||
@ -110,17 +116,15 @@ export default function ShareNoteDialog(props: ShareNoteDialogProps) {
|
||||
|
||||
setSharesState('creating');
|
||||
|
||||
const api = await appApi();
|
||||
const syncTargetId = api.syncTargetId(Setting.toPlainObject());
|
||||
const api = await joplinServerApi();
|
||||
|
||||
const newShares = Object.assign({}, shares);
|
||||
let sharedStatusChanged = false;
|
||||
|
||||
for (const note of notes) {
|
||||
const result = await api.exec('POST', 'shares', {
|
||||
syncTargetId: syncTargetId,
|
||||
noteId: note.id,
|
||||
});
|
||||
newShares[note.id] = result;
|
||||
const fullPath = (await fileApi()).fullPath(BaseItem.systemPath(note.id));
|
||||
const share = await api.shareFile(fullPath);
|
||||
newShares[note.id] = share;
|
||||
|
||||
const changed = await BaseItem.updateShareStatus(note, true);
|
||||
if (changed) sharedStatusChanged = true;
|
||||
@ -134,7 +138,7 @@ export default function ShareNoteDialog(props: ShareNoteDialogProps) {
|
||||
setSharesState('creating');
|
||||
}
|
||||
|
||||
copyLinksToClipboard(newShares);
|
||||
copyLinksToClipboard(api, newShares);
|
||||
|
||||
setSharesState('created');
|
||||
} catch (error) {
|
||||
@ -193,7 +197,14 @@ export default function ShareNoteDialog(props: ShareNoteDialogProps) {
|
||||
return '';
|
||||
};
|
||||
|
||||
const encryptionWarningMessage = !Setting.value('encryption.enabled') ? null : <div style={theme.textStyle}>{_('Note: When a note is shared, it will no longer be encrypted on the server.')}</div>;
|
||||
function renderEncryptionWarningMessage() {
|
||||
if (!Setting.value('encryption.enabled')) return null;
|
||||
return <div style={theme.textStyle}>{_('Note: When a note is shared, it will no longer be encrypted on the server.')}<hr/></div>;
|
||||
}
|
||||
|
||||
function renderBetaWarningMessage() {
|
||||
return <div style={theme.textStyle}>{'Sharing notes via Joplin Server is a Beta feature and the API might change later on. What it means is that if you share a note, the link might become invalid after an upgrade, and you will have to share it again.'}</div>;
|
||||
}
|
||||
|
||||
const rootStyle = Object.assign({}, theme.dialogBox);
|
||||
rootStyle.width = '50%';
|
||||
@ -205,7 +216,8 @@ export default function ShareNoteDialog(props: ShareNoteDialogProps) {
|
||||
{renderNoteList(notes)}
|
||||
<button disabled={['creating', 'synchronizing'].indexOf(sharesState) >= 0} style={styles.copyShareLinkButton} onClick={shareLinkButton_click}>{_n('Copy Shareable Link', 'Copy Shareable Links', noteCount)}</button>
|
||||
<div style={theme.textStyle}>{statusMessage(sharesState)}</div>
|
||||
{encryptionWarningMessage}
|
||||
{renderEncryptionWarningMessage()}
|
||||
{renderBetaWarningMessage()}
|
||||
<DialogButtonRow themeId={props.themeId} onClick={buttonRow_click} okButtonShow={false} cancelButtonLabel={_('Close')}/>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -1,5 +1,6 @@
|
||||
import { utils as pluginUtils, PluginStates } from '@joplin/lib/services/plugins/reducer';
|
||||
import CommandService from '@joplin/lib/services/CommandService';
|
||||
import SyncTargetJoplinServer from '@joplin/lib/SyncTargetJoplinServer';
|
||||
import eventManager from '@joplin/lib/eventManager';
|
||||
import InteropService from '@joplin/lib/services/interop/InteropService';
|
||||
import MenuUtils from '@joplin/lib/services/commands/MenuUtils';
|
||||
@ -12,6 +13,7 @@ const bridge = require('electron').remote.require('./bridge').default;
|
||||
const Menu = bridge().Menu;
|
||||
const MenuItem = bridge().MenuItem;
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
import Setting from '@joplin/lib/models/Setting';
|
||||
const { substrWithEllipsis } = require('@joplin/lib/string-utils');
|
||||
|
||||
interface ContextMenuProps {
|
||||
@ -131,11 +133,13 @@ export default class NoteListUtils {
|
||||
})
|
||||
);
|
||||
|
||||
menu.append(
|
||||
new MenuItem(
|
||||
menuUtils.commandToStatefulMenuItem('showShareNoteDialog', noteIds.slice())
|
||||
)
|
||||
);
|
||||
if (Setting.value('sync.target') === SyncTargetJoplinServer.id()) {
|
||||
menu.append(
|
||||
new MenuItem(
|
||||
menuUtils.commandToStatefulMenuItem('showShareNoteDialog', noteIds.slice())
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const exportMenu = new Menu();
|
||||
|
||||
|
@ -10,7 +10,7 @@ const { View } = require('react-native');
|
||||
const { WebView } = require('react-native-webview');
|
||||
const { themeStyle } = require('../global-style.js');
|
||||
import BackButtonDialogBox from '../BackButtonDialogBox';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
interface Props {
|
||||
themeId: number;
|
||||
|
@ -4,7 +4,7 @@ import shim from '@joplin/lib/shim';
|
||||
|
||||
const { ToastAndroid } = require('react-native');
|
||||
const { _ } = require('@joplin/lib/locale.js');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
const { dialogs } = require('../../../utils/dialogs.js');
|
||||
import Resource from '@joplin/lib/models/Resource';
|
||||
const Share = require('react-native-share').default;
|
||||
|
@ -29,7 +29,7 @@ const NoteTagsDialog = require('./NoteTagsDialog');
|
||||
import time from '@joplin/lib/time';
|
||||
const { Checkbox } = require('../checkbox.js');
|
||||
const { _ } = require('@joplin/lib/locale');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
import ResourceFetcher from '@joplin/lib/services/ResourceFetcher';
|
||||
const { BaseScreenComponent } = require('../base-screen.js');
|
||||
const { themeStyle, editorFont } = require('../global-style.js');
|
||||
|
@ -50,8 +50,8 @@ import BaseItem from '@joplin/lib/models/BaseItem';
|
||||
import MasterKey from '@joplin/lib/models/MasterKey';
|
||||
import Revision from '@joplin/lib/models/Revision';
|
||||
import RevisionService from '@joplin/lib/services/RevisionService';
|
||||
const { JoplinDatabase } = require('@joplin/lib/joplin-database.js');
|
||||
const { Database } = require('@joplin/lib/database.js');
|
||||
import JoplinDatabase from '@joplin/lib/JoplinDatabase';
|
||||
import Database from '@joplin/lib/database';
|
||||
const { NotesScreen } = require('./components/screens/notes.js');
|
||||
const { TagsScreen } = require('./components/screens/tags.js');
|
||||
const { ConfigScreen } = require('./components/screens/config.js');
|
||||
@ -67,7 +67,7 @@ const { SideMenu } = require('./components/side-menu.js');
|
||||
const { SideMenuContent } = require('./components/side-menu-content.js');
|
||||
const { SideMenuContentNote } = require('./components/side-menu-content-note.js');
|
||||
const { DatabaseDriverReactNative } = require('./utils/database-driver-react-native');
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
const { defaultState } = require('@joplin/lib/reducer');
|
||||
const { FileApiDriverLocal } = require('@joplin/lib/file-api-driver-local.js');
|
||||
import ResourceFetcher from '@joplin/lib/services/ResourceFetcher';
|
||||
@ -118,7 +118,7 @@ const generalMiddleware = (store: any) => (next: any) => async (action: any) =>
|
||||
if (action.type == 'NAV_GO') Keyboard.dismiss();
|
||||
|
||||
if (['NOTE_UPDATE_ONE', 'NOTE_DELETE', 'FOLDER_UPDATE_ONE', 'FOLDER_DELETE'].indexOf(action.type) >= 0) {
|
||||
if (!await reg.syncTarget().syncStarted()) reg.scheduleSync(5 * 1000, { syncSteps: ['update_remote', 'delete_remote'] });
|
||||
if (!await reg.syncTarget().syncStarted()) void reg.scheduleSync(5 * 1000, { syncSteps: ['update_remote', 'delete_remote'] });
|
||||
SearchEngine.instance().scheduleSyncTables();
|
||||
}
|
||||
|
||||
@ -151,7 +151,7 @@ const generalMiddleware = (store: any) => (next: any) => async (action: any) =>
|
||||
|
||||
// Schedule a sync operation so that items that need to be encrypted
|
||||
// are sent to sync target.
|
||||
reg.scheduleSync();
|
||||
void reg.scheduleSync();
|
||||
}
|
||||
|
||||
if (action.type == 'NAV_GO' && action.routeName == 'Notes') {
|
||||
@ -427,7 +427,7 @@ async function initialize(dispatch: Function) {
|
||||
db.setLogger(dbLogger);
|
||||
reg.setDb(db);
|
||||
|
||||
reg.dispatch = dispatch;
|
||||
// reg.dispatch = dispatch;
|
||||
BaseModel.dispatch = dispatch;
|
||||
FoldersScreenUtils.dispatch = dispatch;
|
||||
BaseSyncTarget.dispatch = dispatch;
|
||||
@ -585,7 +585,7 @@ async function initialize(dispatch: Function) {
|
||||
|
||||
// When the app starts we want the full sync to
|
||||
// start almost immediately to get the latest data.
|
||||
reg.scheduleSync(1000).then(() => {
|
||||
void reg.scheduleSync(1000).then(() => {
|
||||
// Wait for the first sync before updating the notifications, since synchronisation
|
||||
// might change the notifications.
|
||||
void AlarmService.updateAllNotifications();
|
||||
@ -672,7 +672,7 @@ class AppComponent extends React.Component {
|
||||
if (this.props.selectedFolderId) {
|
||||
await handleShared(sharedData, this.props.selectedFolderId, this.props.dispatch);
|
||||
} else {
|
||||
reg.logger.info('Cannot handle share - default folder id is not set');
|
||||
reg.logger().info('Cannot handle share - default folder id is not set');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ import * as QuickActions from 'react-native-quick-actions';
|
||||
import { _ } from '@joplin/lib/locale';
|
||||
const { DeviceEventEmitter } = require('react-native');
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
const { reg } = require('@joplin/lib/registry.js');
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
|
||||
type TData = {
|
||||
type: string;
|
||||
|
@ -11,7 +11,7 @@ import SyncTargetOneDrive from './SyncTargetOneDrive';
|
||||
|
||||
const { createStore, applyMiddleware } = require('redux');
|
||||
const { defaultState, stateUtils } = require('./reducer');
|
||||
const { JoplinDatabase } = require('./joplin-database.js');
|
||||
import JoplinDatabase from './JoplinDatabase';
|
||||
const { FoldersScreenUtils } = require('./folders-screen-utils.js');
|
||||
const { DatabaseDriverNode } = require('./database-driver-node.js');
|
||||
import BaseModel from './BaseModel';
|
||||
@ -20,9 +20,9 @@ import BaseItem from './models/BaseItem';
|
||||
import Note from './models/Note';
|
||||
import Tag from './models/Tag';
|
||||
const { splitCommandString } = require('./string-utils.js');
|
||||
const { reg } = require('./registry.js');
|
||||
import { reg } from './registry';
|
||||
import time from './time';
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
import BaseSyncTarget from './BaseSyncTarget';
|
||||
const reduxSharedMiddleware = require('./components/shared/reduxSharedMiddleware');
|
||||
const os = require('os');
|
||||
const fs = require('fs-extra');
|
||||
@ -433,7 +433,7 @@ export default class BaseApplication {
|
||||
|
||||
// Schedule a sync operation so that items that need to be encrypted
|
||||
// are sent to sync target.
|
||||
reg.scheduleSync();
|
||||
void reg.scheduleSync();
|
||||
}
|
||||
},
|
||||
'sync.interval': async () => {
|
||||
@ -470,7 +470,7 @@ export default class BaseApplication {
|
||||
await reduxSharedMiddleware(store, next, action);
|
||||
|
||||
if (this.hasGui() && ['NOTE_UPDATE_ONE', 'NOTE_DELETE', 'FOLDER_UPDATE_ONE', 'FOLDER_DELETE'].indexOf(action.type) >= 0) {
|
||||
if (!(await reg.syncTarget().syncStarted())) reg.scheduleSync(30 * 1000, { syncSteps: ['update_remote', 'delete_remote'] });
|
||||
if (!(await reg.syncTarget().syncStarted())) void reg.scheduleSync(30 * 1000, { syncSteps: ['update_remote', 'delete_remote'] });
|
||||
SearchEngine.instance().scheduleSyncTables();
|
||||
}
|
||||
|
||||
@ -604,7 +604,7 @@ export default class BaseApplication {
|
||||
this.store_ = createStore(this.reducer, applyMiddleware(this.generalMiddlewareFn()));
|
||||
BaseModel.dispatch = this.store().dispatch;
|
||||
FoldersScreenUtils.dispatch = this.store().dispatch;
|
||||
reg.dispatch = this.store().dispatch;
|
||||
// reg.dispatch = this.store().dispatch;
|
||||
BaseSyncTarget.dispatch = this.store().dispatch;
|
||||
DecryptionWorker.instance().dispatch = this.store().dispatch;
|
||||
ResourceFetcher.instance().dispatch = this.store().dispatch;
|
||||
@ -614,7 +614,7 @@ export default class BaseApplication {
|
||||
this.store_ = null;
|
||||
BaseModel.dispatch = function() {};
|
||||
FoldersScreenUtils.dispatch = function() {};
|
||||
reg.dispatch = function() {};
|
||||
// reg.dispatch = function() {};
|
||||
BaseSyncTarget.dispatch = function() {};
|
||||
DecryptionWorker.instance().dispatch = function() {};
|
||||
ResourceFetcher.instance().dispatch = function() {};
|
||||
@ -720,8 +720,8 @@ export default class BaseApplication {
|
||||
|
||||
|
||||
|
||||
reg.setLogger(Logger.create(''));
|
||||
reg.dispatch = () => {};
|
||||
reg.setLogger(Logger.create('') as Logger);
|
||||
// reg.dispatch = () => {};
|
||||
|
||||
BaseService.logger_ = globalLogger;
|
||||
|
||||
|
@ -1,8 +1,9 @@
|
||||
import paginationToSql from './models/utils/paginationToSql';
|
||||
|
||||
const { Database } = require('./database.js');
|
||||
import Database from './database';
|
||||
import uuid from './uuid';
|
||||
import time from './time';
|
||||
import JoplinDatabase from './JoplinDatabase';
|
||||
const Mutex = require('async-mutex').Mutex;
|
||||
|
||||
// New code should make use of this enum
|
||||
@ -69,7 +70,7 @@ class BaseModel {
|
||||
public static dispatch: Function = function() {};
|
||||
private static saveMutexes_: any = {};
|
||||
|
||||
private static db_: any;
|
||||
private static db_: JoplinDatabase;
|
||||
|
||||
static modelType(): ModelType {
|
||||
throw new Error('Must be overriden');
|
||||
@ -631,12 +632,12 @@ class BaseModel {
|
||||
return this.db().exec(`DELETE FROM ${this.tableName()} WHERE id = ?`, [id]);
|
||||
}
|
||||
|
||||
static batchDelete(ids: string[], options: any = null) {
|
||||
static async batchDelete(ids: string[], options: any = null) {
|
||||
if (!ids.length) return;
|
||||
options = this.modOptions(options);
|
||||
const idFieldName = options.idFieldName ? options.idFieldName : 'id';
|
||||
const sql = `DELETE FROM ${this.tableName()} WHERE ${idFieldName} IN ("${ids.join('","')}")`;
|
||||
return this.db().exec(sql);
|
||||
await this.db().exec(sql);
|
||||
}
|
||||
|
||||
static db() {
|
||||
|
@ -1,129 +1,135 @@
|
||||
const EncryptionService = require('./services/EncryptionService').default;
|
||||
const shim = require('./shim').default;
|
||||
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const EncryptionService_1 = require("./services/EncryptionService");
|
||||
const shim_1 = require("./shim");
|
||||
const ResourceService_1 = require("./services/ResourceService");
|
||||
class BaseSyncTarget {
|
||||
constructor(db, options = null) {
|
||||
this.db_ = db;
|
||||
this.synchronizer_ = null;
|
||||
this.initState_ = null;
|
||||
this.logger_ = null;
|
||||
this.options_ = options;
|
||||
}
|
||||
|
||||
static supportsConfigCheck() {
|
||||
return false;
|
||||
}
|
||||
|
||||
option(name, defaultValue = null) {
|
||||
return this.options_ && name in this.options_ ? this.options_[name] : defaultValue;
|
||||
}
|
||||
|
||||
logger() {
|
||||
return this.logger_;
|
||||
}
|
||||
|
||||
setLogger(v) {
|
||||
this.logger_ = v;
|
||||
}
|
||||
|
||||
db() {
|
||||
return this.db_;
|
||||
}
|
||||
|
||||
// If [] is returned it means all platforms are supported
|
||||
static unsupportedPlatforms() {
|
||||
return [];
|
||||
}
|
||||
|
||||
async isAuthenticated() {
|
||||
return false;
|
||||
}
|
||||
|
||||
authRouteName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
static id() {
|
||||
throw new Error('id() not implemented');
|
||||
}
|
||||
|
||||
// Note: it cannot be called just "name()" because that's a reserved keyword and
|
||||
// it would throw an obscure error in React Native.
|
||||
static targetName() {
|
||||
throw new Error('targetName() not implemented');
|
||||
}
|
||||
|
||||
static label() {
|
||||
throw new Error('label() not implemented');
|
||||
}
|
||||
|
||||
async initSynchronizer() {
|
||||
throw new Error('initSynchronizer() not implemented');
|
||||
}
|
||||
|
||||
async initFileApi() {
|
||||
throw new Error('initFileApi() not implemented');
|
||||
}
|
||||
|
||||
async fileApi() {
|
||||
if (this.fileApi_) return this.fileApi_;
|
||||
this.fileApi_ = await this.initFileApi();
|
||||
return this.fileApi_;
|
||||
}
|
||||
|
||||
fileApiSync() {
|
||||
return this.fileApi_;
|
||||
}
|
||||
|
||||
// Usually each sync target should create and setup its own file API via initFileApi()
|
||||
// but for testing purposes it might be convenient to provide it here so that multiple
|
||||
// clients can share and sync to the same file api (see test-utils.js)
|
||||
setFileApi(v) {
|
||||
this.fileApi_ = v;
|
||||
}
|
||||
|
||||
async synchronizer() {
|
||||
if (this.synchronizer_) return this.synchronizer_;
|
||||
|
||||
if (this.initState_ == 'started') {
|
||||
// Synchronizer is already being initialized, so wait here till it's done.
|
||||
return new Promise((resolve, reject) => {
|
||||
const iid = shim.setInterval(() => {
|
||||
if (this.initState_ == 'ready') {
|
||||
shim.clearInterval(iid);
|
||||
resolve(this.synchronizer_);
|
||||
}
|
||||
if (this.initState_ == 'error') {
|
||||
shim.clearInterval(iid);
|
||||
reject(new Error('Could not initialise synchroniser'));
|
||||
}
|
||||
}, 1000);
|
||||
});
|
||||
} else {
|
||||
this.initState_ = 'started';
|
||||
|
||||
try {
|
||||
this.synchronizer_ = await this.initSynchronizer();
|
||||
this.synchronizer_.setLogger(this.logger());
|
||||
this.synchronizer_.setEncryptionService(EncryptionService.instance());
|
||||
this.synchronizer_.dispatch = BaseSyncTarget.dispatch;
|
||||
this.initState_ = 'ready';
|
||||
return this.synchronizer_;
|
||||
} catch (error) {
|
||||
this.initState_ = 'error';
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async syncStarted() {
|
||||
if (!this.synchronizer_) return false;
|
||||
if (!(await this.isAuthenticated())) return false;
|
||||
const sync = await this.synchronizer();
|
||||
return sync.state() != 'idle';
|
||||
}
|
||||
constructor(db, options = null) {
|
||||
this.synchronizer_ = null;
|
||||
this.initState_ = null;
|
||||
this.logger_ = null;
|
||||
this.db_ = db;
|
||||
this.options_ = options;
|
||||
}
|
||||
static supportsConfigCheck() {
|
||||
return false;
|
||||
}
|
||||
option(name, defaultValue = null) {
|
||||
return this.options_ && name in this.options_ ? this.options_[name] : defaultValue;
|
||||
}
|
||||
logger() {
|
||||
return this.logger_;
|
||||
}
|
||||
setLogger(v) {
|
||||
this.logger_ = v;
|
||||
}
|
||||
db() {
|
||||
return this.db_;
|
||||
}
|
||||
// If [] is returned it means all platforms are supported
|
||||
static unsupportedPlatforms() {
|
||||
return [];
|
||||
}
|
||||
isAuthenticated() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return false;
|
||||
});
|
||||
}
|
||||
authRouteName() {
|
||||
return null;
|
||||
}
|
||||
static id() {
|
||||
throw new Error('id() not implemented');
|
||||
}
|
||||
// Note: it cannot be called just "name()" because that's a reserved keyword and
|
||||
// it would throw an obscure error in React Native.
|
||||
static targetName() {
|
||||
throw new Error('targetName() not implemented');
|
||||
}
|
||||
static label() {
|
||||
throw new Error('label() not implemented');
|
||||
}
|
||||
initSynchronizer() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
throw new Error('initSynchronizer() not implemented');
|
||||
});
|
||||
}
|
||||
initFileApi() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
throw new Error('initFileApi() not implemented');
|
||||
});
|
||||
}
|
||||
fileApi() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (this.fileApi_)
|
||||
return this.fileApi_;
|
||||
this.fileApi_ = yield this.initFileApi();
|
||||
return this.fileApi_;
|
||||
});
|
||||
}
|
||||
// Usually each sync target should create and setup its own file API via initFileApi()
|
||||
// but for testing purposes it might be convenient to provide it here so that multiple
|
||||
// clients can share and sync to the same file api (see test-utils.js)
|
||||
setFileApi(v) {
|
||||
this.fileApi_ = v;
|
||||
}
|
||||
synchronizer() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (this.synchronizer_)
|
||||
return this.synchronizer_;
|
||||
if (this.initState_ == 'started') {
|
||||
// Synchronizer is already being initialized, so wait here till it's done.
|
||||
return new Promise((resolve, reject) => {
|
||||
const iid = shim_1.default.setInterval(() => {
|
||||
if (this.initState_ == 'ready') {
|
||||
shim_1.default.clearInterval(iid);
|
||||
resolve(this.synchronizer_);
|
||||
}
|
||||
if (this.initState_ == 'error') {
|
||||
shim_1.default.clearInterval(iid);
|
||||
reject(new Error('Could not initialise synchroniser'));
|
||||
}
|
||||
}, 1000);
|
||||
});
|
||||
}
|
||||
else {
|
||||
this.initState_ = 'started';
|
||||
try {
|
||||
this.synchronizer_ = yield this.initSynchronizer();
|
||||
this.synchronizer_.setLogger(this.logger());
|
||||
this.synchronizer_.setEncryptionService(EncryptionService_1.default.instance());
|
||||
this.synchronizer_.setResourceService(ResourceService_1.default.instance());
|
||||
this.synchronizer_.dispatch = BaseSyncTarget.dispatch;
|
||||
this.initState_ = 'ready';
|
||||
return this.synchronizer_;
|
||||
}
|
||||
catch (error) {
|
||||
this.initState_ = 'error';
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
syncStarted() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!this.synchronizer_)
|
||||
return false;
|
||||
if (!(yield this.isAuthenticated()))
|
||||
return false;
|
||||
const sync = yield this.synchronizer();
|
||||
return sync.state() != 'idle';
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
BaseSyncTarget.dispatch = () => {};
|
||||
|
||||
module.exports = BaseSyncTarget;
|
||||
exports.default = BaseSyncTarget;
|
||||
BaseSyncTarget.dispatch = () => { };
|
||||
//# sourceMappingURL=BaseSyncTarget.js.map
|
132
packages/lib/BaseSyncTarget.ts
Normal file
132
packages/lib/BaseSyncTarget.ts
Normal file
@ -0,0 +1,132 @@
|
||||
import Logger from './Logger';
|
||||
import Synchronizer from './Synchronizer';
|
||||
import EncryptionService from './services/EncryptionService';
|
||||
import shim from './shim';
|
||||
import ResourceService from './services/ResourceService';
|
||||
|
||||
export default class BaseSyncTarget {
|
||||
|
||||
public static dispatch: Function = () => {};
|
||||
|
||||
private synchronizer_: Synchronizer = null;
|
||||
private initState_: any = null;
|
||||
private logger_: Logger = null;
|
||||
private options_: any;
|
||||
private db_: any;
|
||||
protected fileApi_: any;
|
||||
|
||||
public constructor(db: any, options: any = null) {
|
||||
this.db_ = db;
|
||||
this.options_ = options;
|
||||
}
|
||||
|
||||
public static supportsConfigCheck() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public option(name: string, defaultValue: any = null) {
|
||||
return this.options_ && name in this.options_ ? this.options_[name] : defaultValue;
|
||||
}
|
||||
|
||||
protected logger() {
|
||||
return this.logger_;
|
||||
}
|
||||
|
||||
public setLogger(v: Logger) {
|
||||
this.logger_ = v;
|
||||
}
|
||||
|
||||
protected db() {
|
||||
return this.db_;
|
||||
}
|
||||
|
||||
// If [] is returned it means all platforms are supported
|
||||
public static unsupportedPlatforms(): any[] {
|
||||
return [];
|
||||
}
|
||||
|
||||
public async isAuthenticated() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public authRouteName(): string {
|
||||
return null;
|
||||
}
|
||||
|
||||
public static id() {
|
||||
throw new Error('id() not implemented');
|
||||
}
|
||||
|
||||
// Note: it cannot be called just "name()" because that's a reserved keyword and
|
||||
// it would throw an obscure error in React Native.
|
||||
public static targetName() {
|
||||
throw new Error('targetName() not implemented');
|
||||
}
|
||||
|
||||
public static label() {
|
||||
throw new Error('label() not implemented');
|
||||
}
|
||||
|
||||
protected async initSynchronizer(): Promise<Synchronizer> {
|
||||
throw new Error('initSynchronizer() not implemented');
|
||||
}
|
||||
|
||||
protected async initFileApi(): Promise<any> {
|
||||
throw new Error('initFileApi() not implemented');
|
||||
}
|
||||
|
||||
public async fileApi() {
|
||||
if (this.fileApi_) return this.fileApi_;
|
||||
this.fileApi_ = await this.initFileApi();
|
||||
return this.fileApi_;
|
||||
}
|
||||
|
||||
// Usually each sync target should create and setup its own file API via initFileApi()
|
||||
// but for testing purposes it might be convenient to provide it here so that multiple
|
||||
// clients can share and sync to the same file api (see test-utils.js)
|
||||
public setFileApi(v: any) {
|
||||
this.fileApi_ = v;
|
||||
}
|
||||
|
||||
public async synchronizer(): Promise<Synchronizer> {
|
||||
if (this.synchronizer_) return this.synchronizer_;
|
||||
|
||||
if (this.initState_ == 'started') {
|
||||
// Synchronizer is already being initialized, so wait here till it's done.
|
||||
return new Promise((resolve, reject) => {
|
||||
const iid = shim.setInterval(() => {
|
||||
if (this.initState_ == 'ready') {
|
||||
shim.clearInterval(iid);
|
||||
resolve(this.synchronizer_);
|
||||
}
|
||||
if (this.initState_ == 'error') {
|
||||
shim.clearInterval(iid);
|
||||
reject(new Error('Could not initialise synchroniser'));
|
||||
}
|
||||
}, 1000);
|
||||
});
|
||||
} else {
|
||||
this.initState_ = 'started';
|
||||
|
||||
try {
|
||||
this.synchronizer_ = await this.initSynchronizer();
|
||||
this.synchronizer_.setLogger(this.logger());
|
||||
this.synchronizer_.setEncryptionService(EncryptionService.instance());
|
||||
this.synchronizer_.setResourceService(ResourceService.instance());
|
||||
this.synchronizer_.dispatch = BaseSyncTarget.dispatch;
|
||||
this.initState_ = 'ready';
|
||||
return this.synchronizer_;
|
||||
} catch (error) {
|
||||
this.initState_ = 'error';
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async syncStarted() {
|
||||
if (!this.synchronizer_) return false;
|
||||
if (!(await this.isAuthenticated())) return false;
|
||||
const sync = await this.synchronizer();
|
||||
return sync.state() != 'idle';
|
||||
}
|
||||
}
|
@ -1,8 +1,9 @@
|
||||
import Resource from './models/Resource';
|
||||
import shim from './shim';
|
||||
import Database, { SqlQuery } from './database';
|
||||
|
||||
const { promiseChain } = require('./promise-utils.js');
|
||||
const { Database } = require('./database.js');
|
||||
const { sprintf } = require('sprintf-js');
|
||||
const Resource = require('./models/Resource').default;
|
||||
const shim = require('./shim').default;
|
||||
|
||||
const structureSql = `
|
||||
CREATE TABLE folders (
|
||||
@ -118,13 +119,28 @@ CREATE TABLE version (
|
||||
INSERT INTO version (version) VALUES (1);
|
||||
`;
|
||||
|
||||
class JoplinDatabase extends Database {
|
||||
constructor(driver) {
|
||||
interface TableField {
|
||||
name: string;
|
||||
type: number;
|
||||
default: any;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
export default class JoplinDatabase extends Database {
|
||||
|
||||
public static TYPE_INT = 1;
|
||||
public static TYPE_TEXT = 2;
|
||||
public static TYPE_NUMERIC = 3;
|
||||
|
||||
private initialized_ = false;
|
||||
private tableFields_: Record<string, TableField[]> = null;
|
||||
private version_: number = null;
|
||||
private tableFieldNames_: Record<string, string[]> = {};
|
||||
private tableDescriptions_: any;
|
||||
|
||||
constructor(driver: any) {
|
||||
super(driver);
|
||||
this.initialized_ = false;
|
||||
this.tableFields_ = null;
|
||||
this.version_ = null;
|
||||
this.tableFieldNames_ = {};
|
||||
|
||||
// this.extensionToLoad = './build/lib/sql-extensions/spellfix';
|
||||
}
|
||||
|
||||
@ -132,12 +148,12 @@ class JoplinDatabase extends Database {
|
||||
return this.initialized_;
|
||||
}
|
||||
|
||||
async open(options) {
|
||||
async open(options: any) {
|
||||
await super.open(options);
|
||||
return this.initialize();
|
||||
}
|
||||
|
||||
tableFieldNames(tableName) {
|
||||
tableFieldNames(tableName: string) {
|
||||
if (this.tableFieldNames_[tableName]) return this.tableFieldNames_[tableName].slice();
|
||||
|
||||
const tf = this.tableFields(tableName);
|
||||
@ -150,7 +166,7 @@ class JoplinDatabase extends Database {
|
||||
return output.slice();
|
||||
}
|
||||
|
||||
tableFields(tableName, options = null) {
|
||||
tableFields(tableName: string, options: any = null) {
|
||||
if (options === null) options = {};
|
||||
|
||||
if (!this.tableFields_) throw new Error('Fields have not been loaded yet');
|
||||
@ -206,9 +222,9 @@ class JoplinDatabase extends Database {
|
||||
await this.transactionExecBatch(queries);
|
||||
}
|
||||
|
||||
createDefaultRow() {
|
||||
const row = {};
|
||||
const fields = this.tableFields('resource_local_states');
|
||||
createDefaultRow(tableName: string) {
|
||||
const row: any = {};
|
||||
const fields = this.tableFields(tableName);
|
||||
for (let i = 0; i < fields.length; i++) {
|
||||
const f = fields[i];
|
||||
row[f.name] = Database.formatValue(f.type, f.default);
|
||||
@ -216,7 +232,7 @@ class JoplinDatabase extends Database {
|
||||
return row;
|
||||
}
|
||||
|
||||
fieldByName(tableName, fieldName) {
|
||||
fieldByName(tableName: string, fieldName: string) {
|
||||
const fields = this.tableFields(tableName);
|
||||
for (const field of fields) {
|
||||
if (field.name === fieldName) return field;
|
||||
@ -224,11 +240,11 @@ class JoplinDatabase extends Database {
|
||||
throw new Error(`No such field: ${tableName}: ${fieldName}`);
|
||||
}
|
||||
|
||||
fieldDefaultValue(tableName, fieldName) {
|
||||
fieldDefaultValue(tableName: string, fieldName: string) {
|
||||
return this.fieldByName(tableName, fieldName).default;
|
||||
}
|
||||
|
||||
fieldDescription(tableName, fieldName) {
|
||||
fieldDescription(tableName: string, fieldName: string) {
|
||||
const sp = sprintf;
|
||||
|
||||
if (!this.tableDescriptions_) {
|
||||
@ -264,9 +280,9 @@ class JoplinDatabase extends Database {
|
||||
return d && d[fieldName] ? d[fieldName] : '';
|
||||
}
|
||||
|
||||
refreshTableFields(newVersion) {
|
||||
refreshTableFields(newVersion: number) {
|
||||
this.logger().info('Initializing tables...');
|
||||
const queries = [];
|
||||
const queries: SqlQuery[] = [];
|
||||
queries.push(this.wrapQuery('DELETE FROM table_fields'));
|
||||
|
||||
return this.selectAll('SELECT name FROM sqlite_master WHERE type="table"')
|
||||
@ -309,12 +325,12 @@ class JoplinDatabase extends Database {
|
||||
});
|
||||
}
|
||||
|
||||
addMigrationFile(num) {
|
||||
addMigrationFile(num: number) {
|
||||
const timestamp = Date.now();
|
||||
return { sql: 'INSERT INTO migrations (number, created_time, updated_time) VALUES (?, ?, ?)', params: [num, timestamp, timestamp] };
|
||||
}
|
||||
|
||||
async upgradeDatabase(fromVersion) {
|
||||
async upgradeDatabase(fromVersion: number) {
|
||||
// INSTRUCTIONS TO UPGRADE THE DATABASE:
|
||||
//
|
||||
// 1. Add the new version number to the existingDatabaseVersions array
|
||||
@ -353,7 +369,7 @@ class JoplinDatabase extends Database {
|
||||
const targetVersion = existingDatabaseVersions[currentVersionIndex + 1];
|
||||
this.logger().info(`Converting database to version ${targetVersion}`);
|
||||
|
||||
let queries = [];
|
||||
let queries: any[] = [];
|
||||
|
||||
if (targetVersion == 1) {
|
||||
queries = this.wrapQueries(this.sqlStringToLines(structureSql));
|
||||
@ -965,9 +981,3 @@ class JoplinDatabase extends Database {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Database.TYPE_INT = 1;
|
||||
Database.TYPE_TEXT = 2;
|
||||
Database.TYPE_NUMERIC = 3;
|
||||
|
||||
module.exports = { JoplinDatabase };
|
@ -1,163 +1,190 @@
|
||||
import shim from './shim';
|
||||
import { _ } from './locale';
|
||||
import Logger from './Logger';
|
||||
const { rtrimSlashes } = require('./path-utils.js');
|
||||
const JoplinError = require('./JoplinError');
|
||||
const { rtrimSlashes } = require('./path-utils');
|
||||
const base64 = require('base-64');
|
||||
const { stringify } = require('query-string');
|
||||
|
||||
interface JoplinServerApiOptions {
|
||||
username: Function;
|
||||
password: Function;
|
||||
baseUrl: Function;
|
||||
interface Options {
|
||||
baseUrl(): string;
|
||||
username(): string;
|
||||
password(): string;
|
||||
}
|
||||
|
||||
enum ExecOptionsResponseFormat {
|
||||
Json = 'json',
|
||||
Text = 'text',
|
||||
}
|
||||
|
||||
enum ExecOptionsTarget {
|
||||
String = 'string',
|
||||
File = 'file',
|
||||
}
|
||||
|
||||
interface ExecOptions {
|
||||
responseFormat?: ExecOptionsResponseFormat;
|
||||
target?: ExecOptionsTarget;
|
||||
path?: string;
|
||||
source?: string;
|
||||
}
|
||||
|
||||
export default class JoplinServerApi {
|
||||
|
||||
logger_: any;
|
||||
options_: JoplinServerApiOptions;
|
||||
kvStore_: any;
|
||||
private options_: Options;
|
||||
private session_: any;
|
||||
|
||||
constructor(options: JoplinServerApiOptions) {
|
||||
this.logger_ = new Logger();
|
||||
public constructor(options: Options) {
|
||||
this.options_ = options;
|
||||
this.kvStore_ = null;
|
||||
}
|
||||
|
||||
setLogger(l: any) {
|
||||
this.logger_ = l;
|
||||
}
|
||||
|
||||
logger(): any {
|
||||
return this.logger_;
|
||||
}
|
||||
|
||||
setKvStore(v: any) {
|
||||
this.kvStore_ = v;
|
||||
}
|
||||
|
||||
kvStore() {
|
||||
if (!this.kvStore_) throw new Error('JoplinServerApi.kvStore_ is not set!!');
|
||||
return this.kvStore_;
|
||||
}
|
||||
|
||||
authToken(): string {
|
||||
if (!this.options_.username() || !this.options_.password()) return null;
|
||||
try {
|
||||
// Note: Non-ASCII passwords will throw an error about Latin1 characters - https://github.com/laurent22/joplin/issues/246
|
||||
// Tried various things like the below, but it didn't work on React Native:
|
||||
// return base64.encode(utf8.encode(this.options_.username() + ':' + this.options_.password()));
|
||||
return base64.encode(`${this.options_.username()}:${this.options_.password()}`);
|
||||
} catch (error) {
|
||||
error.message = `Cannot encode username/password: ${error.message}`;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
baseUrl(): string {
|
||||
private baseUrl() {
|
||||
return rtrimSlashes(this.options_.baseUrl());
|
||||
}
|
||||
|
||||
static baseUrlFromNextcloudWebDavUrl(webDavUrl: string) {
|
||||
// http://nextcloud.local/remote.php/webdav/Joplin
|
||||
// http://nextcloud.local/index.php/apps/joplin/api
|
||||
const splitted = webDavUrl.split('/remote.php/webdav');
|
||||
if (splitted.length !== 2) throw new Error(`Unsupported WebDAV URL format: ${webDavUrl}`);
|
||||
return `${splitted[0]}/index.php/apps/joplin/api`;
|
||||
private async session() {
|
||||
// TODO: handle invalid session
|
||||
if (this.session_) return this.session_;
|
||||
|
||||
this.session_ = await this.exec('POST', 'api/sessions', null, {
|
||||
email: this.options_.username(),
|
||||
password: this.options_.password(),
|
||||
});
|
||||
|
||||
return this.session_;
|
||||
}
|
||||
|
||||
syncTargetId(settings: any) {
|
||||
const s = settings['sync.5.syncTargets'][settings['sync.5.path']];
|
||||
if (!s) throw new Error(`Joplin Nextcloud app not configured for URL: ${this.baseUrl()}`);
|
||||
return s.uuid;
|
||||
private async sessionId() {
|
||||
const session = await this.session();
|
||||
return session ? session.id : '';
|
||||
}
|
||||
|
||||
static connectionErrorMessage(error: any) {
|
||||
const msg = error && error.message ? error.message : 'Unknown error';
|
||||
return _('Could not connect to the Joplin Nextcloud app. Please check the configuration in the Synchronisation config screen. Full error was:\n\n%s', msg);
|
||||
}
|
||||
|
||||
async setupSyncTarget(webDavUrl: string) {
|
||||
return this.exec('POST', 'sync_targets', {
|
||||
webDavUrl: webDavUrl,
|
||||
public async shareFile(pathOrId: string) {
|
||||
return this.exec('POST', 'api/shares', null, {
|
||||
file_id: pathOrId,
|
||||
type: 1, // ShareType.Link
|
||||
});
|
||||
}
|
||||
|
||||
requestToCurl_(url: string, options: any) {
|
||||
const output = [];
|
||||
output.push('curl');
|
||||
output.push('-v');
|
||||
if (options.method) output.push(`-X ${options.method}`);
|
||||
if (options.headers) {
|
||||
for (const n in options.headers) {
|
||||
if (!options.headers.hasOwnProperty(n)) continue;
|
||||
output.push(`${'-H ' + '"'}${n}: ${options.headers[n]}"`);
|
||||
}
|
||||
}
|
||||
if (options.body) output.push(`${'--data ' + '\''}${options.body}'`);
|
||||
output.push(url);
|
||||
|
||||
return output.join(' ');
|
||||
public static connectionErrorMessage(error: any) {
|
||||
const msg = error && error.message ? error.message : 'Unknown error';
|
||||
return _('Could not connect to Joplin Server. Please check the Synchronisation options in the config screen. Full error was:\n\n%s', msg);
|
||||
}
|
||||
|
||||
async exec(method: string, path: string = '', body: any = null, headers: any = null, options: any = null): Promise<any> {
|
||||
public shareUrl(share: any): string {
|
||||
return `${this.baseUrl()}/shares/${share.id}`;
|
||||
}
|
||||
|
||||
// private requestToCurl_(url: string, options: any) {
|
||||
// const output = [];
|
||||
// output.push('curl');
|
||||
// output.push('-v');
|
||||
// if (options.method) output.push(`-X ${options.method}`);
|
||||
// if (options.headers) {
|
||||
// for (const n in options.headers) {
|
||||
// if (!options.headers.hasOwnProperty(n)) continue;
|
||||
// output.push(`${'-H ' + '"'}${n}: ${options.headers[n]}"`);
|
||||
// }
|
||||
// }
|
||||
// if (options.body) output.push(`${'--data ' + '\''}${JSON.stringify(options.body)}'`);
|
||||
// output.push(url);
|
||||
|
||||
// return output.join(' ');
|
||||
// }
|
||||
|
||||
public async exec(method: string, path: string = '', query: Record<string, any> = null, body: any = null, headers: any = null, options: ExecOptions = null) {
|
||||
if (headers === null) headers = {};
|
||||
if (options === null) options = {};
|
||||
if (!options.responseFormat) options.responseFormat = ExecOptionsResponseFormat.Json;
|
||||
if (!options.target) options.target = ExecOptionsTarget.String;
|
||||
|
||||
const authToken = this.authToken();
|
||||
let sessionId = '';
|
||||
if (path !== 'api/sessions' && !sessionId) {
|
||||
sessionId = await this.sessionId();
|
||||
}
|
||||
|
||||
if (authToken) headers['Authorization'] = `Basic ${authToken}`;
|
||||
|
||||
headers['Content-Type'] = 'application/json';
|
||||
|
||||
if (typeof body === 'object' && body !== null) body = JSON.stringify(body);
|
||||
if (sessionId) headers['X-API-AUTH'] = sessionId;
|
||||
|
||||
const fetchOptions: any = {};
|
||||
fetchOptions.headers = headers;
|
||||
fetchOptions.method = method;
|
||||
if (options.path) fetchOptions.path = options.path;
|
||||
if (body) fetchOptions.body = body;
|
||||
|
||||
const url = `${this.baseUrl()}/${path}`;
|
||||
if (body) {
|
||||
if (typeof body === 'object') {
|
||||
fetchOptions.body = JSON.stringify(body);
|
||||
fetchOptions.headers['Content-Type'] = 'application/json';
|
||||
} else {
|
||||
fetchOptions.body = body;
|
||||
}
|
||||
|
||||
let response = null;
|
||||
fetchOptions.headers['Content-Length'] = `${shim.stringByteLength(fetchOptions.body)}`;
|
||||
}
|
||||
|
||||
// console.info('WebDAV Call', method + ' ' + url, headers, options);
|
||||
console.info(this.requestToCurl_(url, fetchOptions));
|
||||
let url = `${this.baseUrl()}/${path}`;
|
||||
|
||||
if (typeof body === 'string') fetchOptions.headers['Content-Length'] = `${shim.stringByteLength(body)}`;
|
||||
response = await shim.fetch(url, fetchOptions);
|
||||
if (query) {
|
||||
url += url.indexOf('?') < 0 ? '?' : '&';
|
||||
url += stringify(query);
|
||||
}
|
||||
|
||||
let response: any = null;
|
||||
|
||||
// console.info('Joplin API Call', `${method} ${url}`, headers, options);
|
||||
// console.info(this.requestToCurl_(url, fetchOptions));
|
||||
|
||||
if (options.source == 'file' && (method == 'POST' || method == 'PUT')) {
|
||||
if (fetchOptions.path) {
|
||||
const fileStat = await shim.fsDriver().stat(fetchOptions.path);
|
||||
if (fileStat) fetchOptions.headers['Content-Length'] = `${fileStat.size}`;
|
||||
}
|
||||
response = await shim.uploadBlob(url, fetchOptions);
|
||||
} else if (options.target == 'string') {
|
||||
if (typeof body === 'string') fetchOptions.headers['Content-Length'] = `${shim.stringByteLength(body)}`;
|
||||
response = await shim.fetch(url, fetchOptions);
|
||||
} else {
|
||||
// file
|
||||
response = await shim.fetchBlob(url, fetchOptions);
|
||||
}
|
||||
|
||||
const responseText = await response.text();
|
||||
|
||||
const responseJson_: any = null;
|
||||
// console.info('Joplin API Response', responseText);
|
||||
|
||||
// Creates an error object with as much data as possible as it will appear in the log, which will make debugging easier
|
||||
const newError = (message: string, code: number = 0) => {
|
||||
// Gives a shorter response for error messages. Useful for cases where a full HTML page is accidentally loaded instead of
|
||||
// JSON. That way the error message will still show there's a problem but without filling up the log or screen.
|
||||
const shortResponseText = (`${responseText}`).substr(0, 1024);
|
||||
return new JoplinError(`${method} ${path}: ${message} (${code}): ${shortResponseText}`, code);
|
||||
};
|
||||
|
||||
let responseJson_: any = null;
|
||||
const loadResponseJson = async () => {
|
||||
if (!responseText) return null;
|
||||
if (responseJson_) return responseJson_;
|
||||
try {
|
||||
return JSON.parse(responseText);
|
||||
} catch (error) {
|
||||
throw new Error(`Cannot parse JSON: ${responseText.substr(0, 8192)}`);
|
||||
}
|
||||
};
|
||||
|
||||
const newError = (message: string, code: number = 0) => {
|
||||
return new JoplinError(`${method} ${path}: ${message} (${code})`, code);
|
||||
responseJson_ = JSON.parse(responseText);
|
||||
if (!responseJson_) throw newError('Cannot parse JSON response', response.status);
|
||||
return responseJson_;
|
||||
};
|
||||
|
||||
if (!response.ok) {
|
||||
if (options.target === 'file') throw newError('fetchBlob error', response.status);
|
||||
|
||||
let json = null;
|
||||
try {
|
||||
json = await loadResponseJson();
|
||||
} catch (error) {
|
||||
throw newError(`Unknown error: ${responseText.substr(0, 8192)}`, response.status);
|
||||
// Just send back the plain text in newErro()
|
||||
}
|
||||
|
||||
const trace = json.stacktrace ? `\n${json.stacktrace}` : '';
|
||||
let message = json.error;
|
||||
if (!message) message = responseText.substr(0, 8192);
|
||||
throw newError(message + trace, response.status);
|
||||
if (json && json.error) {
|
||||
throw newError(`${json.error}`, json.code ? json.code : response.status);
|
||||
}
|
||||
|
||||
throw newError('Unknown error', response.status);
|
||||
}
|
||||
|
||||
if (options.responseFormat === 'text') return responseText;
|
||||
|
||||
const output = await loadResponseJson();
|
||||
return output;
|
||||
}
|
||||
|
@ -1,174 +0,0 @@
|
||||
import shim from './shim';
|
||||
const { rtrimSlashes } = require('./path-utils.js');
|
||||
const JoplinError = require('./JoplinError');
|
||||
const { stringify } = require('query-string');
|
||||
|
||||
interface Options {
|
||||
baseUrl(): string;
|
||||
username(): string;
|
||||
password(): string;
|
||||
}
|
||||
|
||||
enum ExecOptionsResponseFormat {
|
||||
Json = 'json',
|
||||
Text = 'text',
|
||||
}
|
||||
|
||||
enum ExecOptionsTarget {
|
||||
String = 'string',
|
||||
File = 'file',
|
||||
}
|
||||
|
||||
interface ExecOptions {
|
||||
responseFormat?: ExecOptionsResponseFormat;
|
||||
target?: ExecOptionsTarget;
|
||||
path?: string;
|
||||
source?: string;
|
||||
}
|
||||
|
||||
export default class JoplinServerApi {
|
||||
|
||||
private options_: Options;
|
||||
private session_: any;
|
||||
|
||||
public constructor(options: Options) {
|
||||
this.options_ = options;
|
||||
}
|
||||
|
||||
private baseUrl() {
|
||||
return rtrimSlashes(this.options_.baseUrl());
|
||||
}
|
||||
|
||||
private async session() {
|
||||
// TODO: handle invalid session
|
||||
if (this.session_) return this.session_;
|
||||
|
||||
this.session_ = await this.exec('POST', 'api/sessions', null, {
|
||||
email: this.options_.username(),
|
||||
password: this.options_.password(),
|
||||
});
|
||||
|
||||
return this.session_;
|
||||
}
|
||||
|
||||
private async sessionId() {
|
||||
const session = await this.session();
|
||||
return session ? session.id : '';
|
||||
}
|
||||
|
||||
// private requestToCurl_(url: string, options: any) {
|
||||
// const output = [];
|
||||
// output.push('curl');
|
||||
// output.push('-v');
|
||||
// if (options.method) output.push(`-X ${options.method}`);
|
||||
// if (options.headers) {
|
||||
// for (const n in options.headers) {
|
||||
// if (!options.headers.hasOwnProperty(n)) continue;
|
||||
// output.push(`${'-H ' + '"'}${n}: ${options.headers[n]}"`);
|
||||
// }
|
||||
// }
|
||||
// if (options.body) output.push(`${'--data ' + '\''}${JSON.stringify(options.body)}'`);
|
||||
// output.push(url);
|
||||
|
||||
// return output.join(' ');
|
||||
// }
|
||||
|
||||
public async exec(method: string, path: string = '', query: Record<string, any> = null, body: any = null, headers: any = null, options: ExecOptions = null) {
|
||||
if (headers === null) headers = {};
|
||||
if (options === null) options = {};
|
||||
if (!options.responseFormat) options.responseFormat = ExecOptionsResponseFormat.Json;
|
||||
if (!options.target) options.target = ExecOptionsTarget.String;
|
||||
|
||||
let sessionId = '';
|
||||
if (path !== 'api/sessions' && !sessionId) {
|
||||
sessionId = await this.sessionId();
|
||||
}
|
||||
|
||||
if (sessionId) headers['X-API-AUTH'] = sessionId;
|
||||
|
||||
const fetchOptions: any = {};
|
||||
fetchOptions.headers = headers;
|
||||
fetchOptions.method = method;
|
||||
if (options.path) fetchOptions.path = options.path;
|
||||
|
||||
if (body) {
|
||||
if (typeof body === 'object') {
|
||||
fetchOptions.body = JSON.stringify(body);
|
||||
fetchOptions.headers['Content-Type'] = 'application/json';
|
||||
} else {
|
||||
fetchOptions.body = body;
|
||||
}
|
||||
|
||||
fetchOptions.headers['Content-Length'] = `${shim.stringByteLength(fetchOptions.body)}`;
|
||||
}
|
||||
|
||||
let url = `${this.baseUrl()}/${path}`;
|
||||
|
||||
if (query) {
|
||||
url += url.indexOf('?') < 0 ? '?' : '&';
|
||||
url += stringify(query);
|
||||
}
|
||||
|
||||
let response: any = null;
|
||||
|
||||
// console.info('Joplin API Call', `${method} ${url}`, headers, options);
|
||||
// console.info(this.requestToCurl_(url, fetchOptions));
|
||||
|
||||
if (options.source == 'file' && (method == 'POST' || method == 'PUT')) {
|
||||
if (fetchOptions.path) {
|
||||
const fileStat = await shim.fsDriver().stat(fetchOptions.path);
|
||||
if (fileStat) fetchOptions.headers['Content-Length'] = `${fileStat.size}`;
|
||||
}
|
||||
response = await shim.uploadBlob(url, fetchOptions);
|
||||
} else if (options.target == 'string') {
|
||||
if (typeof body === 'string') fetchOptions.headers['Content-Length'] = `${shim.stringByteLength(body)}`;
|
||||
response = await shim.fetch(url, fetchOptions);
|
||||
} else {
|
||||
// file
|
||||
response = await shim.fetchBlob(url, fetchOptions);
|
||||
}
|
||||
|
||||
const responseText = await response.text();
|
||||
|
||||
// console.info('Joplin API Response', responseText);
|
||||
|
||||
// Creates an error object with as much data as possible as it will appear in the log, which will make debugging easier
|
||||
const newError = (message: string, code: number = 0) => {
|
||||
// Gives a shorter response for error messages. Useful for cases where a full HTML page is accidentally loaded instead of
|
||||
// JSON. That way the error message will still show there's a problem but without filling up the log or screen.
|
||||
const shortResponseText = (`${responseText}`).substr(0, 1024);
|
||||
return new JoplinError(`${method} ${path}: ${message} (${code}): ${shortResponseText}`, code);
|
||||
};
|
||||
|
||||
let responseJson_: any = null;
|
||||
const loadResponseJson = async () => {
|
||||
if (!responseText) return null;
|
||||
if (responseJson_) return responseJson_;
|
||||
responseJson_ = JSON.parse(responseText);
|
||||
if (!responseJson_) throw newError('Cannot parse JSON response', response.status);
|
||||
return responseJson_;
|
||||
};
|
||||
|
||||
if (!response.ok) {
|
||||
if (options.target === 'file') throw newError('fetchBlob error', response.status);
|
||||
|
||||
let json = null;
|
||||
try {
|
||||
json = await loadResponseJson();
|
||||
} catch (error) {
|
||||
// Just send back the plain text in newErro()
|
||||
}
|
||||
|
||||
if (json && json.message) {
|
||||
throw newError(`${json.message}`, response.status);
|
||||
}
|
||||
|
||||
throw newError('Unknown error', response.status);
|
||||
}
|
||||
|
||||
if (options.responseFormat === 'text') return responseText;
|
||||
|
||||
const output = await loadResponseJson();
|
||||
return output;
|
||||
}
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
const BaseSyncTarget = require('./BaseSyncTarget').default;
|
||||
const { _ } = require('./locale');
|
||||
const Setting = require('./models/Setting').default;
|
||||
const { FileApi } = require('./file-api.js');
|
||||
|
@ -1,4 +1,4 @@
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
const BaseSyncTarget = require('./BaseSyncTarget').default;
|
||||
const { _ } = require('./locale');
|
||||
const DropboxApi = require('./DropboxApi');
|
||||
const Setting = require('./models/Setting').default;
|
||||
|
@ -1,4 +1,4 @@
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
const BaseSyncTarget = require('./BaseSyncTarget').default;
|
||||
const { _ } = require('./locale');
|
||||
const Setting = require('./models/Setting').default;
|
||||
const { FileApi } = require('./file-api.js');
|
||||
|
@ -2,10 +2,9 @@ import FileApiDriverJoplinServer from './file-api-driver-joplinServer';
|
||||
import Setting from './models/Setting';
|
||||
import Synchronizer from './Synchronizer';
|
||||
import { _ } from './locale.js';
|
||||
import JoplinServerApi from './JoplinServerApi2';
|
||||
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
const { FileApi } = require('./file-api.js');
|
||||
import JoplinServerApi from './JoplinServerApi';
|
||||
import BaseSyncTarget from './BaseSyncTarget';
|
||||
import { FileApi } from './file-api';
|
||||
|
||||
interface FileApiOptions {
|
||||
path(): string;
|
||||
@ -16,27 +15,31 @@ interface FileApiOptions {
|
||||
|
||||
export default class SyncTargetJoplinServer extends BaseSyncTarget {
|
||||
|
||||
static id() {
|
||||
public static id() {
|
||||
return 9;
|
||||
}
|
||||
|
||||
static supportsConfigCheck() {
|
||||
public static supportsConfigCheck() {
|
||||
return true;
|
||||
}
|
||||
|
||||
static targetName() {
|
||||
public static targetName() {
|
||||
return 'joplinServer';
|
||||
}
|
||||
|
||||
static label() {
|
||||
public static label() {
|
||||
return _('Joplin Server');
|
||||
}
|
||||
|
||||
async isAuthenticated() {
|
||||
public async isAuthenticated() {
|
||||
return true;
|
||||
}
|
||||
|
||||
static async newFileApi_(options: FileApiOptions) {
|
||||
public async fileApi(): Promise<FileApi> {
|
||||
return super.fileApi();
|
||||
}
|
||||
|
||||
private static async newFileApi_(options: FileApiOptions) {
|
||||
const apiOptions = {
|
||||
baseUrl: () => options.path(),
|
||||
username: () => options.username(),
|
||||
@ -51,7 +54,7 @@ export default class SyncTargetJoplinServer extends BaseSyncTarget {
|
||||
return fileApi;
|
||||
}
|
||||
|
||||
static async checkConfig(options: FileApiOptions) {
|
||||
public static async checkConfig(options: FileApiOptions) {
|
||||
const output = {
|
||||
ok: false,
|
||||
errorMessage: '',
|
||||
@ -72,7 +75,7 @@ export default class SyncTargetJoplinServer extends BaseSyncTarget {
|
||||
return output;
|
||||
}
|
||||
|
||||
async initFileApi() {
|
||||
protected async initFileApi() {
|
||||
const fileApi = await SyncTargetJoplinServer.newFileApi_({
|
||||
path: () => Setting.value('sync.9.path'),
|
||||
username: () => Setting.value('sync.9.username'),
|
||||
@ -85,7 +88,7 @@ export default class SyncTargetJoplinServer extends BaseSyncTarget {
|
||||
return fileApi;
|
||||
}
|
||||
|
||||
async initSynchronizer() {
|
||||
protected async initSynchronizer() {
|
||||
return new Synchronizer(this.db(), await this.fileApi(), Setting.value('appType'));
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
const BaseSyncTarget = require('./BaseSyncTarget').default;
|
||||
const Setting = require('./models/Setting').default;
|
||||
const { FileApi } = require('./file-api.js');
|
||||
const { FileApiDriverMemory } = require('./file-api-driver-memory.js');
|
||||
|
@ -1,12 +1,11 @@
|
||||
// The Nextcloud sync target is essentially a wrapper over the WebDAV sync target,
|
||||
// thus all the calls to SyncTargetWebDAV to avoid duplicate code.
|
||||
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
const BaseSyncTarget = require('./BaseSyncTarget').default;
|
||||
const { _ } = require('./locale');
|
||||
const Setting = require('./models/Setting').default;
|
||||
const Synchronizer = require('./Synchronizer').default;
|
||||
const SyncTargetWebDAV = require('./SyncTargetWebDAV');
|
||||
const JoplinServerApi = require('./JoplinServerApi.js').default;
|
||||
|
||||
class SyncTargetNextcloud extends BaseSyncTarget {
|
||||
|
||||
@ -50,24 +49,6 @@ class SyncTargetNextcloud extends BaseSyncTarget {
|
||||
return new Synchronizer(this.db(), await this.fileApi(), Setting.value('appType'));
|
||||
}
|
||||
|
||||
async appApi(settings = null) {
|
||||
const useCache = !settings;
|
||||
|
||||
if (this.appApi_ && useCache) return this.appApi_;
|
||||
|
||||
const appApi = new JoplinServerApi({
|
||||
baseUrl: () => JoplinServerApi.baseUrlFromNextcloudWebDavUrl(settings ? settings['sync.5.path'] : Setting.value('sync.5.path')),
|
||||
username: () => settings ? settings['sync.5.username'] : Setting.value('sync.5.username'),
|
||||
password: () => settings ? settings['sync.5.password'] : Setting.value('sync.5.password'),
|
||||
});
|
||||
|
||||
appApi.setLogger(this.logger());
|
||||
|
||||
if (useCache) this.appApi_ = appApi;
|
||||
|
||||
return appApi;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = SyncTargetNextcloud;
|
||||
|
@ -2,14 +2,16 @@ import OneDriveApi from './onedrive-api';
|
||||
import { _ } from './locale';
|
||||
import Setting from './models/Setting';
|
||||
import Synchronizer from './Synchronizer';
|
||||
import BaseSyncTarget from './BaseSyncTarget';
|
||||
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
const { parameters } = require('./parameters.js');
|
||||
const { FileApi } = require('./file-api.js');
|
||||
const { FileApiDriverOneDrive } = require('./file-api-driver-onedrive.js');
|
||||
|
||||
export default class SyncTargetOneDrive extends BaseSyncTarget {
|
||||
|
||||
private api_: any;
|
||||
|
||||
static id() {
|
||||
return 3;
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
const BaseSyncTarget = require('./BaseSyncTarget.js');
|
||||
const BaseSyncTarget = require('./BaseSyncTarget').default;
|
||||
const { _ } = require('./locale');
|
||||
const Setting = require('./models/Setting').default;
|
||||
const { FileApi } = require('./file-api.js');
|
||||
|
@ -16,6 +16,9 @@ import MasterKey from './models/MasterKey';
|
||||
import BaseModel from './BaseModel';
|
||||
const { sprintf } = require('sprintf-js');
|
||||
import time from './time';
|
||||
import ResourceService from './services/ResourceService';
|
||||
import EncryptionService from './services/EncryptionService';
|
||||
import NoteResource from './models/NoteResource';
|
||||
const JoplinError = require('./JoplinError');
|
||||
const TaskQueue = require('./TaskQueue');
|
||||
const { Dirnames } = require('./services/synchronizer/utils/types');
|
||||
@ -39,7 +42,8 @@ export default class Synchronizer {
|
||||
private clientId_: string;
|
||||
private lockHandler_: LockHandler;
|
||||
private migrationHandler_: MigrationHandler;
|
||||
private encryptionService_: any = null;
|
||||
private encryptionService_: EncryptionService = null;
|
||||
private resourceService_: ResourceService = null;
|
||||
private syncTargetIsLocked_: boolean = false;
|
||||
|
||||
// Debug flags are used to test certain hard-to-test conditions
|
||||
@ -104,7 +108,7 @@ export default class Synchronizer {
|
||||
return this.appType_ === 'mobile' ? 100 * 1000 * 1000 : Infinity;
|
||||
}
|
||||
|
||||
setEncryptionService(v: any) {
|
||||
public setEncryptionService(v: any) {
|
||||
this.encryptionService_ = v;
|
||||
}
|
||||
|
||||
@ -112,6 +116,14 @@ export default class Synchronizer {
|
||||
return this.encryptionService_;
|
||||
}
|
||||
|
||||
public setResourceService(v: ResourceService) {
|
||||
this.resourceService_ = v;
|
||||
}
|
||||
|
||||
protected resourceService(): ResourceService {
|
||||
return this.resourceService_;
|
||||
}
|
||||
|
||||
async waitForSyncToFinish() {
|
||||
if (this.state() === 'idle') return;
|
||||
|
||||
@ -220,7 +232,7 @@ export default class Synchronizer {
|
||||
const iid = shim.setInterval(() => {
|
||||
if (this.state() == 'idle') {
|
||||
shim.clearInterval(iid);
|
||||
resolve();
|
||||
resolve(null);
|
||||
}
|
||||
}, 100);
|
||||
});
|
||||
@ -332,6 +344,19 @@ export default class Synchronizer {
|
||||
return `${Dirnames.Resources}/${resourceId}`;
|
||||
};
|
||||
|
||||
// We index resources and apply the "is_shared" flag before syncing
|
||||
// because it's going to affect what's sent encrypted, and what's sent
|
||||
// plain text.
|
||||
try {
|
||||
if (this.resourceService()) {
|
||||
this.logger().info('Indexing resources...');
|
||||
await this.resourceService().indexNoteResources();
|
||||
await NoteResource.applySharedStatusToLinkedResources();
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger().error('Error indexing resources:', error);
|
||||
}
|
||||
|
||||
let errorToThrow = null;
|
||||
let syncLock = null;
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { utils, CommandRuntime, CommandDeclaration, CommandContext } from '../services/CommandService';
|
||||
import { _ } from '../locale';
|
||||
const { reg } = require('../registry.js');
|
||||
import { reg } from '../registry';
|
||||
|
||||
export const declaration: CommandDeclaration = {
|
||||
name: 'synchronize',
|
||||
@ -43,7 +43,7 @@ export const runtime = (): CommandRuntime => {
|
||||
sync.cancel();
|
||||
return 'cancel';
|
||||
} else {
|
||||
reg.scheduleSync(0);
|
||||
void reg.scheduleSync(0);
|
||||
return 'sync';
|
||||
}
|
||||
},
|
||||
|
@ -3,7 +3,6 @@ const SyncTargetRegistry = require('../../SyncTargetRegistry');
|
||||
const ObjectUtils = require('../../ObjectUtils');
|
||||
const { _ } = require('../../locale');
|
||||
const { createSelector } = require('reselect');
|
||||
const { reg } = require('../../registry');
|
||||
|
||||
const shared = {};
|
||||
|
||||
@ -32,7 +31,7 @@ shared.checkSyncConfig = async function(comp, settings) {
|
||||
comp.setState({ checkSyncConfigResult: result });
|
||||
|
||||
if (result.ok) {
|
||||
await shared.checkNextcloudApp(comp, settings);
|
||||
// await shared.checkNextcloudApp(comp, settings);
|
||||
// Users often expect config to be auto-saved at this point, if the config check was successful
|
||||
shared.saveSettings(comp);
|
||||
}
|
||||
@ -54,30 +53,6 @@ shared.checkSyncConfigMessages = function(comp) {
|
||||
return output;
|
||||
};
|
||||
|
||||
shared.checkNextcloudApp = async function(comp, settings) {
|
||||
if (settings['sync.target'] !== 5) return;
|
||||
|
||||
comp.setState({ checkNextcloudAppResult: 'checking' });
|
||||
let result = null;
|
||||
const appApi = await reg.syncTargetNextcloud().appApi(settings);
|
||||
|
||||
try {
|
||||
result = await appApi.setupSyncTarget(settings['sync.5.path']);
|
||||
} catch (error) {
|
||||
reg.logger().error('Could not setup sync target:', error);
|
||||
result = { error: error.message };
|
||||
}
|
||||
|
||||
const newSyncTargets = Object.assign({}, settings['sync.5.syncTargets']);
|
||||
newSyncTargets[settings['sync.5.path']] = result;
|
||||
shared.updateSettingValue(comp, 'sync.5.syncTargets', newSyncTargets);
|
||||
|
||||
// Also immediately save the result as this is most likely what the user would expect
|
||||
Setting.setValue('sync.5.syncTargets', newSyncTargets);
|
||||
|
||||
comp.setState({ checkNextcloudAppResult: 'done' });
|
||||
};
|
||||
|
||||
shared.updateSettingValue = function(comp, key, value) {
|
||||
comp.setState(state => {
|
||||
const settings = Object.assign({}, state.settings);
|
||||
|
@ -1,321 +1,358 @@
|
||||
const Logger = require('./Logger').default;
|
||||
const time = require('./time').default;
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const Logger_1 = require("./Logger");
|
||||
const time_1 = require("./time");
|
||||
const shim_1 = require("./shim");
|
||||
const Mutex = require('async-mutex').Mutex;
|
||||
const shim = require('./shim').default;
|
||||
|
||||
class Database {
|
||||
constructor(driver) {
|
||||
this.debugMode_ = false;
|
||||
this.sqlQueryLogEnabled_ = false;
|
||||
this.driver_ = driver;
|
||||
this.logger_ = new Logger();
|
||||
this.logExcludedQueryTypes_ = [];
|
||||
this.batchTransactionMutex_ = new Mutex();
|
||||
this.profilingEnabled_ = false;
|
||||
this.queryId_ = 1;
|
||||
}
|
||||
|
||||
setLogExcludedQueryTypes(v) {
|
||||
this.logExcludedQueryTypes_ = v;
|
||||
}
|
||||
|
||||
// Converts the SQLite error to a regular JS error
|
||||
// so that it prints a stacktrace when passed to
|
||||
// console.error()
|
||||
sqliteErrorToJsError(error, sql = null, params = null) {
|
||||
return this.driver().sqliteErrorToJsError(error, sql, params);
|
||||
}
|
||||
|
||||
setLogger(l) {
|
||||
this.logger_ = l;
|
||||
}
|
||||
|
||||
logger() {
|
||||
return this.logger_;
|
||||
}
|
||||
|
||||
driver() {
|
||||
return this.driver_;
|
||||
}
|
||||
|
||||
async open(options) {
|
||||
try {
|
||||
await this.driver().open(options);
|
||||
} catch (error) {
|
||||
throw new Error(`Cannot open database: ${error.message}: ${JSON.stringify(options)}`);
|
||||
}
|
||||
|
||||
this.logger().info('Database was open successfully');
|
||||
}
|
||||
|
||||
escapeField(field) {
|
||||
if (field == '*') return '*';
|
||||
const p = field.split('.');
|
||||
if (p.length == 1) return `\`${field}\``;
|
||||
if (p.length == 2) return `${p[0]}.\`${p[1]}\``;
|
||||
|
||||
throw new Error(`Invalid field format: ${field}`);
|
||||
}
|
||||
|
||||
escapeFields(fields) {
|
||||
if (fields == '*') return '*';
|
||||
|
||||
const output = [];
|
||||
for (let i = 0; i < fields.length; i++) {
|
||||
output.push(this.escapeField(fields[i]));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
async tryCall(callName, sql, params) {
|
||||
if (typeof sql === 'object') {
|
||||
params = sql.params;
|
||||
sql = sql.sql;
|
||||
}
|
||||
|
||||
let waitTime = 50;
|
||||
let totalWaitTime = 0;
|
||||
const callStartTime = Date.now();
|
||||
let profilingTimeoutId = null;
|
||||
while (true) {
|
||||
try {
|
||||
this.logQuery(sql, params);
|
||||
|
||||
const queryId = this.queryId_++;
|
||||
if (this.profilingEnabled_) {
|
||||
console.info(`SQL START ${queryId}`, sql, params);
|
||||
|
||||
profilingTimeoutId = shim.setInterval(() => {
|
||||
console.warn(`SQL ${queryId} has been running for ${Date.now() - callStartTime}: ${sql}`);
|
||||
}, 3000);
|
||||
}
|
||||
|
||||
const result = await this.driver()[callName](sql, params);
|
||||
|
||||
if (this.profilingEnabled_) {
|
||||
shim.clearInterval(profilingTimeoutId);
|
||||
profilingTimeoutId = null;
|
||||
const elapsed = Date.now() - callStartTime;
|
||||
if (elapsed > 10) console.info(`SQL END ${queryId}`, elapsed, sql, params);
|
||||
}
|
||||
|
||||
return result; // No exception was thrown
|
||||
} catch (error) {
|
||||
if (error && (error.code == 'SQLITE_IOERR' || error.code == 'SQLITE_BUSY')) {
|
||||
if (totalWaitTime >= 20000) throw this.sqliteErrorToJsError(error, sql, params);
|
||||
// NOTE: don't put logger statements here because it might log to the database, which
|
||||
// could result in an error being thrown again.
|
||||
// this.logger().warn(sprintf('Error %s: will retry in %s milliseconds', error.code, waitTime));
|
||||
// this.logger().warn('Error was: ' + error.toString());
|
||||
await time.msleep(waitTime);
|
||||
totalWaitTime += waitTime;
|
||||
waitTime *= 1.5;
|
||||
} else {
|
||||
throw this.sqliteErrorToJsError(error, sql, params);
|
||||
}
|
||||
} finally {
|
||||
if (profilingTimeoutId) shim.clearInterval(profilingTimeoutId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async selectOne(sql, params = null) {
|
||||
return this.tryCall('selectOne', sql, params);
|
||||
}
|
||||
|
||||
async loadExtension(/* path */) {
|
||||
return; // Disabled for now as fuzzy search extension is not in use
|
||||
|
||||
// let result = null;
|
||||
// try {
|
||||
// result = await this.driver().loadExtension(path);
|
||||
// return result;
|
||||
// } catch (e) {
|
||||
// throw new Error(`Could not load extension ${path}`);
|
||||
// }
|
||||
}
|
||||
|
||||
async selectAll(sql, params = null) {
|
||||
return this.tryCall('selectAll', sql, params);
|
||||
}
|
||||
|
||||
async selectAllFields(sql, params, field) {
|
||||
const rows = await this.tryCall('selectAll', sql, params);
|
||||
const output = [];
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
const v = rows[i][field];
|
||||
if (!v) throw new Error(`No such field: ${field}. Query was: ${sql}`);
|
||||
output.push(rows[i][field]);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
async exec(sql, params = null) {
|
||||
return this.tryCall('exec', sql, params);
|
||||
}
|
||||
|
||||
async transactionExecBatch(queries) {
|
||||
if (queries.length <= 0) return;
|
||||
|
||||
if (queries.length == 1) {
|
||||
const q = this.wrapQuery(queries[0]);
|
||||
await this.exec(q.sql, q.params);
|
||||
return;
|
||||
}
|
||||
|
||||
// There can be only one transaction running at a time so use a mutex
|
||||
const release = await this.batchTransactionMutex_.acquire();
|
||||
|
||||
try {
|
||||
await this.exec('BEGIN TRANSACTION');
|
||||
|
||||
for (let i = 0; i < queries.length; i++) {
|
||||
const query = this.wrapQuery(queries[i]);
|
||||
await this.exec(query.sql, query.params);
|
||||
}
|
||||
|
||||
await this.exec('COMMIT');
|
||||
} catch (error) {
|
||||
await this.exec('ROLLBACK');
|
||||
throw error;
|
||||
} finally {
|
||||
release();
|
||||
}
|
||||
}
|
||||
|
||||
static enumId(type, s) {
|
||||
if (type == 'settings') {
|
||||
if (s == 'int') return 1;
|
||||
if (s == 'string') return 2;
|
||||
}
|
||||
if (type == 'fieldType') {
|
||||
if (s) s = s.toUpperCase();
|
||||
if (s == 'INTEGER') s = 'INT';
|
||||
if (!(`TYPE_${s}` in this)) throw new Error(`Unkonwn fieldType: ${s}`);
|
||||
return this[`TYPE_${s}`];
|
||||
}
|
||||
if (type == 'syncTarget') {
|
||||
if (s == 'memory') return 1;
|
||||
if (s == 'filesystem') return 2;
|
||||
if (s == 'onedrive') return 3;
|
||||
}
|
||||
throw new Error(`Unknown enum type or value: ${type}, ${s}`);
|
||||
}
|
||||
|
||||
static enumName(type, id) {
|
||||
if (type === 'fieldType') {
|
||||
if (id === Database.TYPE_UNKNOWN) return 'unknown';
|
||||
if (id === Database.TYPE_INT) return 'int';
|
||||
if (id === Database.TYPE_TEXT) return 'text';
|
||||
if (id === Database.TYPE_NUMERIC) return 'numeric';
|
||||
throw new Error(`Invalid type id: ${id}`);
|
||||
}
|
||||
}
|
||||
|
||||
static formatValue(type, value) {
|
||||
if (value === null || value === undefined) return null;
|
||||
if (type == this.TYPE_INT) return Number(value);
|
||||
if (type == this.TYPE_TEXT) return value;
|
||||
if (type == this.TYPE_NUMERIC) return Number(value);
|
||||
throw new Error(`Unknown type: ${type}`);
|
||||
}
|
||||
|
||||
sqlStringToLines(sql) {
|
||||
const output = [];
|
||||
const lines = sql.split('\n');
|
||||
let statement = '';
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (line == '') continue;
|
||||
if (line.substr(0, 2) == '--') continue;
|
||||
statement += line.trim();
|
||||
if (line[line.length - 1] == ',') statement += ' ';
|
||||
if (line[line.length - 1] == ';') {
|
||||
output.push(statement);
|
||||
statement = '';
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
logQuery(sql, params = null) {
|
||||
if (!this.sqlQueryLogEnabled_) return;
|
||||
|
||||
if (this.logExcludedQueryTypes_.length) {
|
||||
const temp = sql.toLowerCase();
|
||||
for (let i = 0; i < this.logExcludedQueryTypes_.length; i++) {
|
||||
if (temp.indexOf(this.logExcludedQueryTypes_[i].toLowerCase()) === 0) return;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger().debug(sql);
|
||||
if (params !== null && params.length) this.logger().debug(JSON.stringify(params));
|
||||
}
|
||||
|
||||
static insertQuery(tableName, data) {
|
||||
if (!data || !Object.keys(data).length) throw new Error('Data is empty');
|
||||
|
||||
let keySql = '';
|
||||
let valueSql = '';
|
||||
const params = [];
|
||||
for (const key in data) {
|
||||
if (!data.hasOwnProperty(key)) continue;
|
||||
if (key[key.length - 1] == '_') continue;
|
||||
if (keySql != '') keySql += ', ';
|
||||
if (valueSql != '') valueSql += ', ';
|
||||
keySql += `\`${key}\``;
|
||||
valueSql += '?';
|
||||
params.push(data[key]);
|
||||
}
|
||||
return {
|
||||
sql: `INSERT INTO \`${tableName}\` (${keySql}) VALUES (${valueSql})`,
|
||||
params: params,
|
||||
};
|
||||
}
|
||||
|
||||
static updateQuery(tableName, data, where) {
|
||||
if (!data || !Object.keys(data).length) throw new Error('Data is empty');
|
||||
|
||||
let sql = '';
|
||||
const params = [];
|
||||
for (const key in data) {
|
||||
if (!data.hasOwnProperty(key)) continue;
|
||||
if (key[key.length - 1] == '_') continue;
|
||||
if (sql != '') sql += ', ';
|
||||
sql += `\`${key}\`=?`;
|
||||
params.push(data[key]);
|
||||
}
|
||||
|
||||
if (typeof where != 'string') {
|
||||
const s = [];
|
||||
for (const n in where) {
|
||||
if (!where.hasOwnProperty(n)) continue;
|
||||
params.push(where[n]);
|
||||
s.push(`\`${n}\`=?`);
|
||||
}
|
||||
where = s.join(' AND ');
|
||||
}
|
||||
|
||||
return {
|
||||
sql: `UPDATE \`${tableName}\` SET ${sql} WHERE ${where}`,
|
||||
params: params,
|
||||
};
|
||||
}
|
||||
|
||||
alterColumnQueries(tableName, fields) {
|
||||
const fieldsNoType = [];
|
||||
for (const n in fields) {
|
||||
if (!fields.hasOwnProperty(n)) continue;
|
||||
fieldsNoType.push(n);
|
||||
}
|
||||
|
||||
const fieldsWithType = [];
|
||||
for (const n in fields) {
|
||||
if (!fields.hasOwnProperty(n)) continue;
|
||||
fieldsWithType.push(`${this.escapeField(n)} ${fields[n]}`);
|
||||
}
|
||||
|
||||
let sql = `
|
||||
constructor(driver) {
|
||||
this.debugMode_ = false;
|
||||
this.sqlQueryLogEnabled_ = false;
|
||||
this.logger_ = new Logger_1.default();
|
||||
this.logExcludedQueryTypes_ = [];
|
||||
this.batchTransactionMutex_ = new Mutex();
|
||||
this.profilingEnabled_ = false;
|
||||
this.queryId_ = 1;
|
||||
this.driver_ = driver;
|
||||
}
|
||||
setLogExcludedQueryTypes(v) {
|
||||
this.logExcludedQueryTypes_ = v;
|
||||
}
|
||||
// Converts the SQLite error to a regular JS error
|
||||
// so that it prints a stacktrace when passed to
|
||||
// console.error()
|
||||
sqliteErrorToJsError(error, sql = null, params = null) {
|
||||
return this.driver().sqliteErrorToJsError(error, sql, params);
|
||||
}
|
||||
setLogger(l) {
|
||||
this.logger_ = l;
|
||||
}
|
||||
logger() {
|
||||
return this.logger_;
|
||||
}
|
||||
driver() {
|
||||
return this.driver_;
|
||||
}
|
||||
open(options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
yield this.driver().open(options);
|
||||
}
|
||||
catch (error) {
|
||||
throw new Error(`Cannot open database: ${error.message}: ${JSON.stringify(options)}`);
|
||||
}
|
||||
this.logger().info('Database was open successfully');
|
||||
});
|
||||
}
|
||||
escapeField(field) {
|
||||
if (field == '*')
|
||||
return '*';
|
||||
const p = field.split('.');
|
||||
if (p.length == 1)
|
||||
return `\`${field}\``;
|
||||
if (p.length == 2)
|
||||
return `${p[0]}.\`${p[1]}\``;
|
||||
throw new Error(`Invalid field format: ${field}`);
|
||||
}
|
||||
escapeFields(fields) {
|
||||
if (fields == '*')
|
||||
return '*';
|
||||
const output = [];
|
||||
for (let i = 0; i < fields.length; i++) {
|
||||
output.push(this.escapeField(fields[i]));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
tryCall(callName, inputSql, inputParams) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let sql = null;
|
||||
let params = null;
|
||||
if (typeof inputSql === 'object') {
|
||||
params = inputSql.params;
|
||||
sql = inputSql.sql;
|
||||
}
|
||||
else {
|
||||
params = inputParams;
|
||||
sql = inputSql;
|
||||
}
|
||||
let waitTime = 50;
|
||||
let totalWaitTime = 0;
|
||||
const callStartTime = Date.now();
|
||||
let profilingTimeoutId = null;
|
||||
while (true) {
|
||||
try {
|
||||
this.logQuery(sql, params);
|
||||
const queryId = this.queryId_++;
|
||||
if (this.profilingEnabled_) {
|
||||
console.info(`SQL START ${queryId}`, sql, params);
|
||||
profilingTimeoutId = shim_1.default.setInterval(() => {
|
||||
console.warn(`SQL ${queryId} has been running for ${Date.now() - callStartTime}: ${sql}`);
|
||||
}, 3000);
|
||||
}
|
||||
const result = yield this.driver()[callName](sql, params);
|
||||
if (this.profilingEnabled_) {
|
||||
shim_1.default.clearInterval(profilingTimeoutId);
|
||||
profilingTimeoutId = null;
|
||||
const elapsed = Date.now() - callStartTime;
|
||||
if (elapsed > 10)
|
||||
console.info(`SQL END ${queryId}`, elapsed, sql, params);
|
||||
}
|
||||
return result; // No exception was thrown
|
||||
}
|
||||
catch (error) {
|
||||
if (error && (error.code == 'SQLITE_IOERR' || error.code == 'SQLITE_BUSY')) {
|
||||
if (totalWaitTime >= 20000)
|
||||
throw this.sqliteErrorToJsError(error, sql, params);
|
||||
// NOTE: don't put logger statements here because it might log to the database, which
|
||||
// could result in an error being thrown again.
|
||||
// this.logger().warn(sprintf('Error %s: will retry in %s milliseconds', error.code, waitTime));
|
||||
// this.logger().warn('Error was: ' + error.toString());
|
||||
yield time_1.default.msleep(waitTime);
|
||||
totalWaitTime += waitTime;
|
||||
waitTime *= 1.5;
|
||||
}
|
||||
else {
|
||||
throw this.sqliteErrorToJsError(error, sql, params);
|
||||
}
|
||||
}
|
||||
finally {
|
||||
if (profilingTimeoutId)
|
||||
shim_1.default.clearInterval(profilingTimeoutId);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
selectOne(sql, params = null) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return this.tryCall('selectOne', sql, params);
|
||||
});
|
||||
}
|
||||
loadExtension( /* path */) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return; // Disabled for now as fuzzy search extension is not in use
|
||||
// let result = null;
|
||||
// try {
|
||||
// result = await this.driver().loadExtension(path);
|
||||
// return result;
|
||||
// } catch (e) {
|
||||
// throw new Error(`Could not load extension ${path}`);
|
||||
// }
|
||||
});
|
||||
}
|
||||
selectAll(sql, params = null) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return this.tryCall('selectAll', sql, params);
|
||||
});
|
||||
}
|
||||
selectAllFields(sql, params, field) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const rows = yield this.tryCall('selectAll', sql, params);
|
||||
const output = [];
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
const v = rows[i][field];
|
||||
if (!v)
|
||||
throw new Error(`No such field: ${field}. Query was: ${sql}`);
|
||||
output.push(rows[i][field]);
|
||||
}
|
||||
return output;
|
||||
});
|
||||
}
|
||||
exec(sql, params = null) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return this.tryCall('exec', sql, params);
|
||||
});
|
||||
}
|
||||
transactionExecBatch(queries) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (queries.length <= 0)
|
||||
return;
|
||||
if (queries.length == 1) {
|
||||
const q = this.wrapQuery(queries[0]);
|
||||
yield this.exec(q.sql, q.params);
|
||||
return;
|
||||
}
|
||||
// There can be only one transaction running at a time so use a mutex
|
||||
const release = yield this.batchTransactionMutex_.acquire();
|
||||
try {
|
||||
yield this.exec('BEGIN TRANSACTION');
|
||||
for (let i = 0; i < queries.length; i++) {
|
||||
const query = this.wrapQuery(queries[i]);
|
||||
yield this.exec(query.sql, query.params);
|
||||
}
|
||||
yield this.exec('COMMIT');
|
||||
}
|
||||
catch (error) {
|
||||
yield this.exec('ROLLBACK');
|
||||
throw error;
|
||||
}
|
||||
finally {
|
||||
release();
|
||||
}
|
||||
});
|
||||
}
|
||||
static enumId(type, s) {
|
||||
if (type == 'settings') {
|
||||
if (s == 'int')
|
||||
return 1;
|
||||
if (s == 'string')
|
||||
return 2;
|
||||
}
|
||||
if (type == 'fieldType') {
|
||||
if (s)
|
||||
s = s.toUpperCase();
|
||||
if (s == 'INTEGER')
|
||||
s = 'INT';
|
||||
if (!(`TYPE_${s}` in this))
|
||||
throw new Error(`Unkonwn fieldType: ${s}`);
|
||||
return this[`TYPE_${s}`];
|
||||
}
|
||||
if (type == 'syncTarget') {
|
||||
if (s == 'memory')
|
||||
return 1;
|
||||
if (s == 'filesystem')
|
||||
return 2;
|
||||
if (s == 'onedrive')
|
||||
return 3;
|
||||
}
|
||||
throw new Error(`Unknown enum type or value: ${type}, ${s}`);
|
||||
}
|
||||
static enumName(type, id) {
|
||||
if (type === 'fieldType') {
|
||||
if (id === Database.TYPE_UNKNOWN)
|
||||
return 'unknown';
|
||||
if (id === Database.TYPE_INT)
|
||||
return 'int';
|
||||
if (id === Database.TYPE_TEXT)
|
||||
return 'text';
|
||||
if (id === Database.TYPE_NUMERIC)
|
||||
return 'numeric';
|
||||
throw new Error(`Invalid type id: ${id}`);
|
||||
}
|
||||
// Or maybe an error should be thrown
|
||||
return undefined;
|
||||
}
|
||||
static formatValue(type, value) {
|
||||
if (value === null || value === undefined)
|
||||
return null;
|
||||
if (type == this.TYPE_INT)
|
||||
return Number(value);
|
||||
if (type == this.TYPE_TEXT)
|
||||
return value;
|
||||
if (type == this.TYPE_NUMERIC)
|
||||
return Number(value);
|
||||
throw new Error(`Unknown type: ${type}`);
|
||||
}
|
||||
sqlStringToLines(sql) {
|
||||
const output = [];
|
||||
const lines = sql.split('\n');
|
||||
let statement = '';
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (line == '')
|
||||
continue;
|
||||
if (line.substr(0, 2) == '--')
|
||||
continue;
|
||||
statement += line.trim();
|
||||
if (line[line.length - 1] == ',')
|
||||
statement += ' ';
|
||||
if (line[line.length - 1] == ';') {
|
||||
output.push(statement);
|
||||
statement = '';
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
logQuery(sql, params = null) {
|
||||
if (!this.sqlQueryLogEnabled_)
|
||||
return;
|
||||
if (this.logExcludedQueryTypes_.length) {
|
||||
const temp = sql.toLowerCase();
|
||||
for (let i = 0; i < this.logExcludedQueryTypes_.length; i++) {
|
||||
if (temp.indexOf(this.logExcludedQueryTypes_[i].toLowerCase()) === 0)
|
||||
return;
|
||||
}
|
||||
}
|
||||
this.logger().debug(sql);
|
||||
if (params !== null && params.length)
|
||||
this.logger().debug(JSON.stringify(params));
|
||||
}
|
||||
static insertQuery(tableName, data) {
|
||||
if (!data || !Object.keys(data).length)
|
||||
throw new Error('Data is empty');
|
||||
let keySql = '';
|
||||
let valueSql = '';
|
||||
const params = [];
|
||||
for (const key in data) {
|
||||
if (!data.hasOwnProperty(key))
|
||||
continue;
|
||||
if (key[key.length - 1] == '_')
|
||||
continue;
|
||||
if (keySql != '')
|
||||
keySql += ', ';
|
||||
if (valueSql != '')
|
||||
valueSql += ', ';
|
||||
keySql += `\`${key}\``;
|
||||
valueSql += '?';
|
||||
params.push(data[key]);
|
||||
}
|
||||
return {
|
||||
sql: `INSERT INTO \`${tableName}\` (${keySql}) VALUES (${valueSql})`,
|
||||
params: params,
|
||||
};
|
||||
}
|
||||
static updateQuery(tableName, data, where) {
|
||||
if (!data || !Object.keys(data).length)
|
||||
throw new Error('Data is empty');
|
||||
let sql = '';
|
||||
const params = [];
|
||||
for (const key in data) {
|
||||
if (!data.hasOwnProperty(key))
|
||||
continue;
|
||||
if (key[key.length - 1] == '_')
|
||||
continue;
|
||||
if (sql != '')
|
||||
sql += ', ';
|
||||
sql += `\`${key}\`=?`;
|
||||
params.push(data[key]);
|
||||
}
|
||||
if (typeof where != 'string') {
|
||||
const s = [];
|
||||
for (const n in where) {
|
||||
if (!where.hasOwnProperty(n))
|
||||
continue;
|
||||
params.push(where[n]);
|
||||
s.push(`\`${n}\`=?`);
|
||||
}
|
||||
where = s.join(' AND ');
|
||||
}
|
||||
return {
|
||||
sql: `UPDATE \`${tableName}\` SET ${sql} WHERE ${where}`,
|
||||
params: params,
|
||||
};
|
||||
}
|
||||
alterColumnQueries(tableName, fields) {
|
||||
const fieldsNoType = [];
|
||||
for (const n in fields) {
|
||||
if (!fields.hasOwnProperty(n))
|
||||
continue;
|
||||
fieldsNoType.push(n);
|
||||
}
|
||||
const fieldsWithType = [];
|
||||
for (const n in fields) {
|
||||
if (!fields.hasOwnProperty(n))
|
||||
continue;
|
||||
fieldsWithType.push(`${this.escapeField(n)} ${fields[n]}`);
|
||||
}
|
||||
let sql = `
|
||||
CREATE TEMPORARY TABLE _BACKUP_TABLE_NAME_(_FIELDS_TYPE_);
|
||||
INSERT INTO _BACKUP_TABLE_NAME_ SELECT _FIELDS_NO_TYPE_ FROM _TABLE_NAME_;
|
||||
DROP TABLE _TABLE_NAME_;
|
||||
@ -323,42 +360,39 @@ class Database {
|
||||
INSERT INTO _TABLE_NAME_ SELECT _FIELDS_NO_TYPE_ FROM _BACKUP_TABLE_NAME_;
|
||||
DROP TABLE _BACKUP_TABLE_NAME_;
|
||||
`;
|
||||
|
||||
sql = sql.replace(/_BACKUP_TABLE_NAME_/g, this.escapeField(`${tableName}_backup`));
|
||||
sql = sql.replace(/_TABLE_NAME_/g, this.escapeField(tableName));
|
||||
sql = sql.replace(/_FIELDS_NO_TYPE_/g, this.escapeFields(fieldsNoType).join(','));
|
||||
sql = sql.replace(/_FIELDS_TYPE_/g, fieldsWithType.join(','));
|
||||
|
||||
return sql.trim().split('\n');
|
||||
}
|
||||
|
||||
wrapQueries(queries) {
|
||||
const output = [];
|
||||
for (let i = 0; i < queries.length; i++) {
|
||||
output.push(this.wrapQuery(queries[i]));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
wrapQuery(sql, params = null) {
|
||||
if (!sql) throw new Error(`Cannot wrap empty string: ${sql}`);
|
||||
|
||||
if (sql.constructor === Array) {
|
||||
const output = {};
|
||||
output.sql = sql[0];
|
||||
output.params = sql.length >= 2 ? sql[1] : null;
|
||||
return output;
|
||||
} else if (typeof sql === 'string') {
|
||||
return { sql: sql, params: params };
|
||||
} else {
|
||||
return sql; // Already wrapped
|
||||
}
|
||||
}
|
||||
sql = sql.replace(/_BACKUP_TABLE_NAME_/g, this.escapeField(`${tableName}_backup`));
|
||||
sql = sql.replace(/_TABLE_NAME_/g, this.escapeField(tableName));
|
||||
sql = sql.replace(/_FIELDS_NO_TYPE_/g, this.escapeFields(fieldsNoType).join(','));
|
||||
sql = sql.replace(/_FIELDS_TYPE_/g, fieldsWithType.join(','));
|
||||
return sql.trim().split('\n');
|
||||
}
|
||||
wrapQueries(queries) {
|
||||
const output = [];
|
||||
for (let i = 0; i < queries.length; i++) {
|
||||
output.push(this.wrapQuery(queries[i]));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
wrapQuery(sql, params = null) {
|
||||
if (!sql)
|
||||
throw new Error(`Cannot wrap empty string: ${sql}`);
|
||||
if (Array.isArray(sql)) {
|
||||
return {
|
||||
sql: sql[0],
|
||||
params: sql.length >= 2 ? sql[1] : null,
|
||||
};
|
||||
}
|
||||
else if (typeof sql === 'string') {
|
||||
return { sql: sql, params: params };
|
||||
}
|
||||
else {
|
||||
return sql; // Already wrapped
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.default = Database;
|
||||
Database.TYPE_UNKNOWN = 0;
|
||||
Database.TYPE_INT = 1;
|
||||
Database.TYPE_TEXT = 2;
|
||||
Database.TYPE_NUMERIC = 3;
|
||||
|
||||
module.exports = { Database };
|
||||
//# sourceMappingURL=database.js.map
|
386
packages/lib/database.ts
Normal file
386
packages/lib/database.ts
Normal file
@ -0,0 +1,386 @@
|
||||
import Logger from './Logger';
|
||||
import time from './time';
|
||||
import shim from './shim';
|
||||
|
||||
const Mutex = require('async-mutex').Mutex;
|
||||
|
||||
type SqlParams = Record<string, any>;
|
||||
|
||||
export interface SqlQuery {
|
||||
sql: string;
|
||||
params?: SqlParams;
|
||||
}
|
||||
|
||||
type StringOrSqlQuery = string | SqlQuery;
|
||||
|
||||
export type Row = Record<string, any>;
|
||||
|
||||
export default class Database {
|
||||
|
||||
public static TYPE_UNKNOWN = 0;
|
||||
public static TYPE_INT = 1;
|
||||
public static TYPE_TEXT = 2;
|
||||
public static TYPE_NUMERIC = 3;
|
||||
|
||||
protected debugMode_ = false;
|
||||
private sqlQueryLogEnabled_ = false;
|
||||
private driver_: any;
|
||||
private logger_ = new Logger();
|
||||
private logExcludedQueryTypes_: string[] = [];
|
||||
private batchTransactionMutex_ = new Mutex();
|
||||
private profilingEnabled_ = false;
|
||||
private queryId_ = 1;
|
||||
|
||||
public constructor(driver: any) {
|
||||
this.driver_ = driver;
|
||||
}
|
||||
|
||||
setLogExcludedQueryTypes(v: string[]) {
|
||||
this.logExcludedQueryTypes_ = v;
|
||||
}
|
||||
|
||||
// Converts the SQLite error to a regular JS error
|
||||
// so that it prints a stacktrace when passed to
|
||||
// console.error()
|
||||
sqliteErrorToJsError(error: any, sql: string = null, params: SqlParams = null) {
|
||||
return this.driver().sqliteErrorToJsError(error, sql, params);
|
||||
}
|
||||
|
||||
setLogger(l: Logger) {
|
||||
this.logger_ = l;
|
||||
}
|
||||
|
||||
logger() {
|
||||
return this.logger_;
|
||||
}
|
||||
|
||||
driver() {
|
||||
return this.driver_;
|
||||
}
|
||||
|
||||
async open(options: any) {
|
||||
try {
|
||||
await this.driver().open(options);
|
||||
} catch (error) {
|
||||
throw new Error(`Cannot open database: ${error.message}: ${JSON.stringify(options)}`);
|
||||
}
|
||||
|
||||
this.logger().info('Database was open successfully');
|
||||
}
|
||||
|
||||
escapeField(field: string) {
|
||||
if (field == '*') return '*';
|
||||
const p = field.split('.');
|
||||
if (p.length == 1) return `\`${field}\``;
|
||||
if (p.length == 2) return `${p[0]}.\`${p[1]}\``;
|
||||
|
||||
throw new Error(`Invalid field format: ${field}`);
|
||||
}
|
||||
|
||||
escapeFields(fields: string[] | string): string[] | string {
|
||||
if (fields == '*') return '*';
|
||||
|
||||
const output = [];
|
||||
for (let i = 0; i < fields.length; i++) {
|
||||
output.push(this.escapeField(fields[i]));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
async tryCall(callName: string, inputSql: StringOrSqlQuery, inputParams: SqlParams) {
|
||||
let sql: string = null;
|
||||
let params: SqlParams = null;
|
||||
|
||||
if (typeof inputSql === 'object') {
|
||||
params = (inputSql as SqlQuery).params;
|
||||
sql = (inputSql as SqlQuery).sql;
|
||||
} else {
|
||||
params = inputParams;
|
||||
sql = inputSql as string;
|
||||
}
|
||||
|
||||
let waitTime = 50;
|
||||
let totalWaitTime = 0;
|
||||
const callStartTime = Date.now();
|
||||
let profilingTimeoutId = null;
|
||||
while (true) {
|
||||
try {
|
||||
this.logQuery(sql, params);
|
||||
|
||||
const queryId = this.queryId_++;
|
||||
if (this.profilingEnabled_) {
|
||||
console.info(`SQL START ${queryId}`, sql, params);
|
||||
|
||||
profilingTimeoutId = shim.setInterval(() => {
|
||||
console.warn(`SQL ${queryId} has been running for ${Date.now() - callStartTime}: ${sql}`);
|
||||
}, 3000);
|
||||
}
|
||||
|
||||
const result = await this.driver()[callName](sql, params);
|
||||
|
||||
if (this.profilingEnabled_) {
|
||||
shim.clearInterval(profilingTimeoutId);
|
||||
profilingTimeoutId = null;
|
||||
const elapsed = Date.now() - callStartTime;
|
||||
if (elapsed > 10) console.info(`SQL END ${queryId}`, elapsed, sql, params);
|
||||
}
|
||||
|
||||
return result; // No exception was thrown
|
||||
} catch (error) {
|
||||
if (error && (error.code == 'SQLITE_IOERR' || error.code == 'SQLITE_BUSY')) {
|
||||
if (totalWaitTime >= 20000) throw this.sqliteErrorToJsError(error, sql, params);
|
||||
// NOTE: don't put logger statements here because it might log to the database, which
|
||||
// could result in an error being thrown again.
|
||||
// this.logger().warn(sprintf('Error %s: will retry in %s milliseconds', error.code, waitTime));
|
||||
// this.logger().warn('Error was: ' + error.toString());
|
||||
await time.msleep(waitTime);
|
||||
totalWaitTime += waitTime;
|
||||
waitTime *= 1.5;
|
||||
} else {
|
||||
throw this.sqliteErrorToJsError(error, sql, params);
|
||||
}
|
||||
} finally {
|
||||
if (profilingTimeoutId) shim.clearInterval(profilingTimeoutId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async selectOne(sql: string, params: SqlParams = null): Promise<Row> {
|
||||
return this.tryCall('selectOne', sql, params);
|
||||
}
|
||||
|
||||
async loadExtension(/* path */) {
|
||||
return; // Disabled for now as fuzzy search extension is not in use
|
||||
|
||||
// let result = null;
|
||||
// try {
|
||||
// result = await this.driver().loadExtension(path);
|
||||
// return result;
|
||||
// } catch (e) {
|
||||
// throw new Error(`Could not load extension ${path}`);
|
||||
// }
|
||||
}
|
||||
|
||||
async selectAll(sql: string, params: SqlParams = null): Promise<Row[]> {
|
||||
return this.tryCall('selectAll', sql, params);
|
||||
}
|
||||
|
||||
async selectAllFields(sql: string, params: SqlParams, field: string): Promise<any[]> {
|
||||
const rows = await this.tryCall('selectAll', sql, params);
|
||||
const output = [];
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
const v = rows[i][field];
|
||||
if (!v) throw new Error(`No such field: ${field}. Query was: ${sql}`);
|
||||
output.push(rows[i][field]);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
async exec(sql: StringOrSqlQuery, params: SqlParams = null) {
|
||||
return this.tryCall('exec', sql, params);
|
||||
}
|
||||
|
||||
async transactionExecBatch(queries: StringOrSqlQuery[]) {
|
||||
if (queries.length <= 0) return;
|
||||
|
||||
if (queries.length == 1) {
|
||||
const q = this.wrapQuery(queries[0]);
|
||||
await this.exec(q.sql, q.params);
|
||||
return;
|
||||
}
|
||||
|
||||
// There can be only one transaction running at a time so use a mutex
|
||||
const release = await this.batchTransactionMutex_.acquire();
|
||||
|
||||
try {
|
||||
await this.exec('BEGIN TRANSACTION');
|
||||
|
||||
for (let i = 0; i < queries.length; i++) {
|
||||
const query = this.wrapQuery(queries[i]);
|
||||
await this.exec(query.sql, query.params);
|
||||
}
|
||||
|
||||
await this.exec('COMMIT');
|
||||
} catch (error) {
|
||||
await this.exec('ROLLBACK');
|
||||
throw error;
|
||||
} finally {
|
||||
release();
|
||||
}
|
||||
}
|
||||
|
||||
static enumId(type: string, s: string) {
|
||||
if (type == 'settings') {
|
||||
if (s == 'int') return 1;
|
||||
if (s == 'string') return 2;
|
||||
}
|
||||
if (type == 'fieldType') {
|
||||
if (s) s = s.toUpperCase();
|
||||
if (s == 'INTEGER') s = 'INT';
|
||||
if (!(`TYPE_${s}` in this)) throw new Error(`Unkonwn fieldType: ${s}`);
|
||||
return (this as any)[`TYPE_${s}`];
|
||||
}
|
||||
if (type == 'syncTarget') {
|
||||
if (s == 'memory') return 1;
|
||||
if (s == 'filesystem') return 2;
|
||||
if (s == 'onedrive') return 3;
|
||||
}
|
||||
throw new Error(`Unknown enum type or value: ${type}, ${s}`);
|
||||
}
|
||||
|
||||
static enumName(type: string, id: number) {
|
||||
if (type === 'fieldType') {
|
||||
if (id === Database.TYPE_UNKNOWN) return 'unknown';
|
||||
if (id === Database.TYPE_INT) return 'int';
|
||||
if (id === Database.TYPE_TEXT) return 'text';
|
||||
if (id === Database.TYPE_NUMERIC) return 'numeric';
|
||||
throw new Error(`Invalid type id: ${id}`);
|
||||
}
|
||||
|
||||
// Or maybe an error should be thrown
|
||||
return undefined;
|
||||
}
|
||||
|
||||
static formatValue(type: number, value: any) {
|
||||
if (value === null || value === undefined) return null;
|
||||
if (type == this.TYPE_INT) return Number(value);
|
||||
if (type == this.TYPE_TEXT) return value;
|
||||
if (type == this.TYPE_NUMERIC) return Number(value);
|
||||
throw new Error(`Unknown type: ${type}`);
|
||||
}
|
||||
|
||||
sqlStringToLines(sql: string) {
|
||||
const output = [];
|
||||
const lines = sql.split('\n');
|
||||
let statement = '';
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (line == '') continue;
|
||||
if (line.substr(0, 2) == '--') continue;
|
||||
statement += line.trim();
|
||||
if (line[line.length - 1] == ',') statement += ' ';
|
||||
if (line[line.length - 1] == ';') {
|
||||
output.push(statement);
|
||||
statement = '';
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
logQuery(sql: string, params: SqlParams = null) {
|
||||
if (!this.sqlQueryLogEnabled_) return;
|
||||
|
||||
if (this.logExcludedQueryTypes_.length) {
|
||||
const temp = sql.toLowerCase();
|
||||
for (let i = 0; i < this.logExcludedQueryTypes_.length; i++) {
|
||||
if (temp.indexOf(this.logExcludedQueryTypes_[i].toLowerCase()) === 0) return;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger().debug(sql);
|
||||
if (params !== null && params.length) this.logger().debug(JSON.stringify(params));
|
||||
}
|
||||
|
||||
static insertQuery(tableName: string, data: Record<string, any>) {
|
||||
if (!data || !Object.keys(data).length) throw new Error('Data is empty');
|
||||
|
||||
let keySql = '';
|
||||
let valueSql = '';
|
||||
const params = [];
|
||||
for (const key in data) {
|
||||
if (!data.hasOwnProperty(key)) continue;
|
||||
if (key[key.length - 1] == '_') continue;
|
||||
if (keySql != '') keySql += ', ';
|
||||
if (valueSql != '') valueSql += ', ';
|
||||
keySql += `\`${key}\``;
|
||||
valueSql += '?';
|
||||
params.push(data[key]);
|
||||
}
|
||||
return {
|
||||
sql: `INSERT INTO \`${tableName}\` (${keySql}) VALUES (${valueSql})`,
|
||||
params: params,
|
||||
};
|
||||
}
|
||||
|
||||
static updateQuery(tableName: string, data: Record<string, any>, where: string | Record<string, any>) {
|
||||
if (!data || !Object.keys(data).length) throw new Error('Data is empty');
|
||||
|
||||
let sql = '';
|
||||
const params = [];
|
||||
for (const key in data) {
|
||||
if (!data.hasOwnProperty(key)) continue;
|
||||
if (key[key.length - 1] == '_') continue;
|
||||
if (sql != '') sql += ', ';
|
||||
sql += `\`${key}\`=?`;
|
||||
params.push(data[key]);
|
||||
}
|
||||
|
||||
if (typeof where != 'string') {
|
||||
const s = [];
|
||||
for (const n in where) {
|
||||
if (!where.hasOwnProperty(n)) continue;
|
||||
params.push(where[n]);
|
||||
s.push(`\`${n}\`=?`);
|
||||
}
|
||||
where = s.join(' AND ');
|
||||
}
|
||||
|
||||
return {
|
||||
sql: `UPDATE \`${tableName}\` SET ${sql} WHERE ${where}`,
|
||||
params: params,
|
||||
};
|
||||
}
|
||||
|
||||
alterColumnQueries(tableName: string, fields: Record<string, string>) {
|
||||
const fieldsNoType = [];
|
||||
for (const n in fields) {
|
||||
if (!fields.hasOwnProperty(n)) continue;
|
||||
fieldsNoType.push(n);
|
||||
}
|
||||
|
||||
const fieldsWithType = [];
|
||||
for (const n in fields) {
|
||||
if (!fields.hasOwnProperty(n)) continue;
|
||||
fieldsWithType.push(`${this.escapeField(n)} ${fields[n]}`);
|
||||
}
|
||||
|
||||
let sql = `
|
||||
CREATE TEMPORARY TABLE _BACKUP_TABLE_NAME_(_FIELDS_TYPE_);
|
||||
INSERT INTO _BACKUP_TABLE_NAME_ SELECT _FIELDS_NO_TYPE_ FROM _TABLE_NAME_;
|
||||
DROP TABLE _TABLE_NAME_;
|
||||
CREATE TABLE _TABLE_NAME_(_FIELDS_TYPE_);
|
||||
INSERT INTO _TABLE_NAME_ SELECT _FIELDS_NO_TYPE_ FROM _BACKUP_TABLE_NAME_;
|
||||
DROP TABLE _BACKUP_TABLE_NAME_;
|
||||
`;
|
||||
|
||||
sql = sql.replace(/_BACKUP_TABLE_NAME_/g, this.escapeField(`${tableName}_backup`));
|
||||
sql = sql.replace(/_TABLE_NAME_/g, this.escapeField(tableName));
|
||||
sql = sql.replace(/_FIELDS_NO_TYPE_/g, (this.escapeFields(fieldsNoType) as string[]).join(','));
|
||||
sql = sql.replace(/_FIELDS_TYPE_/g, fieldsWithType.join(','));
|
||||
|
||||
return sql.trim().split('\n');
|
||||
}
|
||||
|
||||
wrapQueries(queries: any[]) {
|
||||
const output = [];
|
||||
for (let i = 0; i < queries.length; i++) {
|
||||
output.push(this.wrapQuery(queries[i]));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
wrapQuery(sql: any, params: SqlParams = null): SqlQuery {
|
||||
if (!sql) throw new Error(`Cannot wrap empty string: ${sql}`);
|
||||
|
||||
if (Array.isArray(sql)) {
|
||||
return {
|
||||
sql: sql[0],
|
||||
params: sql.length >= 2 ? sql[1] : null,
|
||||
};
|
||||
} else if (typeof sql === 'string') {
|
||||
return { sql: sql, params: params };
|
||||
} else {
|
||||
return sql; // Already wrapped
|
||||
}
|
||||
}
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
import JoplinServerApi from './JoplinServerApi2';
|
||||
import JoplinServerApi from './JoplinServerApi';
|
||||
const { dirname, basename } = require('./path-utils');
|
||||
|
||||
function removeTrailingColon(path: string) {
|
||||
|
@ -198,7 +198,7 @@ class FileApiDriverOneDrive {
|
||||
|
||||
async clearRoot() {
|
||||
const recurseItems = async (path) => {
|
||||
const result = await this.list(this.fileApi_.fullPath_(path));
|
||||
const result = await this.list(this.fileApi_.fullPath(path));
|
||||
const output = [];
|
||||
|
||||
for (const item of result.items) {
|
||||
@ -206,7 +206,7 @@ class FileApiDriverOneDrive {
|
||||
if (item.isDir) {
|
||||
await recurseItems(fullPath);
|
||||
}
|
||||
await this.delete(this.fileApi_.fullPath_(fullPath));
|
||||
await this.delete(this.fileApi_.fullPath(fullPath));
|
||||
}
|
||||
|
||||
return output;
|
||||
|
@ -1,452 +1,429 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.basicDelta = exports.FileApi = void 0;
|
||||
const Logger_1 = require("./Logger");
|
||||
const shim_1 = require("./shim");
|
||||
const BaseItem_1 = require("./models/BaseItem");
|
||||
const time_1 = require("./time");
|
||||
const { isHidden } = require('./path-utils');
|
||||
const Logger = require('./Logger').default;
|
||||
const shim = require('./shim').default;
|
||||
const BaseItem = require('./models/BaseItem').default;
|
||||
const JoplinError = require('./JoplinError');
|
||||
const ArrayUtils = require('./ArrayUtils');
|
||||
const time = require('./time').default;
|
||||
const { sprintf } = require('sprintf-js');
|
||||
const Mutex = require('async-mutex').Mutex;
|
||||
|
||||
const logger = Logger.create('FileApi');
|
||||
|
||||
const logger = Logger_1.default.create('FileApi');
|
||||
function requestCanBeRepeated(error) {
|
||||
const errorCode = typeof error === 'object' && error.code ? error.code : null;
|
||||
|
||||
// The target is explicitely rejecting the item so repeating wouldn't make a difference.
|
||||
if (errorCode === 'rejectedByTarget') return false;
|
||||
|
||||
// We don't repeat failSafe errors because it's an indication of an issue at the
|
||||
// server-level issue which usually cannot be fixed by repeating the request.
|
||||
// Also we print the previous requests and responses to the log in this case,
|
||||
// so not repeating means there will be less noise in the log.
|
||||
if (errorCode === 'failSafe') return false;
|
||||
|
||||
return true;
|
||||
const errorCode = typeof error === 'object' && error.code ? error.code : null;
|
||||
// The target is explicitely rejecting the item so repeating wouldn't make a difference.
|
||||
if (errorCode === 'rejectedByTarget')
|
||||
return false;
|
||||
// We don't repeat failSafe errors because it's an indication of an issue at the
|
||||
// server-level issue which usually cannot be fixed by repeating the request.
|
||||
// Also we print the previous requests and responses to the log in this case,
|
||||
// so not repeating means there will be less noise in the log.
|
||||
if (errorCode === 'failSafe')
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
async function tryAndRepeat(fn, count) {
|
||||
let retryCount = 0;
|
||||
|
||||
// Don't use internal fetch retry mechanim since we
|
||||
// are already retrying here.
|
||||
const shimFetchMaxRetryPrevious = shim.fetchMaxRetrySet(0);
|
||||
const defer = () => {
|
||||
shim.fetchMaxRetrySet(shimFetchMaxRetryPrevious);
|
||||
};
|
||||
|
||||
while (true) {
|
||||
try {
|
||||
const result = await fn();
|
||||
defer();
|
||||
return result;
|
||||
} catch (error) {
|
||||
if (retryCount >= count || !requestCanBeRepeated(error)) {
|
||||
defer();
|
||||
throw error;
|
||||
}
|
||||
retryCount++;
|
||||
await time.sleep(1 + retryCount * 3);
|
||||
}
|
||||
}
|
||||
function tryAndRepeat(fn, count) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let retryCount = 0;
|
||||
// Don't use internal fetch retry mechanim since we
|
||||
// are already retrying here.
|
||||
const shimFetchMaxRetryPrevious = shim_1.default.fetchMaxRetrySet(0);
|
||||
const defer = () => {
|
||||
shim_1.default.fetchMaxRetrySet(shimFetchMaxRetryPrevious);
|
||||
};
|
||||
while (true) {
|
||||
try {
|
||||
const result = yield fn();
|
||||
defer();
|
||||
return result;
|
||||
}
|
||||
catch (error) {
|
||||
if (retryCount >= count || !requestCanBeRepeated(error)) {
|
||||
defer();
|
||||
throw error;
|
||||
}
|
||||
retryCount++;
|
||||
yield time_1.default.sleep(1 + retryCount * 3);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
class FileApi {
|
||||
constructor(baseDir, driver) {
|
||||
this.baseDir_ = baseDir;
|
||||
this.driver_ = driver;
|
||||
this.logger_ = new Logger();
|
||||
this.syncTargetId_ = null;
|
||||
this.tempDirName_ = null;
|
||||
this.driver_.fileApi_ = this;
|
||||
this.requestRepeatCount_ = null; // For testing purpose only - normally this value should come from the driver
|
||||
this.remoteDateOffset_ = 0;
|
||||
this.remoteDateNextCheckTime_ = 0;
|
||||
this.remoteDateMutex_ = new Mutex();
|
||||
this.initialized_ = false;
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
if (this.initialized_) return;
|
||||
this.initialized_ = true;
|
||||
if (this.driver_.initialize) return this.driver_.initialize(this.fullPath_(''));
|
||||
}
|
||||
|
||||
async fetchRemoteDateOffset_() {
|
||||
const tempFile = `${this.tempDirName()}/timeCheck${Math.round(Math.random() * 1000000)}.txt`;
|
||||
const startTime = Date.now();
|
||||
await this.put(tempFile, 'timeCheck');
|
||||
|
||||
// Normally it should be possible to read the file back immediately but
|
||||
// just in case, read it in a loop.
|
||||
const loopStartTime = Date.now();
|
||||
let stat = null;
|
||||
while (Date.now() - loopStartTime < 5000) {
|
||||
stat = await this.stat(tempFile);
|
||||
if (stat) break;
|
||||
await time.msleep(200);
|
||||
}
|
||||
|
||||
if (!stat) throw new Error('Timed out trying to get sync target clock time');
|
||||
|
||||
this.delete(tempFile); // No need to await for this call
|
||||
|
||||
const endTime = Date.now();
|
||||
const expectedTime = Math.round((endTime + startTime) / 2);
|
||||
return stat.updated_time - expectedTime;
|
||||
}
|
||||
|
||||
// Approximates the current time on the sync target. It caches the time offset to
|
||||
// improve performance.
|
||||
async remoteDate() {
|
||||
const shouldSyncTime = () => {
|
||||
return !this.remoteDateNextCheckTime_ || Date.now() > this.remoteDateNextCheckTime_;
|
||||
};
|
||||
|
||||
if (shouldSyncTime()) {
|
||||
const release = await this.remoteDateMutex_.acquire();
|
||||
|
||||
try {
|
||||
// Another call might have refreshed the time while we were waiting for the mutex,
|
||||
// so check again if we need to refresh.
|
||||
if (shouldSyncTime()) {
|
||||
this.remoteDateOffset_ = await this.fetchRemoteDateOffset_();
|
||||
// The sync target clock should rarely change but the device one might,
|
||||
// so we need to refresh relatively frequently.
|
||||
this.remoteDateNextCheckTime_ = Date.now() + 10 * 60 * 1000;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn('Could not retrieve remote date - defaulting to device date:', error);
|
||||
this.remoteDateOffset_ = 0;
|
||||
this.remoteDateNextCheckTime_ = Date.now() + 60 * 1000;
|
||||
} finally {
|
||||
release();
|
||||
}
|
||||
}
|
||||
|
||||
return new Date(Date.now() + this.remoteDateOffset_);
|
||||
}
|
||||
|
||||
// Ideally all requests repeating should be done at the FileApi level to remove duplicate code in the drivers, but
|
||||
// historically some drivers (eg. OneDrive) are already handling request repeating, so this is optional, per driver,
|
||||
// and it defaults to no repeating.
|
||||
requestRepeatCount() {
|
||||
if (this.requestRepeatCount_ !== null) return this.requestRepeatCount_;
|
||||
if (this.driver_.requestRepeatCount) return this.driver_.requestRepeatCount();
|
||||
return 0;
|
||||
}
|
||||
|
||||
lastRequests() {
|
||||
return this.driver_.lastRequests ? this.driver_.lastRequests() : [];
|
||||
}
|
||||
|
||||
clearLastRequests() {
|
||||
if (this.driver_.clearLastRequests) this.driver_.clearLastRequests();
|
||||
}
|
||||
|
||||
baseDir() {
|
||||
return typeof this.baseDir_ === 'function' ? this.baseDir_() : this.baseDir_;
|
||||
}
|
||||
|
||||
tempDirName() {
|
||||
if (this.tempDirName_ === null) throw Error('Temp dir not set!');
|
||||
return this.tempDirName_;
|
||||
}
|
||||
|
||||
setTempDirName(v) {
|
||||
this.tempDirName_ = v;
|
||||
}
|
||||
|
||||
fsDriver() {
|
||||
return shim.fsDriver();
|
||||
}
|
||||
|
||||
driver() {
|
||||
return this.driver_;
|
||||
}
|
||||
|
||||
setSyncTargetId(v) {
|
||||
this.syncTargetId_ = v;
|
||||
}
|
||||
|
||||
syncTargetId() {
|
||||
if (this.syncTargetId_ === null) throw new Error('syncTargetId has not been set!!');
|
||||
return this.syncTargetId_;
|
||||
}
|
||||
|
||||
setLogger(l) {
|
||||
if (!l) l = new Logger();
|
||||
this.logger_ = l;
|
||||
}
|
||||
|
||||
logger() {
|
||||
return this.logger_;
|
||||
}
|
||||
|
||||
fullPath_(path) {
|
||||
const output = [];
|
||||
if (this.baseDir()) output.push(this.baseDir());
|
||||
if (path) output.push(path);
|
||||
return output.join('/');
|
||||
}
|
||||
|
||||
// DRIVER MUST RETURN PATHS RELATIVE TO `path`
|
||||
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
|
||||
async list(path = '', options = null) {
|
||||
if (!options) options = {};
|
||||
if (!('includeHidden' in options)) options.includeHidden = false;
|
||||
if (!('context' in options)) options.context = null;
|
||||
if (!('includeDirs' in options)) options.includeDirs = true;
|
||||
if (!('syncItemsOnly' in options)) options.syncItemsOnly = false;
|
||||
|
||||
logger.debug(`list ${this.baseDir()}`);
|
||||
|
||||
const result = await tryAndRepeat(() => this.driver_.list(this.fullPath_(path), options), this.requestRepeatCount());
|
||||
|
||||
if (!options.includeHidden) {
|
||||
const temp = [];
|
||||
for (let i = 0; i < result.items.length; i++) {
|
||||
if (!isHidden(result.items[i].path)) temp.push(result.items[i]);
|
||||
}
|
||||
result.items = temp;
|
||||
}
|
||||
|
||||
if (!options.includeDirs) {
|
||||
result.items = result.items.filter(f => !f.isDir);
|
||||
}
|
||||
|
||||
if (options.syncItemsOnly) {
|
||||
result.items = result.items.filter(f => !f.isDir && BaseItem.isSystemPath(f.path));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Deprectated
|
||||
setTimestamp(path, timestampMs) {
|
||||
logger.debug(`setTimestamp ${this.fullPath_(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.setTimestamp(this.fullPath_(path), timestampMs), this.requestRepeatCount());
|
||||
// return this.driver_.setTimestamp(this.fullPath_(path), timestampMs);
|
||||
}
|
||||
|
||||
mkdir(path) {
|
||||
logger.debug(`mkdir ${this.fullPath_(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.mkdir(this.fullPath_(path)), this.requestRepeatCount());
|
||||
}
|
||||
|
||||
async stat(path) {
|
||||
logger.debug(`stat ${this.fullPath_(path)}`);
|
||||
|
||||
const output = await tryAndRepeat(() => this.driver_.stat(this.fullPath_(path)), this.requestRepeatCount());
|
||||
|
||||
if (!output) return output;
|
||||
output.path = path;
|
||||
return output;
|
||||
|
||||
// return this.driver_.stat(this.fullPath_(path)).then((output) => {
|
||||
// if (!output) return output;
|
||||
// output.path = path;
|
||||
// return output;
|
||||
// });
|
||||
}
|
||||
|
||||
// Returns UTF-8 encoded string by default, or a Response if `options.target = 'file'`
|
||||
get(path, options = null) {
|
||||
if (!options) options = {};
|
||||
if (!options.encoding) options.encoding = 'utf8';
|
||||
logger.debug(`get ${this.fullPath_(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.get(this.fullPath_(path), options), this.requestRepeatCount());
|
||||
}
|
||||
|
||||
async put(path, content, options = null) {
|
||||
logger.debug(`put ${this.fullPath_(path)}`, options);
|
||||
|
||||
if (options && options.source === 'file') {
|
||||
if (!(await this.fsDriver().exists(options.path))) throw new JoplinError(`File not found: ${options.path}`, 'fileNotFound');
|
||||
}
|
||||
|
||||
return tryAndRepeat(() => this.driver_.put(this.fullPath_(path), content, options), this.requestRepeatCount());
|
||||
}
|
||||
|
||||
delete(path) {
|
||||
logger.debug(`delete ${this.fullPath_(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.delete(this.fullPath_(path)), this.requestRepeatCount());
|
||||
}
|
||||
|
||||
// Deprectated
|
||||
move(oldPath, newPath) {
|
||||
logger.debug(`move ${this.fullPath_(oldPath)} => ${this.fullPath_(newPath)}`);
|
||||
return tryAndRepeat(() => this.driver_.move(this.fullPath_(oldPath), this.fullPath_(newPath)), this.requestRepeatCount());
|
||||
}
|
||||
|
||||
// Deprectated
|
||||
format() {
|
||||
return tryAndRepeat(() => this.driver_.format(), this.requestRepeatCount());
|
||||
}
|
||||
|
||||
clearRoot() {
|
||||
return tryAndRepeat(() => this.driver_.clearRoot(this.baseDir()), this.requestRepeatCount());
|
||||
}
|
||||
|
||||
delta(path, options = null) {
|
||||
logger.debug(`delta ${this.fullPath_(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.delta(this.fullPath_(path), options), this.requestRepeatCount());
|
||||
}
|
||||
constructor(baseDir, driver) {
|
||||
this.logger_ = new Logger_1.default();
|
||||
this.syncTargetId_ = null;
|
||||
this.tempDirName_ = null;
|
||||
this.requestRepeatCount_ = null; // For testing purpose only - normally this value should come from the driver
|
||||
this.remoteDateOffset_ = 0;
|
||||
this.remoteDateNextCheckTime_ = 0;
|
||||
this.remoteDateMutex_ = new Mutex();
|
||||
this.initialized_ = false;
|
||||
this.baseDir_ = baseDir;
|
||||
this.driver_ = driver;
|
||||
this.driver_.fileApi_ = this;
|
||||
}
|
||||
initialize() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (this.initialized_)
|
||||
return;
|
||||
this.initialized_ = true;
|
||||
if (this.driver_.initialize)
|
||||
return this.driver_.initialize(this.fullPath(''));
|
||||
});
|
||||
}
|
||||
fetchRemoteDateOffset_() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const tempFile = `${this.tempDirName()}/timeCheck${Math.round(Math.random() * 1000000)}.txt`;
|
||||
const startTime = Date.now();
|
||||
yield this.put(tempFile, 'timeCheck');
|
||||
// Normally it should be possible to read the file back immediately but
|
||||
// just in case, read it in a loop.
|
||||
const loopStartTime = Date.now();
|
||||
let stat = null;
|
||||
while (Date.now() - loopStartTime < 5000) {
|
||||
stat = yield this.stat(tempFile);
|
||||
if (stat)
|
||||
break;
|
||||
yield time_1.default.msleep(200);
|
||||
}
|
||||
if (!stat)
|
||||
throw new Error('Timed out trying to get sync target clock time');
|
||||
void this.delete(tempFile); // No need to await for this call
|
||||
const endTime = Date.now();
|
||||
const expectedTime = Math.round((endTime + startTime) / 2);
|
||||
return stat.updated_time - expectedTime;
|
||||
});
|
||||
}
|
||||
// Approximates the current time on the sync target. It caches the time offset to
|
||||
// improve performance.
|
||||
remoteDate() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const shouldSyncTime = () => {
|
||||
return !this.remoteDateNextCheckTime_ || Date.now() > this.remoteDateNextCheckTime_;
|
||||
};
|
||||
if (shouldSyncTime()) {
|
||||
const release = yield this.remoteDateMutex_.acquire();
|
||||
try {
|
||||
// Another call might have refreshed the time while we were waiting for the mutex,
|
||||
// so check again if we need to refresh.
|
||||
if (shouldSyncTime()) {
|
||||
this.remoteDateOffset_ = yield this.fetchRemoteDateOffset_();
|
||||
// The sync target clock should rarely change but the device one might,
|
||||
// so we need to refresh relatively frequently.
|
||||
this.remoteDateNextCheckTime_ = Date.now() + 10 * 60 * 1000;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
logger.warn('Could not retrieve remote date - defaulting to device date:', error);
|
||||
this.remoteDateOffset_ = 0;
|
||||
this.remoteDateNextCheckTime_ = Date.now() + 60 * 1000;
|
||||
}
|
||||
finally {
|
||||
release();
|
||||
}
|
||||
}
|
||||
return new Date(Date.now() + this.remoteDateOffset_);
|
||||
});
|
||||
}
|
||||
// Ideally all requests repeating should be done at the FileApi level to remove duplicate code in the drivers, but
|
||||
// historically some drivers (eg. OneDrive) are already handling request repeating, so this is optional, per driver,
|
||||
// and it defaults to no repeating.
|
||||
requestRepeatCount() {
|
||||
if (this.requestRepeatCount_ !== null)
|
||||
return this.requestRepeatCount_;
|
||||
if (this.driver_.requestRepeatCount)
|
||||
return this.driver_.requestRepeatCount();
|
||||
return 0;
|
||||
}
|
||||
lastRequests() {
|
||||
return this.driver_.lastRequests ? this.driver_.lastRequests() : [];
|
||||
}
|
||||
clearLastRequests() {
|
||||
if (this.driver_.clearLastRequests)
|
||||
this.driver_.clearLastRequests();
|
||||
}
|
||||
baseDir() {
|
||||
return typeof this.baseDir_ === 'function' ? this.baseDir_() : this.baseDir_;
|
||||
}
|
||||
tempDirName() {
|
||||
if (this.tempDirName_ === null)
|
||||
throw Error('Temp dir not set!');
|
||||
return this.tempDirName_;
|
||||
}
|
||||
setTempDirName(v) {
|
||||
this.tempDirName_ = v;
|
||||
}
|
||||
fsDriver() {
|
||||
return shim_1.default.fsDriver();
|
||||
}
|
||||
driver() {
|
||||
return this.driver_;
|
||||
}
|
||||
setSyncTargetId(v) {
|
||||
this.syncTargetId_ = v;
|
||||
}
|
||||
syncTargetId() {
|
||||
if (this.syncTargetId_ === null)
|
||||
throw new Error('syncTargetId has not been set!!');
|
||||
return this.syncTargetId_;
|
||||
}
|
||||
setLogger(l) {
|
||||
if (!l)
|
||||
l = new Logger_1.default();
|
||||
this.logger_ = l;
|
||||
}
|
||||
logger() {
|
||||
return this.logger_;
|
||||
}
|
||||
fullPath(path) {
|
||||
const output = [];
|
||||
if (this.baseDir())
|
||||
output.push(this.baseDir());
|
||||
if (path)
|
||||
output.push(path);
|
||||
return output.join('/');
|
||||
}
|
||||
// DRIVER MUST RETURN PATHS RELATIVE TO `path`
|
||||
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
|
||||
list(path = '', options = null) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!options)
|
||||
options = {};
|
||||
if (!('includeHidden' in options))
|
||||
options.includeHidden = false;
|
||||
if (!('context' in options))
|
||||
options.context = null;
|
||||
if (!('includeDirs' in options))
|
||||
options.includeDirs = true;
|
||||
if (!('syncItemsOnly' in options))
|
||||
options.syncItemsOnly = false;
|
||||
logger.debug(`list ${this.baseDir()}`);
|
||||
const result = yield tryAndRepeat(() => this.driver_.list(this.fullPath(path), options), this.requestRepeatCount());
|
||||
if (!options.includeHidden) {
|
||||
const temp = [];
|
||||
for (let i = 0; i < result.items.length; i++) {
|
||||
if (!isHidden(result.items[i].path))
|
||||
temp.push(result.items[i]);
|
||||
}
|
||||
result.items = temp;
|
||||
}
|
||||
if (!options.includeDirs) {
|
||||
result.items = result.items.filter((f) => !f.isDir);
|
||||
}
|
||||
if (options.syncItemsOnly) {
|
||||
result.items = result.items.filter((f) => !f.isDir && BaseItem_1.default.isSystemPath(f.path));
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
// Deprectated
|
||||
setTimestamp(path, timestampMs) {
|
||||
logger.debug(`setTimestamp ${this.fullPath(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.setTimestamp(this.fullPath(path), timestampMs), this.requestRepeatCount());
|
||||
// return this.driver_.setTimestamp(this.fullPath(path), timestampMs);
|
||||
}
|
||||
mkdir(path) {
|
||||
logger.debug(`mkdir ${this.fullPath(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.mkdir(this.fullPath(path)), this.requestRepeatCount());
|
||||
}
|
||||
stat(path) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
logger.debug(`stat ${this.fullPath(path)}`);
|
||||
const output = yield tryAndRepeat(() => this.driver_.stat(this.fullPath(path)), this.requestRepeatCount());
|
||||
if (!output)
|
||||
return output;
|
||||
output.path = path;
|
||||
return output;
|
||||
// return this.driver_.stat(this.fullPath(path)).then((output) => {
|
||||
// if (!output) return output;
|
||||
// output.path = path;
|
||||
// return output;
|
||||
// });
|
||||
});
|
||||
}
|
||||
// Returns UTF-8 encoded string by default, or a Response if `options.target = 'file'`
|
||||
get(path, options = null) {
|
||||
if (!options)
|
||||
options = {};
|
||||
if (!options.encoding)
|
||||
options.encoding = 'utf8';
|
||||
logger.debug(`get ${this.fullPath(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.get(this.fullPath(path), options), this.requestRepeatCount());
|
||||
}
|
||||
put(path, content, options = null) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
logger.debug(`put ${this.fullPath(path)}`, options);
|
||||
if (options && options.source === 'file') {
|
||||
if (!(yield this.fsDriver().exists(options.path)))
|
||||
throw new JoplinError(`File not found: ${options.path}`, 'fileNotFound');
|
||||
}
|
||||
return tryAndRepeat(() => this.driver_.put(this.fullPath(path), content, options), this.requestRepeatCount());
|
||||
});
|
||||
}
|
||||
delete(path) {
|
||||
logger.debug(`delete ${this.fullPath(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.delete(this.fullPath(path)), this.requestRepeatCount());
|
||||
}
|
||||
// Deprectated
|
||||
move(oldPath, newPath) {
|
||||
logger.debug(`move ${this.fullPath(oldPath)} => ${this.fullPath(newPath)}`);
|
||||
return tryAndRepeat(() => this.driver_.move(this.fullPath(oldPath), this.fullPath(newPath)), this.requestRepeatCount());
|
||||
}
|
||||
// Deprectated
|
||||
format() {
|
||||
return tryAndRepeat(() => this.driver_.format(), this.requestRepeatCount());
|
||||
}
|
||||
clearRoot() {
|
||||
return tryAndRepeat(() => this.driver_.clearRoot(this.baseDir()), this.requestRepeatCount());
|
||||
}
|
||||
delta(path, options = null) {
|
||||
logger.debug(`delta ${this.fullPath(path)}`);
|
||||
return tryAndRepeat(() => this.driver_.delta(this.fullPath(path), options), this.requestRepeatCount());
|
||||
}
|
||||
}
|
||||
|
||||
exports.FileApi = FileApi;
|
||||
// Builds the initial delta context from `options.context`, falling back to safe
// defaults when no previous context is available or when it is corrupted.
// Fix: the previous version contained a duplicated body after the `return`,
// redeclaring `const output` — unreachable and a SyntaxError. Removed.
function basicDeltaContextFromOptions_(options) {
	const output = {
		timestamp: 0,
		filesAtTimestamp: [],
		statsCache: null,
		statIdsCache: null,
		deletedItemsProcessed: false,
	};

	if (!options || !options.context) return output;

	// An invalid (NaN) timestamp from a corrupted context is treated as "no timestamp".
	const d = new Date(options.context.timestamp);

	output.timestamp = isNaN(d.getTime()) ? 0 : options.context.timestamp;
	output.filesAtTimestamp = Array.isArray(options.context.filesAtTimestamp) ? options.context.filesAtTimestamp.slice() : [];
	output.statsCache = options.context.statsCache ? options.context.statsCache : null;
	output.statIdsCache = options.context.statIdsCache ? options.context.statIdsCache : null;
	output.deletedItemsProcessed = 'deletedItemsProcessed' in options.context ? options.context.deletedItemsProcessed : false;

	return output;
}
|
||||
|
||||
// This is the basic delta algorithm, which can be used in case the cloud service does not have
// a built-in delta API. OneDrive and Dropbox have one for example, but Nextcloud and obviously
// the file system do not.
//
// Fix: a dead, transpiled copy of this function had been spliced inside the body
// after the final `return` (unreachable code, unbalanced braces). Removed; a
// single closed implementation remains.
async function basicDelta(path, getDirStatFn, options) {
	const outputLimit = 50;
	const itemIds = await options.allItemIdsHandler();
	if (!Array.isArray(itemIds)) throw new Error('Delta API not supported - local IDs must be provided');

	const logger = options && options.logger ? options.logger : new Logger();

	const context = basicDeltaContextFromOptions_(options);

	if (context.timestamp > Date.now()) {
		logger.warn(`BasicDelta: Context timestamp is greater than current time: ${context.timestamp}`);
		logger.warn('BasicDelta: Sync will continue but it is likely that nothing will be synced');
	}

	const newContext = {
		timestamp: context.timestamp,
		filesAtTimestamp: context.filesAtTimestamp.slice(),
		statsCache: context.statsCache,
		statIdsCache: context.statIdsCache,
		deletedItemsProcessed: context.deletedItemsProcessed,
	};

	// Stats are cached until all items have been processed (until hasMore is false)
	if (newContext.statsCache === null) {
		newContext.statsCache = await getDirStatFn(path);
		newContext.statsCache.sort(function(a, b) {
			return a.updated_time - b.updated_time;
		});
		newContext.statIdsCache = newContext.statsCache.filter(item => BaseItem.isSystemPath(item.path)).map(item => BaseItem.pathToId(item.path));
		newContext.statIdsCache.sort(); // Items must be sorted to use binary search below
	}

	let output = [];

	const updateReport = {
		timestamp: context.timestamp,
		older: 0,
		newer: 0,
		equal: 0,
	};

	// Find out which files have been changed since the last time. Note that we keep
	// both the timestamp of the most recent change, *and* the items that exactly match
	// this timestamp. This to handle cases where an item is modified while this delta
	// function is running. For example:
	// t0: Item 1 is changed
	// t0: Sync items - run delta function
	// t0: While delta() is running, modify Item 2
	// Since item 2 was modified within the same millisecond, it would be skipped in the
	// next sync if we relied exclusively on a timestamp.
	for (let i = 0; i < newContext.statsCache.length; i++) {
		const stat = newContext.statsCache[i];

		if (stat.isDir) continue;

		if (stat.updated_time < context.timestamp) {
			updateReport.older++;
			continue;
		}

		// Special case for items that exactly match the timestamp
		if (stat.updated_time === context.timestamp) {
			if (context.filesAtTimestamp.indexOf(stat.path) >= 0) {
				updateReport.equal++;
				continue;
			}
		}

		if (stat.updated_time > newContext.timestamp) {
			newContext.timestamp = stat.updated_time;
			newContext.filesAtTimestamp = [];
			updateReport.newer++;
		}

		newContext.filesAtTimestamp.push(stat.path);
		output.push(stat);

		if (output.length >= outputLimit) break;
	}

	logger.info(`BasicDelta: Report: ${JSON.stringify(updateReport)}`);

	if (!newContext.deletedItemsProcessed) {
		// Find out which items have been deleted on the sync target by comparing the items
		// we have to the items on the target.
		// Note that when deleted items are processed it might result in the output having
		// more items than outputLimit. This is acceptable since delete operations are cheap.
		const deletedItems = [];
		for (let i = 0; i < itemIds.length; i++) {
			const itemId = itemIds[i];

			if (ArrayUtils.binarySearch(newContext.statIdsCache, itemId) < 0) {
				deletedItems.push({
					path: BaseItem.systemPath(itemId),
					isDeleted: true,
				});
			}
		}

		const percentDeleted = itemIds.length ? deletedItems.length / itemIds.length : 0;

		// If more than 90% of the notes are going to be deleted, it's most likely a
		// configuration error or bug. For example, if the user moves their Nextcloud
		// directory, or if a network drive gets disconnected and returns an empty dir
		// instead of an error. In that case, we don't wipe out the user data, unless
		// they have switched off the fail-safe.
		if (options.wipeOutFailSafe && percentDeleted >= 0.90) throw new JoplinError(sprintf('Fail-safe: Sync was interrupted because %d%% of the data (%d items) is about to be deleted. To override this behaviour disable the fail-safe in the sync settings.', Math.round(percentDeleted * 100), deletedItems.length), 'failSafe');

		output = output.concat(deletedItems);
	}

	newContext.deletedItemsProcessed = true;

	const hasMore = output.length >= outputLimit;

	if (!hasMore) {
		// Clear temporary info from context. It's especially important to remove deletedItemsProcessed
		// so that they are processed again on the next sync.
		newContext.statsCache = null;
		newContext.statIdsCache = null;
		delete newContext.deletedItemsProcessed;
	}

	return {
		hasMore: hasMore,
		context: newContext,
		items: output,
	};
}
|
||||
|
||||
module.exports = { FileApi, basicDelta };
|
||||
exports.basicDelta = basicDelta;
|
||||
//# sourceMappingURL=file-api.js.map
|
457
packages/lib/file-api.ts
Normal file
457
packages/lib/file-api.ts
Normal file
@ -0,0 +1,457 @@
|
||||
import Logger from './Logger';
|
||||
import shim from './shim';
|
||||
import BaseItem from './models/BaseItem';
|
||||
import time from './time';
|
||||
|
||||
const { isHidden } = require('./path-utils');
|
||||
const JoplinError = require('./JoplinError');
|
||||
const ArrayUtils = require('./ArrayUtils');
|
||||
const { sprintf } = require('sprintf-js');
|
||||
const Mutex = require('async-mutex').Mutex;
|
||||
|
||||
const logger = Logger.create('FileApi');
|
||||
|
||||
function requestCanBeRepeated(error: any) {
|
||||
const errorCode = typeof error === 'object' && error.code ? error.code : null;
|
||||
|
||||
// The target is explicitely rejecting the item so repeating wouldn't make a difference.
|
||||
if (errorCode === 'rejectedByTarget') return false;
|
||||
|
||||
// We don't repeat failSafe errors because it's an indication of an issue at the
|
||||
// server-level issue which usually cannot be fixed by repeating the request.
|
||||
// Also we print the previous requests and responses to the log in this case,
|
||||
// so not repeating means there will be less noise in the log.
|
||||
if (errorCode === 'failSafe') return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async function tryAndRepeat(fn: Function, count: number) {
|
||||
let retryCount = 0;
|
||||
|
||||
// Don't use internal fetch retry mechanim since we
|
||||
// are already retrying here.
|
||||
const shimFetchMaxRetryPrevious = shim.fetchMaxRetrySet(0);
|
||||
const defer = () => {
|
||||
shim.fetchMaxRetrySet(shimFetchMaxRetryPrevious);
|
||||
};
|
||||
|
||||
while (true) {
|
||||
try {
|
||||
const result = await fn();
|
||||
defer();
|
||||
return result;
|
||||
} catch (error) {
|
||||
if (retryCount >= count || !requestCanBeRepeated(error)) {
|
||||
defer();
|
||||
throw error;
|
||||
}
|
||||
retryCount++;
|
||||
await time.sleep(1 + retryCount * 3);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class FileApi {
|
||||
|
||||
private baseDir_: any;
|
||||
private driver_: any;
|
||||
private logger_: Logger = new Logger();
|
||||
private syncTargetId_: number = null;
|
||||
private tempDirName_: string = null;
|
||||
public requestRepeatCount_: number = null; // For testing purpose only - normally this value should come from the driver
|
||||
private remoteDateOffset_ = 0;
|
||||
private remoteDateNextCheckTime_ = 0;
|
||||
private remoteDateMutex_ = new Mutex();
|
||||
private initialized_ = false;
|
||||
|
||||
// `baseDir` may be a fixed string or a function that lazily resolves the directory.
constructor(baseDir: string | Function, driver: any) {
	this.driver_ = driver;
	this.baseDir_ = baseDir;
	// Give the driver a back-reference so it can reach the FileApi if needed.
	this.driver_.fileApi_ = this;
}
|
||||
|
||||
// One-time initialisation; subsequent calls are no-ops.
async initialize() {
	if (this.initialized_) return;
	this.initialized_ = true;
	// Delegate to the driver when it has its own initialisation step.
	if (!this.driver_.initialize) return;
	return this.driver_.initialize(this.fullPath(''));
}
|
||||
|
||||
// Measures the clock difference between this device and the sync target by
// uploading a marker file and comparing its remote timestamp with the local
// midpoint of the upload round-trip.
async fetchRemoteDateOffset_() {
	const tempFile = `${this.tempDirName()}/timeCheck${Math.round(Math.random() * 1000000)}.txt`;
	const uploadStart = Date.now();
	await this.put(tempFile, 'timeCheck');

	// Normally it should be possible to read the file back immediately but
	// just in case, read it in a loop.
	let remoteStat = null;
	const pollStart = Date.now();
	while (Date.now() - pollStart < 5000) {
		remoteStat = await this.stat(tempFile);
		if (remoteStat) break;
		await time.msleep(200);
	}

	if (!remoteStat) throw new Error('Timed out trying to get sync target clock time');

	void this.delete(tempFile); // No need to await for this call

	// Assume the remote write happened roughly halfway through the round-trip.
	const localEstimate = Math.round((Date.now() + uploadStart) / 2);
	return remoteStat.updated_time - localEstimate;
}
|
||||
|
||||
// Approximates the current time on the sync target. It caches the time offset to
// improve performance. On failure the offset falls back to 0 (device time) and is
// retried sooner (1 minute instead of 10).
async remoteDate() {
	const shouldSyncTime = () => {
		return !this.remoteDateNextCheckTime_ || Date.now() > this.remoteDateNextCheckTime_;
	};

	if (shouldSyncTime()) {
		const release = await this.remoteDateMutex_.acquire();

		try {
			// Another call might have refreshed the time while we were waiting for the mutex,
			// so check again if we need to refresh.
			if (shouldSyncTime()) {
				this.remoteDateOffset_ = await this.fetchRemoteDateOffset_();
				// The sync target clock should rarely change but the device one might,
				// so we need to refresh relatively frequently.
				this.remoteDateNextCheckTime_ = Date.now() + 10 * 60 * 1000;
			}
		} catch (error) {
			logger.warn('Could not retrieve remote date - defaulting to device date:', error);
			this.remoteDateOffset_ = 0;
			this.remoteDateNextCheckTime_ = Date.now() + 60 * 1000;
		} finally {
			release();
		}
	}

	return new Date(Date.now() + this.remoteDateOffset_);
}
// Ideally all requests repeating should be done at the FileApi level to remove duplicate code in the drivers, but
// historically some drivers (eg. OneDrive) are already handling request repeating, so this is optional, per driver,
// and it defaults to no repeating.
requestRepeatCount() {
	// A test override takes precedence over the driver-provided value.
	const override = this.requestRepeatCount_;
	if (override !== null) return override;
	return this.driver_.requestRepeatCount ? this.driver_.requestRepeatCount() : 0;
}
// Returns the driver's request log, or an empty list when the driver does not keep one.
lastRequests() {
	if (!this.driver_.lastRequests) return [];
	return this.driver_.lastRequests();
}
// Clears the driver's request log, if it keeps one.
clearLastRequests() {
	if (!this.driver_.clearLastRequests) return;
	this.driver_.clearLastRequests();
}
// Resolves the base directory, which may have been provided as a function.
baseDir() {
	if (typeof this.baseDir_ === 'function') return this.baseDir_();
	return this.baseDir_;
}
// Returns the temp directory name; setTempDirName() must have been called first.
tempDirName() {
	const name = this.tempDirName_;
	if (name === null) throw Error('Temp dir not set!');
	return name;
}
// Sets the name of the directory used for temporary files (eg. the clock-check file).
setTempDirName(v: string) {
	this.tempDirName_ = v;
}
// Returns the platform filesystem driver provided by the shim.
fsDriver() {
	return shim.fsDriver();
}
// Returns the underlying sync-target driver passed to the constructor.
driver() {
	return this.driver_;
}
// Associates this API instance with a sync target ID.
setSyncTargetId(v: number) {
	this.syncTargetId_ = v;
}
// Returns the sync target ID; setSyncTargetId() must have been called first.
syncTargetId() {
	const id = this.syncTargetId_;
	if (id === null) throw new Error('syncTargetId has not been set!!');
	return id;
}
// Sets the logger; a null/undefined argument falls back to a fresh no-op Logger.
setLogger(l: Logger) {
	this.logger_ = l ? l : new Logger();
}
// Returns the logger previously set with setLogger().
logger() {
	return this.logger_;
}
// Joins the base directory and `path` with '/', skipping any empty segment.
fullPath(path: string) {
	const segments = [this.baseDir(), path].filter(s => !!s);
	return segments.join('/');
}
// DRIVER MUST RETURN PATHS RELATIVE TO `path`
|
||||
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
|
||||
async list(path = '', options: any = null) {
|
||||
if (!options) options = {};
|
||||
if (!('includeHidden' in options)) options.includeHidden = false;
|
||||
if (!('context' in options)) options.context = null;
|
||||
if (!('includeDirs' in options)) options.includeDirs = true;
|
||||
if (!('syncItemsOnly' in options)) options.syncItemsOnly = false;
|
||||
|
||||
logger.debug(`list ${this.baseDir()}`);
|
||||
|
||||
const result = await tryAndRepeat(() => this.driver_.list(this.fullPath(path), options), this.requestRepeatCount());
|
||||
|
||||
if (!options.includeHidden) {
|
||||
const temp = [];
|
||||
for (let i = 0; i < result.items.length; i++) {
|
||||
if (!isHidden(result.items[i].path)) temp.push(result.items[i]);
|
||||
}
|
||||
result.items = temp;
|
||||
}
|
||||
|
||||
if (!options.includeDirs) {
|
||||
result.items = result.items.filter((f: any) => !f.isDir);
|
||||
}
|
||||
|
||||
if (options.syncItemsOnly) {
|
||||
result.items = result.items.filter((f: any) => !f.isDir && BaseItem.isSystemPath(f.path));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Deprecated - sets the remote file timestamp via the driver.
setTimestamp(path: string, timestampMs: number) {
	logger.debug(`setTimestamp ${this.fullPath(path)}`);
	return tryAndRepeat(() => this.driver_.setTimestamp(this.fullPath(path), timestampMs), this.requestRepeatCount());
}
// Creates the directory at `path` (relative to the base dir), with request retries.
mkdir(path: string) {
	logger.debug(`mkdir ${this.fullPath(path)}`);
	return tryAndRepeat(() => this.driver_.mkdir(this.fullPath(path)), this.requestRepeatCount());
}
// Stats `path` via the driver. The returned object's `path` is reset to the
// caller-supplied relative path. Returns a falsy value when the item does not exist.
async stat(path: string) {
	logger.debug(`stat ${this.fullPath(path)}`);

	const output = await tryAndRepeat(() => this.driver_.stat(this.fullPath(path)), this.requestRepeatCount());

	if (!output) return output;
	output.path = path;
	return output;
}
// Returns UTF-8 encoded string by default, or a Response if `options.target = 'file'`.
// The request is retried per requestRepeatCount().
get(path: string, options: any = null) {
	if (!options) options = {};
	if (!options.encoding) options.encoding = 'utf8';
	logger.debug(`get ${this.fullPath(path)}`);
	return tryAndRepeat(() => this.driver_.get(this.fullPath(path), options), this.requestRepeatCount());
}
// Writes `content` to `path`. When `options.source === 'file'`, `options.path` must
// point to an existing local file, otherwise a 'fileNotFound' JoplinError is thrown
// before any request is made.
async put(path: string, content: any, options: any = null) {
	logger.debug(`put ${this.fullPath(path)}`, options);

	if (options && options.source === 'file') {
		if (!(await this.fsDriver().exists(options.path))) throw new JoplinError(`File not found: ${options.path}`, 'fileNotFound');
	}

	return tryAndRepeat(() => this.driver_.put(this.fullPath(path), content, options), this.requestRepeatCount());
}
// Deletes the remote item at `path`, with request retries.
delete(path: string) {
	logger.debug(`delete ${this.fullPath(path)}`);
	return tryAndRepeat(() => this.driver_.delete(this.fullPath(path)), this.requestRepeatCount());
}
// Deprecated - moves/renames a remote item via the driver.
move(oldPath: string, newPath: string) {
	logger.debug(`move ${this.fullPath(oldPath)} => ${this.fullPath(newPath)}`);
	return tryAndRepeat(() => this.driver_.move(this.fullPath(oldPath), this.fullPath(newPath)), this.requestRepeatCount());
}
// Deprecated - delegates target formatting to the driver.
format() {
	return tryAndRepeat(() => this.driver_.format(), this.requestRepeatCount());
}
// Asks the driver to wipe the content of the base directory, with request retries.
clearRoot() {
	return tryAndRepeat(() => this.driver_.clearRoot(this.baseDir()), this.requestRepeatCount());
}
// Delegates to the driver's delta implementation (which may be basicDelta below
// for targets without a native delta API), with request retries.
delta(path: string, options: any = null) {
	logger.debug(`delta ${this.fullPath(path)}`);
	return tryAndRepeat(() => this.driver_.delta(this.fullPath(path), options), this.requestRepeatCount());
}
}
|
||||
|
||||
function basicDeltaContextFromOptions_(options: any) {
|
||||
const output: any = {
|
||||
timestamp: 0,
|
||||
filesAtTimestamp: [],
|
||||
statsCache: null,
|
||||
statIdsCache: null,
|
||||
deletedItemsProcessed: false,
|
||||
};
|
||||
|
||||
if (!options || !options.context) return output;
|
||||
|
||||
const d = new Date(options.context.timestamp);
|
||||
|
||||
output.timestamp = isNaN(d.getTime()) ? 0 : options.context.timestamp;
|
||||
output.filesAtTimestamp = Array.isArray(options.context.filesAtTimestamp) ? options.context.filesAtTimestamp.slice() : [];
|
||||
output.statsCache = options.context && options.context.statsCache ? options.context.statsCache : null;
|
||||
output.statIdsCache = options.context && options.context.statIdsCache ? options.context.statIdsCache : null;
|
||||
output.deletedItemsProcessed = options.context && 'deletedItemsProcessed' in options.context ? options.context.deletedItemsProcessed : false;
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
// This is the basic delta algorithm, which can be used in case the cloud service does not have
// a built-in delta API. OneDrive and Dropbox have one for example, but Nextcloud and obviously
// the file system do not.
//
// Returns { hasMore, context, items } where `items` contains changed stats (and,
// on the last page, deletion markers) and `context` must be passed back on the
// next call to resume pagination.
async function basicDelta(path: string, getDirStatFn: Function, options: any) {
	// At most this many changed items are returned per call (deletions may exceed it).
	const outputLimit = 50;
	const itemIds = await options.allItemIdsHandler();
	if (!Array.isArray(itemIds)) throw new Error('Delta API not supported - local IDs must be provided');

	const logger = options && options.logger ? options.logger : new Logger();

	const context = basicDeltaContextFromOptions_(options);

	if (context.timestamp > Date.now()) {
		logger.warn(`BasicDelta: Context timestamp is greater than current time: ${context.timestamp}`);
		logger.warn('BasicDelta: Sync will continue but it is likely that nothing will be synced');
	}

	const newContext = {
		timestamp: context.timestamp,
		filesAtTimestamp: context.filesAtTimestamp.slice(),
		statsCache: context.statsCache,
		statIdsCache: context.statIdsCache,
		deletedItemsProcessed: context.deletedItemsProcessed,
	};

	// Stats are cached until all items have been processed (until hasMore is false)
	if (newContext.statsCache === null) {
		newContext.statsCache = await getDirStatFn(path);
		// Oldest first, so pagination by timestamp below is stable.
		newContext.statsCache.sort(function(a: any, b: any) {
			return a.updated_time - b.updated_time;
		});
		newContext.statIdsCache = newContext.statsCache.filter((item: any) => BaseItem.isSystemPath(item.path)).map((item: any) => BaseItem.pathToId(item.path));
		newContext.statIdsCache.sort(); // Items must be sorted to use binary search below
	}

	let output = [];

	// Counters for the diagnostic report logged below.
	const updateReport = {
		timestamp: context.timestamp,
		older: 0,
		newer: 0,
		equal: 0,
	};

	// Find out which files have been changed since the last time. Note that we keep
	// both the timestamp of the most recent change, *and* the items that exactly match
	// this timestamp. This to handle cases where an item is modified while this delta
	// function is running. For example:
	// t0: Item 1 is changed
	// t0: Sync items - run delta function
	// t0: While delta() is running, modify Item 2
	// Since item 2 was modified within the same millisecond, it would be skipped in the
	// next sync if we relied exclusively on a timestamp.
	for (let i = 0; i < newContext.statsCache.length; i++) {
		const stat = newContext.statsCache[i];

		if (stat.isDir) continue;

		if (stat.updated_time < context.timestamp) {
			updateReport.older++;
			continue;
		}

		// Special case for items that exactly match the timestamp
		if (stat.updated_time === context.timestamp) {
			if (context.filesAtTimestamp.indexOf(stat.path) >= 0) {
				updateReport.equal++;
				continue;
			}
		}

		if (stat.updated_time > newContext.timestamp) {
			newContext.timestamp = stat.updated_time;
			newContext.filesAtTimestamp = [];
			updateReport.newer++;
		}

		newContext.filesAtTimestamp.push(stat.path);
		output.push(stat);

		if (output.length >= outputLimit) break;
	}

	logger.info(`BasicDelta: Report: ${JSON.stringify(updateReport)}`);

	if (!newContext.deletedItemsProcessed) {
		// Find out which items have been deleted on the sync target by comparing the items
		// we have to the items on the target.
		// Note that when deleted items are processed it might result in the output having
		// more items than outputLimit. This is acceptable since delete operations are cheap.
		const deletedItems = [];
		for (let i = 0; i < itemIds.length; i++) {
			const itemId = itemIds[i];

			if (ArrayUtils.binarySearch(newContext.statIdsCache, itemId) < 0) {
				deletedItems.push({
					path: BaseItem.systemPath(itemId),
					isDeleted: true,
				});
			}
		}

		const percentDeleted = itemIds.length ? deletedItems.length / itemIds.length : 0;

		// If more than 90% of the notes are going to be deleted, it's most likely a
		// configuration error or bug. For example, if the user moves their Nextcloud
		// directory, or if a network drive gets disconnected and returns an empty dir
		// instead of an error. In that case, we don't wipe out the user data, unless
		// they have switched off the fail-safe.
		if (options.wipeOutFailSafe && percentDeleted >= 0.90) throw new JoplinError(sprintf('Fail-safe: Sync was interrupted because %d%% of the data (%d items) is about to be deleted. To override this behaviour disable the fail-safe in the sync settings.', Math.round(percentDeleted * 100), deletedItems.length), 'failSafe');

		output = output.concat(deletedItems);
	}

	newContext.deletedItemsProcessed = true;

	const hasMore = output.length >= outputLimit;

	if (!hasMore) {
		// Clear temporary info from context. It's especially important to remove deletedItemsProcessed
		// so that they are processed again on the next sync.
		newContext.statsCache = null;
		newContext.statIdsCache = null;
		delete newContext.deletedItemsProcessed;
	}

	return {
		hasMore: hasMore,
		context: newContext,
		items: output,
	};
}
|
||||
export { FileApi, basicDelta };
|
@ -6,7 +6,7 @@ import time from '../time';
|
||||
import markdownUtils from '../markdownUtils';
|
||||
import { _ } from '../locale';
|
||||
|
||||
const { Database } = require('../database.js');
|
||||
import Database from '../database';
|
||||
import ItemChange from './ItemChange';
|
||||
const JoplinError = require('../JoplinError.js');
|
||||
const { sprintf } = require('sprintf-js');
|
||||
@ -115,7 +115,7 @@ export default class BaseItem extends BaseModel {
|
||||
return r.total;
|
||||
}
|
||||
|
||||
static systemPath(itemOrId: any, extension: string = null) {
|
||||
public static systemPath(itemOrId: any, extension: string = null) {
|
||||
if (extension === null) extension = 'md';
|
||||
|
||||
if (typeof itemOrId === 'string') return `${itemOrId}.${extension}`;
|
||||
@ -225,7 +225,7 @@ export default class BaseItem extends BaseModel {
|
||||
|
||||
// Don't create a deleted_items entry when conflicted notes are deleted
|
||||
// since no other client have (or should have) them.
|
||||
let conflictNoteIds = [];
|
||||
let conflictNoteIds: string[] = [];
|
||||
if (this.modelType() == BaseModel.TYPE_NOTE) {
|
||||
const conflictNotes = await this.db().selectAll(`SELECT id FROM notes WHERE id IN ("${ids.join('","')}") AND is_conflict = 1`);
|
||||
conflictNoteIds = conflictNotes.map((n: NoteEntity) => {
|
||||
|
@ -4,7 +4,7 @@ import time from '../time';
|
||||
import { _ } from '../locale';
|
||||
|
||||
import Note from './Note';
|
||||
const { Database } = require('../database.js');
|
||||
import Database from '../database';
|
||||
import BaseItem from './BaseItem';
|
||||
const { substrWithEllipsis } = require('../string-utils.js');
|
||||
|
||||
@ -107,7 +107,7 @@ export default class Folder extends BaseItem {
|
||||
return 'c04f1c7c04f1c7c04f1c7c04f1c7c04f';
|
||||
}
|
||||
|
||||
static conflictFolder() {
|
||||
static conflictFolder(): FolderEntity {
|
||||
return {
|
||||
type_: this.TYPE_FOLDER,
|
||||
id: this.conflictFolderId(),
|
||||
@ -380,8 +380,8 @@ export default class Folder extends BaseItem {
|
||||
return output;
|
||||
}
|
||||
|
||||
static load(id: string) {
|
||||
if (id == this.conflictFolderId()) return this.conflictFolder();
|
||||
static load(id: string, _options: any = null): Promise<FolderEntity> {
|
||||
if (id == this.conflictFolderId()) return Promise.resolve(this.conflictFolder());
|
||||
return super.load(id);
|
||||
}
|
||||
|
||||
|
@ -110,7 +110,7 @@ export default class Note extends BaseItem {
|
||||
return BaseModel.TYPE_NOTE;
|
||||
}
|
||||
|
||||
static linkedItemIds(body: string) {
|
||||
static linkedItemIds(body: string): string[] {
|
||||
if (!body || body.length <= 32) return [];
|
||||
|
||||
const links = urlUtils.extractResourceUrls(body);
|
||||
@ -319,7 +319,8 @@ export default class Note extends BaseItem {
|
||||
|
||||
static previewFieldsSql(fields: string[] = null) {
|
||||
if (fields === null) fields = this.previewFields();
|
||||
return this.db().escapeFields(fields).join(',');
|
||||
const escaped = this.db().escapeFields(fields);
|
||||
return Array.isArray(escaped) ? escaped.join(',') : escaped;
|
||||
}
|
||||
|
||||
static async loadFolderNoteByField(folderId: string, field: string, value: any) {
|
||||
|
@ -1,4 +1,5 @@
|
||||
import BaseModel from '../BaseModel';
|
||||
import { SqlQuery } from '../database';
|
||||
|
||||
// - If is_associated = 1, note_resources indicates which note_id is currently associated with the given resource_id
|
||||
// - If is_associated = 0, note_resources indicates which note_id *was* associated with the given resource_id
|
||||
@ -14,6 +15,27 @@ export default class NoteResource extends BaseModel {
|
||||
return BaseModel.TYPE_NOTE_RESOURCE;
|
||||
}
|
||||
|
||||
public static async applySharedStatusToLinkedResources() {
|
||||
const queries: SqlQuery[] = [];
|
||||
|
||||
queries.push({ sql: `
|
||||
UPDATE resources
|
||||
SET is_shared = 0
|
||||
` });
|
||||
|
||||
queries.push({ sql: `
|
||||
UPDATE resources
|
||||
SET is_shared = 1
|
||||
WHERE id IN (
|
||||
SELECT DISTINCT note_resources.resource_id
|
||||
FROM notes JOIN note_resources ON notes.id = note_resources.note_id
|
||||
WHERE notes.is_shared = 1
|
||||
)
|
||||
` });
|
||||
|
||||
await this.db().transactionExecBatch(queries);
|
||||
}
|
||||
|
||||
static async associatedNoteIds(resourceId: string): Promise<string[]> {
|
||||
const rows = await this.modelSelectAll('SELECT note_id FROM note_resources WHERE resource_id = ? AND is_associated = 1', [resourceId]);
|
||||
return rows.map((r: any) => r.note_id);
|
||||
|
@ -42,11 +42,15 @@ export default class Resource extends BaseItem {
|
||||
return imageMimeTypes.indexOf(type.toLowerCase()) >= 0;
|
||||
}
|
||||
|
||||
static fetchStatuses(resourceIds: string[]) {
|
||||
if (!resourceIds.length) return [];
|
||||
static fetchStatuses(resourceIds: string[]): Promise<any[]> {
|
||||
if (!resourceIds.length) return Promise.resolve([]);
|
||||
return this.db().selectAll(`SELECT resource_id, fetch_status FROM resource_local_states WHERE resource_id IN ("${resourceIds.join('","')}")`);
|
||||
}
|
||||
|
||||
public static sharedResourceIds(): Promise<string[]> {
|
||||
return this.db().selectAllFields('SELECT id FROM resources WHERE is_shared = 1', {}, 'id');
|
||||
}
|
||||
|
||||
static errorFetchStatuses() {
|
||||
return this.db().selectAll(`
|
||||
SELECT title AS resource_title, resource_id, fetch_error
|
||||
|
@ -1,6 +1,6 @@
|
||||
import BaseModel from '../BaseModel';
|
||||
import { ResourceLocalStateEntity } from '../services/database/types';
|
||||
const { Database } = require('../database.js');
|
||||
import Database from '../database';
|
||||
|
||||
export default class ResourceLocalState extends BaseModel {
|
||||
static tableName() {
|
||||
|
@ -3,7 +3,7 @@ import { _, supportedLocalesToLanguages, defaultLocale } from '../locale';
|
||||
import { ltrimSlashes } from '../path-utils';
|
||||
import eventManager from '../eventManager';
|
||||
import BaseModel from '../BaseModel';
|
||||
const { Database } = require('../database.js');
|
||||
import Database from '../database';
|
||||
const SyncTargetRegistry = require('../SyncTargetRegistry.js');
|
||||
import time from '../time';
|
||||
const { sprintf } = require('sprintf-js');
|
||||
|
@ -1,233 +1,227 @@
|
||||
const Logger = require('./Logger').default;
|
||||
const Setting = require('./models/Setting').default;
|
||||
const shim = require('./shim').default;
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.reg = void 0;
|
||||
const Logger_1 = require("./Logger");
|
||||
const Setting_1 = require("./models/Setting");
|
||||
const shim_1 = require("./shim");
|
||||
const SyncTargetRegistry = require('./SyncTargetRegistry.js');
|
||||
|
||||
const reg = {};
|
||||
|
||||
reg.syncTargets_ = {};
|
||||
|
||||
reg.logger = () => {
|
||||
if (!reg.logger_) {
|
||||
// console.warn('Calling logger before it is initialized');
|
||||
return new Logger();
|
||||
}
|
||||
|
||||
return reg.logger_;
|
||||
};
|
||||
|
||||
reg.setLogger = l => {
|
||||
reg.logger_ = l;
|
||||
};
|
||||
|
||||
reg.setShowErrorMessageBoxHandler = v => {
|
||||
reg.showErrorMessageBoxHandler_ = v;
|
||||
};
|
||||
|
||||
reg.showErrorMessageBox = message => {
|
||||
if (!reg.showErrorMessageBoxHandler_) return;
|
||||
reg.showErrorMessageBoxHandler_(message);
|
||||
};
|
||||
|
||||
reg.resetSyncTarget = (syncTargetId = null) => {
|
||||
if (syncTargetId === null) syncTargetId = Setting.value('sync.target');
|
||||
delete reg.syncTargets_[syncTargetId];
|
||||
};
|
||||
|
||||
reg.syncTargetNextcloud = () => {
|
||||
return reg.syncTarget(SyncTargetRegistry.nameToId('nextcloud'));
|
||||
};
|
||||
|
||||
reg.syncTarget = (syncTargetId = null) => {
|
||||
if (syncTargetId === null) syncTargetId = Setting.value('sync.target');
|
||||
if (reg.syncTargets_[syncTargetId]) return reg.syncTargets_[syncTargetId];
|
||||
|
||||
const SyncTargetClass = SyncTargetRegistry.classById(syncTargetId);
|
||||
if (!reg.db()) throw new Error('Cannot initialize sync without a db');
|
||||
|
||||
const target = new SyncTargetClass(reg.db());
|
||||
target.setLogger(reg.logger());
|
||||
reg.syncTargets_[syncTargetId] = target;
|
||||
return target;
|
||||
};
|
||||
|
||||
// This can be used when some data has been modified and we want to make
|
||||
// sure it gets synced. So we wait for the current sync operation to
|
||||
// finish (if one is running), then we trigger a sync just after.
|
||||
reg.waitForSyncFinishedThenSync = async () => {
|
||||
reg.waitForReSyncCalls_.push(true);
|
||||
try {
|
||||
const synchronizer = await reg.syncTarget().synchronizer();
|
||||
await synchronizer.waitForSyncToFinish();
|
||||
await reg.scheduleSync(0);
|
||||
} finally {
|
||||
reg.waitForReSyncCalls_.pop();
|
||||
}
|
||||
};
|
||||
|
||||
reg.scheduleSync = async (delay = null, syncOptions = null) => {
|
||||
reg.schedSyncCalls_.push(true);
|
||||
|
||||
try {
|
||||
if (delay === null) delay = 1000 * 10;
|
||||
if (syncOptions === null) syncOptions = {};
|
||||
|
||||
let promiseResolve = null;
|
||||
const promise = new Promise((resolve) => {
|
||||
promiseResolve = resolve;
|
||||
});
|
||||
|
||||
if (reg.scheduleSyncId_) {
|
||||
shim.clearTimeout(reg.scheduleSyncId_);
|
||||
reg.scheduleSyncId_ = null;
|
||||
}
|
||||
|
||||
reg.logger().debug('Scheduling sync operation...', delay);
|
||||
|
||||
if (Setting.value('env') === 'dev' && delay !== 0) {
|
||||
reg.logger().info('Schedule sync DISABLED!!!');
|
||||
return;
|
||||
}
|
||||
|
||||
const timeoutCallback = async () => {
|
||||
reg.timerCallbackCalls_.push(true);
|
||||
try {
|
||||
reg.scheduleSyncId_ = null;
|
||||
reg.logger().info('Preparing scheduled sync');
|
||||
|
||||
const syncTargetId = Setting.value('sync.target');
|
||||
|
||||
if (!(await reg.syncTarget(syncTargetId).isAuthenticated())) {
|
||||
reg.logger().info('Synchroniser is missing credentials - manual sync required to authenticate.');
|
||||
promiseResolve();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const sync = await reg.syncTarget(syncTargetId).synchronizer();
|
||||
|
||||
const contextKey = `sync.${syncTargetId}.context`;
|
||||
let context = Setting.value(contextKey);
|
||||
try {
|
||||
context = context ? JSON.parse(context) : {};
|
||||
} catch (error) {
|
||||
// Clearing the context is inefficient since it means all items are going to be re-downloaded
|
||||
// however it won't result in duplicate items since the synchroniser is going to compare each
|
||||
// item to the current state.
|
||||
reg.logger().warn(`Could not parse JSON sync context ${contextKey}:`, context);
|
||||
reg.logger().info('Clearing context and starting from scratch');
|
||||
context = null;
|
||||
}
|
||||
|
||||
try {
|
||||
reg.logger().info('Starting scheduled sync');
|
||||
const options = Object.assign({}, syncOptions, { context: context });
|
||||
if (!options.saveContextHandler) {
|
||||
options.saveContextHandler = newContext => {
|
||||
Setting.setValue(contextKey, JSON.stringify(newContext));
|
||||
};
|
||||
}
|
||||
const newContext = await sync.start(options);
|
||||
Setting.setValue(contextKey, JSON.stringify(newContext));
|
||||
} catch (error) {
|
||||
if (error.code == 'alreadyStarted') {
|
||||
reg.logger().info(error.message);
|
||||
} else {
|
||||
promiseResolve();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
reg.logger().info('Could not run background sync:');
|
||||
reg.logger().info(error);
|
||||
}
|
||||
reg.setupRecurrentSync();
|
||||
promiseResolve();
|
||||
|
||||
} finally {
|
||||
reg.timerCallbackCalls_.pop();
|
||||
}
|
||||
};
|
||||
|
||||
if (delay === 0) {
|
||||
timeoutCallback();
|
||||
} else {
|
||||
reg.scheduleSyncId_ = shim.setTimeout(timeoutCallback, delay);
|
||||
}
|
||||
return promise;
|
||||
|
||||
} finally {
|
||||
reg.schedSyncCalls_.pop();
|
||||
}
|
||||
};
|
||||
|
||||
reg.setupRecurrentSync = () => {
|
||||
reg.setupRecurrentCalls_.push(true);
|
||||
|
||||
try {
|
||||
if (reg.recurrentSyncId_) {
|
||||
shim.clearInterval(reg.recurrentSyncId_);
|
||||
reg.recurrentSyncId_ = null;
|
||||
}
|
||||
|
||||
if (!Setting.value('sync.interval')) {
|
||||
reg.logger().debug('Recurrent sync is disabled');
|
||||
} else {
|
||||
reg.logger().debug(`Setting up recurrent sync with interval ${Setting.value('sync.interval')}`);
|
||||
|
||||
if (Setting.value('env') === 'dev') {
|
||||
reg.logger().info('Recurrent sync operation DISABLED!!!');
|
||||
return;
|
||||
}
|
||||
|
||||
reg.recurrentSyncId_ = shim.setInterval(() => {
|
||||
reg.logger().info('Running background sync on timer...');
|
||||
reg.scheduleSync(0);
|
||||
}, 1000 * Setting.value('sync.interval'));
|
||||
}
|
||||
} finally {
|
||||
reg.setupRecurrentCalls_.pop();
|
||||
}
|
||||
};
|
||||
|
||||
reg.setDb = v => {
|
||||
reg.db_ = v;
|
||||
};
|
||||
|
||||
reg.db = () => {
|
||||
return reg.db_;
|
||||
};
|
||||
|
||||
reg.cancelTimers_ = () => {
|
||||
if (this.recurrentSyncId_) {
|
||||
shim.clearInterval(reg.recurrentSyncId_);
|
||||
this.recurrentSyncId_ = null;
|
||||
}
|
||||
if (reg.scheduleSyncId_) {
|
||||
shim.clearTimeout(reg.scheduleSyncId_);
|
||||
reg.scheduleSyncId_ = null;
|
||||
}
|
||||
};
|
||||
|
||||
reg.cancelTimers = async () => {
|
||||
reg.logger().info('Cancelling sync timers');
|
||||
reg.cancelTimers_();
|
||||
|
||||
return new Promise((resolve) => {
|
||||
shim.setInterval(() => {
|
||||
// ensure processing complete
|
||||
if (!reg.setupRecurrentCalls_.length && !reg.schedSyncCalls_.length && !reg.timerCallbackCalls_.length && !reg.waitForReSyncCalls_.length) {
|
||||
reg.cancelTimers_();
|
||||
resolve();
|
||||
}
|
||||
}, 100);
|
||||
});
|
||||
};
|
||||
|
||||
reg.syncCalls_ = [];
|
||||
reg.schedSyncCalls_ = [];
|
||||
reg.waitForReSyncCalls_ = [];
|
||||
reg.setupRecurrentCalls_ = [];
|
||||
reg.timerCallbackCalls_ = [];
|
||||
|
||||
module.exports = { reg };
|
||||
class Registry {
|
||||
constructor() {
|
||||
this.syncTargets_ = {};
|
||||
this.logger_ = null;
|
||||
this.schedSyncCalls_ = [];
|
||||
this.waitForReSyncCalls_ = [];
|
||||
this.setupRecurrentCalls_ = [];
|
||||
this.timerCallbackCalls_ = [];
|
||||
this.syncTarget = (syncTargetId = null) => {
|
||||
if (syncTargetId === null)
|
||||
syncTargetId = Setting_1.default.value('sync.target');
|
||||
if (this.syncTargets_[syncTargetId])
|
||||
return this.syncTargets_[syncTargetId];
|
||||
const SyncTargetClass = SyncTargetRegistry.classById(syncTargetId);
|
||||
if (!this.db())
|
||||
throw new Error('Cannot initialize sync without a db');
|
||||
const target = new SyncTargetClass(this.db());
|
||||
target.setLogger(this.logger());
|
||||
this.syncTargets_[syncTargetId] = target;
|
||||
return target;
|
||||
};
|
||||
// This can be used when some data has been modified and we want to make
|
||||
// sure it gets synced. So we wait for the current sync operation to
|
||||
// finish (if one is running), then we trigger a sync just after.
|
||||
this.waitForSyncFinishedThenSync = () => __awaiter(this, void 0, void 0, function* () {
|
||||
this.waitForReSyncCalls_.push(true);
|
||||
try {
|
||||
const synchronizer = yield this.syncTarget().synchronizer();
|
||||
yield synchronizer.waitForSyncToFinish();
|
||||
yield this.scheduleSync(0);
|
||||
}
|
||||
finally {
|
||||
this.waitForReSyncCalls_.pop();
|
||||
}
|
||||
});
|
||||
this.scheduleSync = (delay = null, syncOptions = null) => __awaiter(this, void 0, void 0, function* () {
|
||||
this.schedSyncCalls_.push(true);
|
||||
try {
|
||||
if (delay === null)
|
||||
delay = 1000 * 10;
|
||||
if (syncOptions === null)
|
||||
syncOptions = {};
|
||||
let promiseResolve = null;
|
||||
const promise = new Promise((resolve) => {
|
||||
promiseResolve = resolve;
|
||||
});
|
||||
if (this.scheduleSyncId_) {
|
||||
shim_1.default.clearTimeout(this.scheduleSyncId_);
|
||||
this.scheduleSyncId_ = null;
|
||||
}
|
||||
this.logger().debug('Scheduling sync operation...', delay);
|
||||
if (Setting_1.default.value('env') === 'dev' && delay !== 0) {
|
||||
this.logger().info('Schedule sync DISABLED!!!');
|
||||
return;
|
||||
}
|
||||
const timeoutCallback = () => __awaiter(this, void 0, void 0, function* () {
|
||||
this.timerCallbackCalls_.push(true);
|
||||
try {
|
||||
this.scheduleSyncId_ = null;
|
||||
this.logger().info('Preparing scheduled sync');
|
||||
const syncTargetId = Setting_1.default.value('sync.target');
|
||||
if (!(yield this.syncTarget(syncTargetId).isAuthenticated())) {
|
||||
this.logger().info('Synchroniser is missing credentials - manual sync required to authenticate.');
|
||||
promiseResolve();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const sync = yield this.syncTarget(syncTargetId).synchronizer();
|
||||
const contextKey = `sync.${syncTargetId}.context`;
|
||||
let context = Setting_1.default.value(contextKey);
|
||||
try {
|
||||
context = context ? JSON.parse(context) : {};
|
||||
}
|
||||
catch (error) {
|
||||
// Clearing the context is inefficient since it means all items are going to be re-downloaded
|
||||
// however it won't result in duplicate items since the synchroniser is going to compare each
|
||||
// item to the current state.
|
||||
this.logger().warn(`Could not parse JSON sync context ${contextKey}:`, context);
|
||||
this.logger().info('Clearing context and starting from scratch');
|
||||
context = null;
|
||||
}
|
||||
try {
|
||||
this.logger().info('Starting scheduled sync');
|
||||
const options = Object.assign({}, syncOptions, { context: context });
|
||||
if (!options.saveContextHandler) {
|
||||
options.saveContextHandler = (newContext) => {
|
||||
Setting_1.default.setValue(contextKey, JSON.stringify(newContext));
|
||||
};
|
||||
}
|
||||
const newContext = yield sync.start(options);
|
||||
Setting_1.default.setValue(contextKey, JSON.stringify(newContext));
|
||||
}
|
||||
catch (error) {
|
||||
if (error.code == 'alreadyStarted') {
|
||||
this.logger().info(error.message);
|
||||
}
|
||||
else {
|
||||
promiseResolve();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
this.logger().info('Could not run background sync:');
|
||||
this.logger().info(error);
|
||||
}
|
||||
this.setupRecurrentSync();
|
||||
promiseResolve();
|
||||
}
|
||||
finally {
|
||||
this.timerCallbackCalls_.pop();
|
||||
}
|
||||
});
|
||||
if (delay === 0) {
|
||||
void timeoutCallback();
|
||||
}
|
||||
else {
|
||||
this.scheduleSyncId_ = shim_1.default.setTimeout(timeoutCallback, delay);
|
||||
}
|
||||
return promise;
|
||||
}
|
||||
finally {
|
||||
this.schedSyncCalls_.pop();
|
||||
}
|
||||
});
|
||||
this.setDb = (v) => {
|
||||
this.db_ = v;
|
||||
};
|
||||
this.cancelTimers = () => __awaiter(this, void 0, void 0, function* () {
|
||||
this.logger().info('Cancelling sync timers');
|
||||
this.cancelTimers_();
|
||||
return new Promise((resolve) => {
|
||||
shim_1.default.setInterval(() => {
|
||||
// ensure processing complete
|
||||
if (!this.setupRecurrentCalls_.length && !this.schedSyncCalls_.length && !this.timerCallbackCalls_.length && !this.waitForReSyncCalls_.length) {
|
||||
this.cancelTimers_();
|
||||
resolve(null);
|
||||
}
|
||||
}, 100);
|
||||
});
|
||||
});
|
||||
}
|
||||
logger() {
|
||||
if (!this.logger_) {
|
||||
// console.warn('Calling logger before it is initialized');
|
||||
return new Logger_1.default();
|
||||
}
|
||||
return this.logger_;
|
||||
}
|
||||
setLogger(l) {
|
||||
this.logger_ = l;
|
||||
}
|
||||
setShowErrorMessageBoxHandler(v) {
|
||||
this.showErrorMessageBoxHandler_ = v;
|
||||
}
|
||||
showErrorMessageBox(message) {
|
||||
if (!this.showErrorMessageBoxHandler_)
|
||||
return;
|
||||
this.showErrorMessageBoxHandler_(message);
|
||||
}
|
||||
resetSyncTarget(syncTargetId = null) {
|
||||
if (syncTargetId === null)
|
||||
syncTargetId = Setting_1.default.value('sync.target');
|
||||
delete this.syncTargets_[syncTargetId];
|
||||
}
|
||||
syncTargetNextcloud() {
|
||||
return this.syncTarget(SyncTargetRegistry.nameToId('nextcloud'));
|
||||
}
|
||||
setupRecurrentSync() {
|
||||
this.setupRecurrentCalls_.push(true);
|
||||
try {
|
||||
if (this.recurrentSyncId_) {
|
||||
shim_1.default.clearInterval(this.recurrentSyncId_);
|
||||
this.recurrentSyncId_ = null;
|
||||
}
|
||||
if (!Setting_1.default.value('sync.interval')) {
|
||||
this.logger().debug('Recurrent sync is disabled');
|
||||
}
|
||||
else {
|
||||
this.logger().debug(`Setting up recurrent sync with interval ${Setting_1.default.value('sync.interval')}`);
|
||||
if (Setting_1.default.value('env') === 'dev') {
|
||||
this.logger().info('Recurrent sync operation DISABLED!!!');
|
||||
return;
|
||||
}
|
||||
this.recurrentSyncId_ = shim_1.default.setInterval(() => {
|
||||
this.logger().info('Running background sync on timer...');
|
||||
void this.scheduleSync(0);
|
||||
}, 1000 * Setting_1.default.value('sync.interval'));
|
||||
}
|
||||
}
|
||||
finally {
|
||||
this.setupRecurrentCalls_.pop();
|
||||
}
|
||||
}
|
||||
db() {
|
||||
return this.db_;
|
||||
}
|
||||
cancelTimers_() {
|
||||
if (this.recurrentSyncId_) {
|
||||
shim_1.default.clearInterval(this.recurrentSyncId_);
|
||||
this.recurrentSyncId_ = null;
|
||||
}
|
||||
if (this.scheduleSyncId_) {
|
||||
shim_1.default.clearTimeout(this.scheduleSyncId_);
|
||||
this.scheduleSyncId_ = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
const reg = new Registry();
|
||||
exports.reg = reg;
|
||||
//# sourceMappingURL=registry.js.map
|
241
packages/lib/registry.ts
Normal file
241
packages/lib/registry.ts
Normal file
@ -0,0 +1,241 @@
|
||||
import Logger from './Logger';
|
||||
import Setting from './models/Setting';
|
||||
import shim from './shim';
|
||||
const SyncTargetRegistry = require('./SyncTargetRegistry.js');
|
||||
|
||||
class Registry {
|
||||
|
||||
private syncTargets_: any = {};
|
||||
private logger_: Logger = null;
|
||||
private schedSyncCalls_: boolean[] = [];
|
||||
private waitForReSyncCalls_: boolean[]= [];
|
||||
private setupRecurrentCalls_: boolean[] = [];
|
||||
private timerCallbackCalls_: boolean[] = [];
|
||||
private showErrorMessageBoxHandler_: any;
|
||||
private scheduleSyncId_: any;
|
||||
private recurrentSyncId_: any;
|
||||
private db_: any;
|
||||
|
||||
logger() {
|
||||
if (!this.logger_) {
|
||||
// console.warn('Calling logger before it is initialized');
|
||||
return new Logger();
|
||||
}
|
||||
|
||||
return this.logger_;
|
||||
}
|
||||
|
||||
setLogger(l: Logger) {
|
||||
this.logger_ = l;
|
||||
}
|
||||
|
||||
setShowErrorMessageBoxHandler(v: any) {
|
||||
this.showErrorMessageBoxHandler_ = v;
|
||||
}
|
||||
|
||||
showErrorMessageBox(message: string) {
|
||||
if (!this.showErrorMessageBoxHandler_) return;
|
||||
this.showErrorMessageBoxHandler_(message);
|
||||
}
|
||||
|
||||
resetSyncTarget(syncTargetId: number = null) {
|
||||
if (syncTargetId === null) syncTargetId = Setting.value('sync.target');
|
||||
delete this.syncTargets_[syncTargetId];
|
||||
}
|
||||
|
||||
syncTargetNextcloud() {
|
||||
return this.syncTarget(SyncTargetRegistry.nameToId('nextcloud'));
|
||||
}
|
||||
|
||||
syncTarget = (syncTargetId: number = null) => {
|
||||
if (syncTargetId === null) syncTargetId = Setting.value('sync.target');
|
||||
if (this.syncTargets_[syncTargetId]) return this.syncTargets_[syncTargetId];
|
||||
|
||||
const SyncTargetClass = SyncTargetRegistry.classById(syncTargetId);
|
||||
if (!this.db()) throw new Error('Cannot initialize sync without a db');
|
||||
|
||||
const target = new SyncTargetClass(this.db());
|
||||
target.setLogger(this.logger());
|
||||
this.syncTargets_[syncTargetId] = target;
|
||||
return target;
|
||||
};
|
||||
|
||||
// This can be used when some data has been modified and we want to make
|
||||
// sure it gets synced. So we wait for the current sync operation to
|
||||
// finish (if one is running), then we trigger a sync just after.
|
||||
waitForSyncFinishedThenSync = async () => {
|
||||
this.waitForReSyncCalls_.push(true);
|
||||
try {
|
||||
const synchronizer = await this.syncTarget().synchronizer();
|
||||
await synchronizer.waitForSyncToFinish();
|
||||
await this.scheduleSync(0);
|
||||
} finally {
|
||||
this.waitForReSyncCalls_.pop();
|
||||
}
|
||||
};
|
||||
|
||||
scheduleSync = async (delay: number = null, syncOptions: any = null) => {
|
||||
this.schedSyncCalls_.push(true);
|
||||
|
||||
try {
|
||||
if (delay === null) delay = 1000 * 10;
|
||||
if (syncOptions === null) syncOptions = {};
|
||||
|
||||
let promiseResolve: Function = null;
|
||||
const promise = new Promise((resolve) => {
|
||||
promiseResolve = resolve;
|
||||
});
|
||||
|
||||
if (this.scheduleSyncId_) {
|
||||
shim.clearTimeout(this.scheduleSyncId_);
|
||||
this.scheduleSyncId_ = null;
|
||||
}
|
||||
|
||||
this.logger().debug('Scheduling sync operation...', delay);
|
||||
|
||||
if (Setting.value('env') === 'dev' && delay !== 0) {
|
||||
this.logger().info('Schedule sync DISABLED!!!');
|
||||
return;
|
||||
}
|
||||
|
||||
const timeoutCallback = async () => {
|
||||
this.timerCallbackCalls_.push(true);
|
||||
try {
|
||||
this.scheduleSyncId_ = null;
|
||||
this.logger().info('Preparing scheduled sync');
|
||||
|
||||
const syncTargetId = Setting.value('sync.target');
|
||||
|
||||
if (!(await this.syncTarget(syncTargetId).isAuthenticated())) {
|
||||
this.logger().info('Synchroniser is missing credentials - manual sync required to authenticate.');
|
||||
promiseResolve();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const sync = await this.syncTarget(syncTargetId).synchronizer();
|
||||
|
||||
const contextKey = `sync.${syncTargetId}.context`;
|
||||
let context = Setting.value(contextKey);
|
||||
try {
|
||||
context = context ? JSON.parse(context) : {};
|
||||
} catch (error) {
|
||||
// Clearing the context is inefficient since it means all items are going to be re-downloaded
|
||||
// however it won't result in duplicate items since the synchroniser is going to compare each
|
||||
// item to the current state.
|
||||
this.logger().warn(`Could not parse JSON sync context ${contextKey}:`, context);
|
||||
this.logger().info('Clearing context and starting from scratch');
|
||||
context = null;
|
||||
}
|
||||
|
||||
try {
|
||||
this.logger().info('Starting scheduled sync');
|
||||
const options = Object.assign({}, syncOptions, { context: context });
|
||||
if (!options.saveContextHandler) {
|
||||
options.saveContextHandler = (newContext: any) => {
|
||||
Setting.setValue(contextKey, JSON.stringify(newContext));
|
||||
};
|
||||
}
|
||||
const newContext = await sync.start(options);
|
||||
Setting.setValue(contextKey, JSON.stringify(newContext));
|
||||
} catch (error) {
|
||||
if (error.code == 'alreadyStarted') {
|
||||
this.logger().info(error.message);
|
||||
} else {
|
||||
promiseResolve();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger().info('Could not run background sync:');
|
||||
this.logger().info(error);
|
||||
}
|
||||
this.setupRecurrentSync();
|
||||
promiseResolve();
|
||||
|
||||
} finally {
|
||||
this.timerCallbackCalls_.pop();
|
||||
}
|
||||
};
|
||||
|
||||
if (delay === 0) {
|
||||
void timeoutCallback();
|
||||
} else {
|
||||
this.scheduleSyncId_ = shim.setTimeout(timeoutCallback, delay);
|
||||
}
|
||||
return promise;
|
||||
|
||||
} finally {
|
||||
this.schedSyncCalls_.pop();
|
||||
}
|
||||
};
|
||||
|
||||
setupRecurrentSync() {
|
||||
this.setupRecurrentCalls_.push(true);
|
||||
|
||||
try {
|
||||
if (this.recurrentSyncId_) {
|
||||
shim.clearInterval(this.recurrentSyncId_);
|
||||
this.recurrentSyncId_ = null;
|
||||
}
|
||||
|
||||
if (!Setting.value('sync.interval')) {
|
||||
this.logger().debug('Recurrent sync is disabled');
|
||||
} else {
|
||||
this.logger().debug(`Setting up recurrent sync with interval ${Setting.value('sync.interval')}`);
|
||||
|
||||
if (Setting.value('env') === 'dev') {
|
||||
this.logger().info('Recurrent sync operation DISABLED!!!');
|
||||
return;
|
||||
}
|
||||
|
||||
this.recurrentSyncId_ = shim.setInterval(() => {
|
||||
this.logger().info('Running background sync on timer...');
|
||||
void this.scheduleSync(0);
|
||||
}, 1000 * Setting.value('sync.interval'));
|
||||
}
|
||||
} finally {
|
||||
this.setupRecurrentCalls_.pop();
|
||||
}
|
||||
}
|
||||
|
||||
setDb = (v: any) => {
|
||||
this.db_ = v;
|
||||
};
|
||||
|
||||
db() {
|
||||
return this.db_;
|
||||
}
|
||||
|
||||
cancelTimers_() {
|
||||
if (this.recurrentSyncId_) {
|
||||
shim.clearInterval(this.recurrentSyncId_);
|
||||
this.recurrentSyncId_ = null;
|
||||
}
|
||||
if (this.scheduleSyncId_) {
|
||||
shim.clearTimeout(this.scheduleSyncId_);
|
||||
this.scheduleSyncId_ = null;
|
||||
}
|
||||
}
|
||||
|
||||
cancelTimers = async () => {
|
||||
this.logger().info('Cancelling sync timers');
|
||||
this.cancelTimers_();
|
||||
|
||||
return new Promise((resolve) => {
|
||||
shim.setInterval(() => {
|
||||
// ensure processing complete
|
||||
if (!this.setupRecurrentCalls_.length && !this.schedSyncCalls_.length && !this.timerCallbackCalls_.length && !this.waitForReSyncCalls_.length) {
|
||||
this.cancelTimers_();
|
||||
resolve(null);
|
||||
}
|
||||
}, 100);
|
||||
});
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
const reg = new Registry();
|
||||
|
||||
// eslint-disable-next-line import/prefer-default-export
|
||||
export { reg };
|
@ -8,11 +8,13 @@ import Note from '../models/Note';
|
||||
import Resource from '../models/Resource';
|
||||
import SearchEngine from './searchengine/SearchEngine';
|
||||
import ItemChangeUtils from './ItemChangeUtils';
|
||||
import time from '../time';
|
||||
const { sprintf } = require('sprintf-js');
|
||||
|
||||
export default class ResourceService extends BaseService {
|
||||
|
||||
public static isRunningInBackground_: boolean = false;
|
||||
private isIndexing_: boolean = false;
|
||||
|
||||
private maintenanceCalls_: boolean[] = [];
|
||||
private maintenanceTimer1_: any = null;
|
||||
@ -21,76 +23,89 @@ export default class ResourceService extends BaseService {
|
||||
public async indexNoteResources() {
|
||||
this.logger().info('ResourceService::indexNoteResources: Start');
|
||||
|
||||
await ItemChange.waitForAllSaved();
|
||||
|
||||
let foundNoteWithEncryption = false;
|
||||
|
||||
while (true) {
|
||||
const changes = await ItemChange.modelSelectAll(`
|
||||
SELECT id, item_id, type
|
||||
FROM item_changes
|
||||
WHERE item_type = ?
|
||||
AND id > ?
|
||||
ORDER BY id ASC
|
||||
LIMIT 10
|
||||
`,
|
||||
[BaseModel.TYPE_NOTE, Setting.value('resourceService.lastProcessedChangeId')]
|
||||
);
|
||||
|
||||
if (!changes.length) break;
|
||||
|
||||
const noteIds = changes.map((a: any) => a.item_id);
|
||||
const notes = await Note.modelSelectAll(`SELECT id, title, body, encryption_applied FROM notes WHERE id IN ("${noteIds.join('","')}")`);
|
||||
|
||||
const noteById = (noteId: string) => {
|
||||
for (let i = 0; i < notes.length; i++) {
|
||||
if (notes[i].id === noteId) return notes[i];
|
||||
}
|
||||
// The note may have been deleted since the change was recorded. For example in this case:
|
||||
// - Note created (Some Change object is recorded)
|
||||
// - Note is deleted
|
||||
// - ResourceService indexer runs.
|
||||
// In that case, there will be a change for the note, but the note will be gone.
|
||||
return null;
|
||||
};
|
||||
|
||||
for (let i = 0; i < changes.length; i++) {
|
||||
const change = changes[i];
|
||||
|
||||
if (change.type === ItemChange.TYPE_CREATE || change.type === ItemChange.TYPE_UPDATE) {
|
||||
const note = noteById(change.item_id);
|
||||
|
||||
if (note) {
|
||||
if (note.encryption_applied) {
|
||||
// If we hit an encrypted note, abort processing for now.
|
||||
// Note will eventually get decrypted and processing can resume then.
|
||||
// This is a limitation of the change tracking system - we cannot skip a change
|
||||
// and keep processing the rest since we only keep track of "lastProcessedChangeId".
|
||||
foundNoteWithEncryption = true;
|
||||
break;
|
||||
}
|
||||
|
||||
await this.setAssociatedResources(note.id, note.body);
|
||||
} else {
|
||||
this.logger().warn(`ResourceService::indexNoteResources: A change was recorded for a note that has been deleted: ${change.item_id}`);
|
||||
}
|
||||
} else if (change.type === ItemChange.TYPE_DELETE) {
|
||||
await NoteResource.remove(change.item_id);
|
||||
} else {
|
||||
throw new Error(`Invalid change type: ${change.type}`);
|
||||
}
|
||||
|
||||
Setting.setValue('resourceService.lastProcessedChangeId', change.id);
|
||||
}
|
||||
|
||||
if (foundNoteWithEncryption) break;
|
||||
if (this.isIndexing_) {
|
||||
this.logger().info('ResourceService::indexNoteResources: Already indexing - waiting for it to finish');
|
||||
await time.waitTillCondition(() => !this.isIndexing_);
|
||||
return;
|
||||
}
|
||||
|
||||
await Setting.saveAll();
|
||||
this.isIndexing_ = true;
|
||||
|
||||
await NoteResource.addOrphanedResources();
|
||||
try {
|
||||
await ItemChange.waitForAllSaved();
|
||||
|
||||
await ItemChangeUtils.deleteProcessedChanges();
|
||||
let foundNoteWithEncryption = false;
|
||||
|
||||
while (true) {
|
||||
const changes = await ItemChange.modelSelectAll(`
|
||||
SELECT id, item_id, type
|
||||
FROM item_changes
|
||||
WHERE item_type = ?
|
||||
AND id > ?
|
||||
ORDER BY id ASC
|
||||
LIMIT 10
|
||||
`, [BaseModel.TYPE_NOTE, Setting.value('resourceService.lastProcessedChangeId')]
|
||||
);
|
||||
|
||||
if (!changes.length) break;
|
||||
|
||||
const noteIds = changes.map((a: any) => a.item_id);
|
||||
const notes = await Note.modelSelectAll(`SELECT id, title, body, encryption_applied FROM notes WHERE id IN ("${noteIds.join('","')}")`);
|
||||
|
||||
const noteById = (noteId: string) => {
|
||||
for (let i = 0; i < notes.length; i++) {
|
||||
if (notes[i].id === noteId) return notes[i];
|
||||
}
|
||||
// The note may have been deleted since the change was recorded. For example in this case:
|
||||
// - Note created (Some Change object is recorded)
|
||||
// - Note is deleted
|
||||
// - ResourceService indexer runs.
|
||||
// In that case, there will be a change for the note, but the note will be gone.
|
||||
return null;
|
||||
};
|
||||
|
||||
for (let i = 0; i < changes.length; i++) {
|
||||
const change = changes[i];
|
||||
|
||||
if (change.type === ItemChange.TYPE_CREATE || change.type === ItemChange.TYPE_UPDATE) {
|
||||
const note = noteById(change.item_id);
|
||||
|
||||
if (note) {
|
||||
if (note.encryption_applied) {
|
||||
// If we hit an encrypted note, abort processing for now.
|
||||
// Note will eventually get decrypted and processing can resume then.
|
||||
// This is a limitation of the change tracking system - we cannot skip a change
|
||||
// and keep processing the rest since we only keep track of "lastProcessedChangeId".
|
||||
foundNoteWithEncryption = true;
|
||||
break;
|
||||
}
|
||||
|
||||
await this.setAssociatedResources(note.id, note.body);
|
||||
} else {
|
||||
this.logger().warn(`ResourceService::indexNoteResources: A change was recorded for a note that has been deleted: ${change.item_id}`);
|
||||
}
|
||||
} else if (change.type === ItemChange.TYPE_DELETE) {
|
||||
await NoteResource.remove(change.item_id);
|
||||
} else {
|
||||
throw new Error(`Invalid change type: ${change.type}`);
|
||||
}
|
||||
|
||||
Setting.setValue('resourceService.lastProcessedChangeId', change.id);
|
||||
}
|
||||
|
||||
if (foundNoteWithEncryption) break;
|
||||
}
|
||||
|
||||
await Setting.saveAll();
|
||||
|
||||
await NoteResource.addOrphanedResources();
|
||||
|
||||
await ItemChangeUtils.deleteProcessedChanges();
|
||||
} catch (error) {
|
||||
this.logger().error('ResourceService::indexNoteResources:', error);
|
||||
}
|
||||
|
||||
this.isIndexing_ = false;
|
||||
|
||||
this.logger().info('ResourceService::indexNoteResources: Completed');
|
||||
}
|
||||
@ -176,7 +191,7 @@ export default class ResourceService extends BaseService {
|
||||
const iid = shim.setInterval(() => {
|
||||
if (!this.maintenanceCalls_.length) {
|
||||
shim.clearInterval(iid);
|
||||
resolve();
|
||||
resolve(null);
|
||||
}
|
||||
}, 100);
|
||||
});
|
||||
|
@ -6,6 +6,7 @@ export interface AlarmEntity {
|
||||
'id'?: number | null;
|
||||
'note_id'?: string;
|
||||
'trigger_time'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface DeletedItemEntity {
|
||||
'id'?: number | null;
|
||||
@ -13,6 +14,7 @@ export interface DeletedItemEntity {
|
||||
'item_id'?: string;
|
||||
'deleted_time'?: number;
|
||||
'sync_target'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface FolderEntity {
|
||||
'id'?: string | null;
|
||||
@ -25,6 +27,7 @@ export interface FolderEntity {
|
||||
'encryption_applied'?: number;
|
||||
'parent_id'?: string;
|
||||
'is_shared'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ItemChangeEntity {
|
||||
'id'?: number | null;
|
||||
@ -34,6 +37,7 @@ export interface ItemChangeEntity {
|
||||
'created_time'?: number;
|
||||
'source'?: number;
|
||||
'before_change_item'?: string;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface KeyValueEntity {
|
||||
'id'?: number | null;
|
||||
@ -41,6 +45,7 @@ export interface KeyValueEntity {
|
||||
'value'?: string;
|
||||
'type'?: number;
|
||||
'updated_time'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface MasterKeyEntity {
|
||||
'id'?: string | null;
|
||||
@ -50,12 +55,14 @@ export interface MasterKeyEntity {
|
||||
'encryption_method'?: number;
|
||||
'checksum'?: string;
|
||||
'content'?: string;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface MigrationEntity {
|
||||
'id'?: number | null;
|
||||
'number'?: number;
|
||||
'updated_time'?: number;
|
||||
'created_time'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface NoteResourceEntity {
|
||||
'id'?: number | null;
|
||||
@ -63,6 +70,7 @@ export interface NoteResourceEntity {
|
||||
'resource_id'?: string;
|
||||
'is_associated'?: number;
|
||||
'last_seen_time'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface NoteTagEntity {
|
||||
'id'?: string | null;
|
||||
@ -75,6 +83,7 @@ export interface NoteTagEntity {
|
||||
'encryption_cipher_text'?: string;
|
||||
'encryption_applied'?: number;
|
||||
'is_shared'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface NoteEntity {
|
||||
'id'?: string | null;
|
||||
@ -102,6 +111,7 @@ export interface NoteEntity {
|
||||
'encryption_applied'?: number;
|
||||
'markup_language'?: number;
|
||||
'is_shared'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface NotesNormalizedEntity {
|
||||
'id'?: string;
|
||||
@ -116,12 +126,14 @@ export interface NotesNormalizedEntity {
|
||||
'longitude'?: number;
|
||||
'altitude'?: number;
|
||||
'source_url'?: string;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ResourceLocalStateEntity {
|
||||
'id'?: number | null;
|
||||
'resource_id'?: string;
|
||||
'fetch_status'?: number;
|
||||
'fetch_error'?: string;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ResourceEntity {
|
||||
'id'?: string | null;
|
||||
@ -138,12 +150,14 @@ export interface ResourceEntity {
|
||||
'encryption_blob_encrypted'?: number;
|
||||
'size'?: number;
|
||||
'is_shared'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ResourcesToDownloadEntity {
|
||||
'id'?: number | null;
|
||||
'resource_id'?: string;
|
||||
'updated_time'?: number;
|
||||
'created_time'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface RevisionEntity {
|
||||
'id'?: string | null;
|
||||
@ -158,10 +172,12 @@ export interface RevisionEntity {
|
||||
'encryption_applied'?: number;
|
||||
'updated_time'?: number;
|
||||
'created_time'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface SettingEntity {
|
||||
'key'?: string | null;
|
||||
'value'?: string | null;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface SyncItemEntity {
|
||||
'id'?: number | null;
|
||||
@ -173,6 +189,7 @@ export interface SyncItemEntity {
|
||||
'sync_disabled_reason'?: string;
|
||||
'force_sync'?: number;
|
||||
'item_location'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface TableFieldEntity {
|
||||
'id'?: number | null;
|
||||
@ -180,6 +197,7 @@ export interface TableFieldEntity {
|
||||
'field_name'?: string;
|
||||
'field_type'?: number;
|
||||
'field_default'?: string | null;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface TagEntity {
|
||||
'id'?: string | null;
|
||||
@ -192,6 +210,7 @@ export interface TagEntity {
|
||||
'encryption_applied'?: number;
|
||||
'is_shared'?: number;
|
||||
'parent_id'?: string;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface TagsWithNoteCountEntity {
|
||||
'id'?: string | null;
|
||||
@ -199,8 +218,10 @@ export interface TagsWithNoteCountEntity {
|
||||
'created_time'?: number | null;
|
||||
'updated_time'?: number | null;
|
||||
'note_count'?: any | null;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface VersionEntity {
|
||||
'version'?: number;
|
||||
'table_fields_version'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
|
@ -10,8 +10,8 @@ import { RequestMethod, Request } from '../Api';
|
||||
import markdownUtils from '../../../markdownUtils';
|
||||
import collectionToPaginatedResults from '../utils/collectionToPaginatedResults';
|
||||
|
||||
const { reg } = require('../../../registry.js');
|
||||
const { Database } = require('../../../database.js');
|
||||
import { reg } from '../../../registry';
|
||||
import Database from '../../../database';
|
||||
import Folder from '../../../models/Folder';
|
||||
import Note from '../../../models/Note';
|
||||
import Tag from '../../../models/Tag';
|
||||
|
@ -2,7 +2,7 @@ import shim from '../../../shim';
|
||||
import MigrationHandler from '../MigrationHandler';
|
||||
const { useEffect, useState } = shim.react();
|
||||
import Setting from '../../../models/Setting';
|
||||
const { reg } = require('../../../registry');
|
||||
import { reg } from '../../../registry';
|
||||
|
||||
export interface SyncTargetUpgradeResult {
|
||||
done: boolean;
|
||||
|
@ -21,6 +21,14 @@ let react_: any = null;
|
||||
const shim = {
|
||||
Geolocation: null as any,
|
||||
|
||||
msleep_: (ms: number) => {
|
||||
return new Promise((resolve: Function) => {
|
||||
shim.setTimeout(() => {
|
||||
resolve(null);
|
||||
}, ms);
|
||||
});
|
||||
},
|
||||
|
||||
isNode: () => {
|
||||
if (typeof process === 'undefined') return false;
|
||||
if (shim.isElectron()) return true;
|
||||
@ -140,8 +148,6 @@ const shim = {
|
||||
},
|
||||
|
||||
fetchWithRetry: async function(fetchFn: Function, options: any = null) {
|
||||
const time = require('./time');
|
||||
|
||||
if (!options) options = {};
|
||||
if (!options.timeout) options.timeout = 1000 * 120; // ms
|
||||
if (!('maxRetry' in options)) options.maxRetry = shim.fetchMaxRetry_;
|
||||
@ -155,7 +161,7 @@ const shim = {
|
||||
if (shim.fetchRequestCanBeRetried(error)) {
|
||||
retryCount++;
|
||||
if (retryCount > options.maxRetry) throw error;
|
||||
await time.sleep(retryCount * 3);
|
||||
await shim.msleep_(retryCount * 3000);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
|
@ -368,10 +368,9 @@ const themeCache_: any = {};
|
||||
function themeStyle(themeId: number) {
|
||||
if (!themeId) throw new Error('Theme must be specified');
|
||||
|
||||
const zoomRatio = 1; // Setting.value('style.zoom') / 100;
|
||||
const editorFontSize = Setting.value('style.editor.fontSize');
|
||||
const zoomRatio = 1;
|
||||
|
||||
const cacheKey = [themeId, zoomRatio, editorFontSize].join('-');
|
||||
const cacheKey = themeId;
|
||||
if (themeCache_[cacheKey]) return themeCache_[cacheKey];
|
||||
|
||||
// Font size are not theme specific, but they must be referenced
|
||||
@ -380,8 +379,6 @@ function themeStyle(themeId: number) {
|
||||
const fontSizes: any = {
|
||||
fontSize: Math.round(12 * zoomRatio),
|
||||
toolbarIconSize: 18,
|
||||
editorFontSize: editorFontSize,
|
||||
textAreaLineHeight: Math.round(globalStyle.textAreaLineHeight * editorFontSize / 12),
|
||||
};
|
||||
|
||||
fontSizes.noteViewerFontSize = Math.round(fontSizes.fontSize * 1.25);
|
||||
|
@ -1,66 +1,68 @@
|
||||
import shim from './shim';
|
||||
const moment = require('moment');
|
||||
|
||||
type ConditionHandler = ()=> boolean;
|
||||
|
||||
class Time {
|
||||
|
||||
private dateFormat_: string = 'DD/MM/YYYY';
|
||||
private timeFormat_: string = 'HH:mm';
|
||||
private locale_: string = 'en-us';
|
||||
|
||||
locale() {
|
||||
public locale() {
|
||||
return this.locale_;
|
||||
}
|
||||
|
||||
setLocale(v: string) {
|
||||
public setLocale(v: string) {
|
||||
moment.locale(v);
|
||||
this.locale_ = v;
|
||||
}
|
||||
|
||||
dateFormat() {
|
||||
public dateFormat() {
|
||||
return this.dateFormat_;
|
||||
}
|
||||
|
||||
setDateFormat(v: string) {
|
||||
public setDateFormat(v: string) {
|
||||
this.dateFormat_ = v;
|
||||
}
|
||||
|
||||
timeFormat() {
|
||||
public timeFormat() {
|
||||
return this.timeFormat_;
|
||||
}
|
||||
|
||||
setTimeFormat(v: string) {
|
||||
public setTimeFormat(v: string) {
|
||||
this.timeFormat_ = v;
|
||||
}
|
||||
|
||||
use24HourFormat() {
|
||||
public use24HourFormat() {
|
||||
return this.timeFormat() ? this.timeFormat().includes('HH') : true;
|
||||
}
|
||||
|
||||
formatDateToLocal(date: Date, format: string = null) {
|
||||
public formatDateToLocal(date: Date, format: string = null) {
|
||||
return this.formatMsToLocal(date.getTime(), format);
|
||||
}
|
||||
|
||||
dateTimeFormat() {
|
||||
public dateTimeFormat() {
|
||||
return `${this.dateFormat()} ${this.timeFormat()}`;
|
||||
}
|
||||
|
||||
unix() {
|
||||
public unix() {
|
||||
return Math.floor(Date.now() / 1000);
|
||||
}
|
||||
|
||||
unixMs() {
|
||||
public unixMs() {
|
||||
return Date.now();
|
||||
}
|
||||
|
||||
unixMsToObject(ms: number) {
|
||||
public unixMsToObject(ms: number) {
|
||||
return new Date(ms);
|
||||
}
|
||||
|
||||
unixMsToS(ms: number) {
|
||||
public unixMsToS(ms: number) {
|
||||
return Math.floor(ms / 1000);
|
||||
}
|
||||
|
||||
unixMsToIso(ms: number) {
|
||||
public unixMsToIso(ms: number) {
|
||||
return (
|
||||
`${moment
|
||||
.unix(ms / 1000)
|
||||
@ -69,7 +71,7 @@ class Time {
|
||||
);
|
||||
}
|
||||
|
||||
unixMsToIsoSec(ms: number) {
|
||||
public unixMsToIsoSec(ms: number) {
|
||||
return (
|
||||
`${moment
|
||||
.unix(ms / 1000)
|
||||
@ -78,20 +80,20 @@ class Time {
|
||||
);
|
||||
}
|
||||
|
||||
unixMsToLocalDateTime(ms: number) {
|
||||
public unixMsToLocalDateTime(ms: number) {
|
||||
return moment.unix(ms / 1000).format('DD/MM/YYYY HH:mm');
|
||||
}
|
||||
|
||||
unixMsToLocalHms(ms: number) {
|
||||
public unixMsToLocalHms(ms: number) {
|
||||
return moment.unix(ms / 1000).format('HH:mm:ss');
|
||||
}
|
||||
|
||||
formatMsToLocal(ms: number, format: string = null) {
|
||||
public formatMsToLocal(ms: number, format: string = null) {
|
||||
if (format === null) format = this.dateTimeFormat();
|
||||
return moment(ms).format(format);
|
||||
}
|
||||
|
||||
formatLocalToMs(localDateTime: any, format: string = null) {
|
||||
public formatLocalToMs(localDateTime: any, format: string = null) {
|
||||
if (format === null) format = this.dateTimeFormat();
|
||||
const m = moment(localDateTime, format);
|
||||
if (m.isValid()) return m.toDate().getTime();
|
||||
@ -99,7 +101,7 @@ class Time {
|
||||
}
|
||||
|
||||
// Mostly used as a utility function for the DateTime Electron component
|
||||
anythingToDateTime(o: any, defaultValue: Date = null) {
|
||||
public anythingToDateTime(o: any, defaultValue: Date = null) {
|
||||
if (o && o.toDate) return o.toDate();
|
||||
if (!o) return defaultValue;
|
||||
let m = moment(o, time.dateTimeFormat());
|
||||
@ -108,7 +110,7 @@ class Time {
|
||||
return m.isValid() ? m.toDate() : defaultValue;
|
||||
}
|
||||
|
||||
msleep(ms: number) {
|
||||
public msleep(ms: number) {
|
||||
return new Promise((resolve: Function) => {
|
||||
shim.setTimeout(() => {
|
||||
resolve();
|
||||
@ -116,20 +118,33 @@ class Time {
|
||||
});
|
||||
}
|
||||
|
||||
sleep(seconds: number) {
|
||||
public sleep(seconds: number) {
|
||||
return this.msleep(seconds * 1000);
|
||||
}
|
||||
|
||||
|
||||
goBackInTime(startDate: any, n: number, period: any) {
|
||||
public goBackInTime(startDate: any, n: number, period: any) {
|
||||
// period is a string (eg. "day", "week", "month", "year" ), n is an integer
|
||||
return moment(startDate).startOf(period).subtract(n, period).format('x');
|
||||
}
|
||||
|
||||
goForwardInTime(startDate: any, n: number, period: any) {
|
||||
public goForwardInTime(startDate: any, n: number, period: any) {
|
||||
return moment(startDate).startOf(period).add(n, period).format('x');
|
||||
}
|
||||
|
||||
public async waitTillCondition(condition: ConditionHandler) {
|
||||
if (condition()) return;
|
||||
|
||||
return new Promise(resolve => {
|
||||
const iid = setInterval(() => {
|
||||
if (condition()) {
|
||||
clearInterval(iid);
|
||||
resolve(null);
|
||||
}
|
||||
}, 1000);
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const time = new Time();
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { _ } from './locale';
|
||||
import Setting from './models/Setting';
|
||||
const { reg } = require('./registry.js');
|
||||
import { reg } from './registry';
|
||||
|
||||
export default function versionInfo(packageInfo: any) {
|
||||
const p = packageInfo;
|
||||
|
3392
packages/plugin-repo-cli/package-lock.json
generated
3392
packages/plugin-repo-cli/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -1,6 +1,6 @@
|
||||
import htmlUtils from './htmlUtils';
|
||||
import linkReplacement from './MdToHtml/linkReplacement';
|
||||
import utils from './utils';
|
||||
import utils, { ItemIdToUrlHandler } from './utils';
|
||||
|
||||
// TODO: fix
|
||||
// import Setting from '@joplin/lib/models/Setting';
|
||||
@ -32,6 +32,7 @@ interface RenderOptions {
|
||||
resources: any;
|
||||
postMessageSyntax: string;
|
||||
enableLongPress: boolean;
|
||||
itemIdToUrl?: ItemIdToUrlHandler;
|
||||
}
|
||||
|
||||
interface RenderResult {
|
||||
@ -39,6 +40,13 @@ interface RenderResult {
|
||||
pluginAssets: any[];
|
||||
}
|
||||
|
||||
// https://github.com/es-shims/String.prototype.trimStart/blob/main/implementation.js
|
||||
function trimStart(s: string): string {
|
||||
// eslint-disable-next-line no-control-regex
|
||||
const startWhitespace = /^[\x09\x0A\x0B\x0C\x0D\x20\xA0\u1680\u180E\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u202F\u205F\u3000\u2028\u2029\uFEFF]*/;
|
||||
return s.replace(startWhitespace, '');
|
||||
}
|
||||
|
||||
export default class HtmlToHtml {
|
||||
|
||||
private resourceBaseUrl_;
|
||||
@ -73,7 +81,7 @@ export default class HtmlToHtml {
|
||||
}
|
||||
|
||||
splitHtml(html: string) {
|
||||
const trimmedHtml = html.trimStart();
|
||||
const trimmedHtml = trimStart(html);
|
||||
if (trimmedHtml.indexOf('<style>') !== 0) return { html: html, css: '' };
|
||||
|
||||
const closingIndex = trimmedHtml.indexOf('</style>');
|
||||
@ -109,7 +117,7 @@ export default class HtmlToHtml {
|
||||
html = htmlUtils.processImageTags(html, (data: any) => {
|
||||
if (!data.src) return null;
|
||||
|
||||
const r = utils.imageReplacement(this.ResourceModel_, data.src, options.resources, this.resourceBaseUrl_);
|
||||
const r = utils.imageReplacement(this.ResourceModel_, data.src, options.resources, this.resourceBaseUrl_, options.itemIdToUrl);
|
||||
if (!r) return null;
|
||||
|
||||
if (typeof r === 'string') {
|
||||
|
@ -8,6 +8,18 @@ export enum MarkupLanguage {
|
||||
Html = 2,
|
||||
}
|
||||
|
||||
export interface RenderResultPluginAsset {
|
||||
name: string;
|
||||
path: string;
|
||||
mime: string;
|
||||
}
|
||||
|
||||
export interface RenderResult {
|
||||
html: string;
|
||||
pluginAssets: RenderResultPluginAsset[];
|
||||
cssStrings: string[];
|
||||
}
|
||||
|
||||
export default class MarkupToHtml {
|
||||
|
||||
static MARKUP_LANGUAGE_MARKDOWN: number = MarkupLanguage.Markdown;
|
||||
@ -75,7 +87,7 @@ export default class MarkupToHtml {
|
||||
if (r.clearCache) r.clearCache();
|
||||
}
|
||||
|
||||
async render(markupLanguage: MarkupLanguage, markup: string, theme: any, options: any) {
|
||||
async render(markupLanguage: MarkupLanguage, markup: string, theme: any, options: any): Promise<RenderResult> {
|
||||
return this.renderer(markupLanguage).render(markup, theme, options);
|
||||
}
|
||||
|
||||
|
@ -3,6 +3,8 @@ import noteStyle from './noteStyle';
|
||||
import { fileExtension } from './pathUtils';
|
||||
import setupLinkify from './MdToHtml/setupLinkify';
|
||||
import validateLinks from './MdToHtml/validateLinks';
|
||||
import { ItemIdToUrlHandler } from './utils';
|
||||
import { RenderResult, RenderResultPluginAsset } from './MarkupToHtml';
|
||||
|
||||
const MarkdownIt = require('markdown-it');
|
||||
const md5 = require('md5');
|
||||
@ -114,18 +116,6 @@ interface PluginContext {
|
||||
currentLinks: Link[];
|
||||
}
|
||||
|
||||
interface RenderResultPluginAsset {
|
||||
name: string;
|
||||
path: string;
|
||||
mime: string;
|
||||
}
|
||||
|
||||
interface RenderResult {
|
||||
html: string;
|
||||
pluginAssets: RenderResultPluginAsset[];
|
||||
cssStrings: string[];
|
||||
}
|
||||
|
||||
export interface RuleOptions {
|
||||
context: PluginContext;
|
||||
theme: any;
|
||||
@ -157,6 +147,8 @@ export interface RuleOptions {
|
||||
audioPlayerEnabled: boolean;
|
||||
videoPlayerEnabled: boolean;
|
||||
pdfViewerEnabled: boolean;
|
||||
|
||||
itemIdToUrl?: ItemIdToUrlHandler;
|
||||
}
|
||||
|
||||
export default class MdToHtml {
|
||||
|
@ -1,4 +1,4 @@
|
||||
import utils from '../utils';
|
||||
import utils, { ItemIdToUrlHandler } from '../utils';
|
||||
const Entities = require('html-entities').AllHtmlEntities;
|
||||
const htmlentities = new Entities().encode;
|
||||
const urlUtils = require('../urlUtils.js');
|
||||
@ -12,6 +12,7 @@ export interface Options {
|
||||
plainResourceRendering?: boolean;
|
||||
postMessageSyntax?: string;
|
||||
enableLongPress?: boolean;
|
||||
itemIdToUrl?: ItemIdToUrlHandler;
|
||||
}
|
||||
|
||||
export interface LinkReplacementResult {
|
||||
@ -65,6 +66,8 @@ export default function(href: string, options: Options = null): LinkReplacementR
|
||||
resourceFullPath: null,
|
||||
};
|
||||
} else {
|
||||
// If we are rendering a note link, we'll get here too, so in that
|
||||
// case "resourceId" would actually be the note ID.
|
||||
href = `joplin://${resourceId}`;
|
||||
if (resourceHrefInfo.hash) href += `#${resourceHrefInfo.hash}`;
|
||||
resourceIdAttr = `data-resource-id='${resourceId}'`;
|
||||
@ -109,7 +112,13 @@ export default function(href: string, options: Options = null): LinkReplacementR
|
||||
if (title) attrHtml.push(`title='${htmlentities(title)}'`);
|
||||
if (mime) attrHtml.push(`type='${htmlentities(mime)}'`);
|
||||
|
||||
if (options.plainResourceRendering || options.linkRenderingType === 2) {
|
||||
let resourceFullPath = resource && options?.ResourceModel?.fullPath ? options.ResourceModel.fullPath(resource) : null;
|
||||
|
||||
if (resourceId && options.itemIdToUrl) {
|
||||
const url = options.itemIdToUrl(resourceId);
|
||||
attrHtml.push(`href='${htmlentities(url)}'`);
|
||||
resourceFullPath = url;
|
||||
} else if (options.plainResourceRendering || options.linkRenderingType === 2) {
|
||||
icon = '';
|
||||
attrHtml.push(`href='${htmlentities(href)}'`);
|
||||
} else {
|
||||
@ -121,6 +130,6 @@ export default function(href: string, options: Options = null): LinkReplacementR
|
||||
html: `<a ${attrHtml.join(' ')}>${icon}`,
|
||||
resourceReady: true,
|
||||
resource,
|
||||
resourceFullPath: resource && options?.ResourceModel?.fullPath ? options.ResourceModel.fullPath(resource) : null,
|
||||
resourceFullPath: resourceFullPath,
|
||||
};
|
||||
}
|
||||
|
@ -9,12 +9,17 @@ export interface Options {
|
||||
pdfViewerEnabled: boolean;
|
||||
}
|
||||
|
||||
function resourceUrl(resourceFullPath: string): string {
|
||||
if (resourceFullPath.indexOf('http://') === 0 || resourceFullPath.indexOf('https://')) return resourceFullPath;
|
||||
return `file://${toForwardSlashes(resourceFullPath)}`;
|
||||
}
|
||||
|
||||
export default function(link: Link, options: Options) {
|
||||
const resource = link.resource;
|
||||
|
||||
if (!link.resourceReady || !resource || !resource.mime) return '';
|
||||
|
||||
const escapedResourcePath = htmlentities(`file://${toForwardSlashes(link.resourceFullPath)}`);
|
||||
const escapedResourcePath = htmlentities(resourceUrl(link.resourceFullPath));
|
||||
const escapedMime = htmlentities(resource.mime);
|
||||
|
||||
if (options.videoPlayerEnabled && resource.mime.indexOf('video/') === 0) {
|
||||
|
@ -3,7 +3,7 @@ import htmlUtils from '../../htmlUtils';
|
||||
import utils from '../../utils';
|
||||
|
||||
function renderImageHtml(before: string, src: string, after: string, ruleOptions: RuleOptions) {
|
||||
const r = utils.imageReplacement(ruleOptions.ResourceModel, src, ruleOptions.resources, ruleOptions.resourceBaseUrl);
|
||||
const r = utils.imageReplacement(ruleOptions.ResourceModel, src, ruleOptions.resources, ruleOptions.resourceBaseUrl, ruleOptions.itemIdToUrl);
|
||||
if (typeof r === 'string') return r;
|
||||
if (r) return `<img ${before} ${htmlUtils.attributesHtml(r)} ${after}/>`;
|
||||
return `[Image: ${src}]`;
|
||||
|
@ -14,7 +14,7 @@ function plugin(markdownIt: any, ruleOptions: RuleOptions) {
|
||||
|
||||
if (!Resource.isResourceUrl(src) || ruleOptions.plainResourceRendering) return defaultRender(tokens, idx, options, env, self);
|
||||
|
||||
const r = utils.imageReplacement(ruleOptions.ResourceModel, src, ruleOptions.resources, ruleOptions.resourceBaseUrl);
|
||||
const r = utils.imageReplacement(ruleOptions.ResourceModel, src, ruleOptions.resources, ruleOptions.resourceBaseUrl, ruleOptions.itemIdToUrl);
|
||||
if (typeof r === 'string') return r;
|
||||
if (r) {
|
||||
let js = '';
|
||||
|
@ -20,6 +20,7 @@ function plugin(markdownIt: any, ruleOptions: RuleOptions) {
|
||||
plainResourceRendering: ruleOptions.plainResourceRendering,
|
||||
postMessageSyntax: ruleOptions.postMessageSyntax,
|
||||
enableLongPress: ruleOptions.enableLongPress,
|
||||
itemIdToUrl: ruleOptions.itemIdToUrl,
|
||||
});
|
||||
|
||||
ruleOptions.context.currentLinks.push({
|
||||
|
@ -57,7 +57,7 @@ function loadPluginAssets(assets) {
|
||||
|
||||
Whenever updating a Markdown-it plugin, such as Katex or Mermaid, make sure to run `npm run buildAssets`, which will compile the CSS and JS for use in the Joplin applications.
|
||||
|
||||
### Adding asset files
|
||||
### Adding asset files
|
||||
|
||||
A plugin (or rule) can have any number of assets, such as CSS or font files, associated with it. To add an asset to a plugin, follow these steps:
|
||||
|
||||
|
@ -126,10 +126,15 @@ export default function(theme: any) {
|
||||
margin-top: 0.2em;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
a[data-resource-id] {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.resource-icon {
|
||||
display: inline-block;
|
||||
position: relative;
|
||||
top: .5em;
|
||||
text-decoration: none;
|
||||
width: 1.2em;
|
||||
height: 1.4em;
|
||||
|
@ -122,7 +122,9 @@ utils.resourceStatus = function(ResourceModel: any, resourceInfo: any) {
|
||||
return resourceStatus;
|
||||
};
|
||||
|
||||
utils.imageReplacement = function(ResourceModel: any, src: string, resources: any, resourceBaseUrl: string) {
|
||||
export type ItemIdToUrlHandler = (resource: any)=> string;
|
||||
|
||||
utils.imageReplacement = function(ResourceModel: any, src: string, resources: any, resourceBaseUrl: string, itemIdToUrl: ItemIdToUrlHandler = null) {
|
||||
if (!ResourceModel || !resources) return null;
|
||||
|
||||
if (!ResourceModel.isResourceUrl(src)) return null;
|
||||
@ -136,12 +138,29 @@ utils.imageReplacement = function(ResourceModel: any, src: string, resources: an
|
||||
const icon = utils.resourceStatusImage(resourceStatus);
|
||||
return `<div class="not-loaded-resource resource-status-${resourceStatus}" data-resource-id="${resourceId}">` + `<img src="data:image/svg+xml;utf8,${htmlentities(icon)}"/>` + '</div>';
|
||||
}
|
||||
|
||||
const mime = resource.mime ? resource.mime.toLowerCase() : '';
|
||||
if (ResourceModel.isSupportedImageMimeType(mime)) {
|
||||
let newSrc = `./${ResourceModel.filename(resource)}`;
|
||||
if (resourceBaseUrl) newSrc = resourceBaseUrl + newSrc;
|
||||
newSrc += `?t=${resource.updated_time}`;
|
||||
let newSrc = '';
|
||||
|
||||
if (itemIdToUrl) {
|
||||
newSrc = itemIdToUrl(resource.id);
|
||||
} else {
|
||||
const temp = [];
|
||||
|
||||
if (resourceBaseUrl) {
|
||||
temp.push(resourceBaseUrl);
|
||||
} else {
|
||||
temp.push('./');
|
||||
}
|
||||
|
||||
temp.push(ResourceModel.filename(resource));
|
||||
temp.push(`?t=${resource.updated_time}`);
|
||||
|
||||
newSrc = temp.join('');
|
||||
}
|
||||
|
||||
// let newSrc = `./${ResourceModel.filename(resource)}`;
|
||||
// newSrc += `?t=${resource.updated_time}`;
|
||||
return {
|
||||
'data-resource-id': resource.id,
|
||||
src: newSrc,
|
||||
|
@ -1,4 +1,4 @@
|
||||
{
|
||||
"verbose": true,
|
||||
"watch": ["dist/"]
|
||||
"watch": ["dist/", "../renderer", "../lib"]
|
||||
}
|
@ -5,7 +5,7 @@
|
||||
"scripts": {
|
||||
"start-dev": "nodemon --config nodemon.json dist/app.js --env dev",
|
||||
"start": "node dist/app.js",
|
||||
"generate-types": "rm -f db-buildTypes.sqlite && npm run start -- --migrate-db --env buildTypes && node dist/tools/generate-types.js && rm -f db-buildTypes.sqlite",
|
||||
"generateTypes": "rm -f db-buildTypes.sqlite && npm run start -- --migrate-db --env buildTypes && node dist/tools/generateTypes.js && rm -f db-buildTypes.sqlite",
|
||||
"tsc": "tsc --project tsconfig.json",
|
||||
"test": "jest",
|
||||
"test-ci": "npm run test",
|
||||
@ -14,6 +14,7 @@
|
||||
"dependencies": {
|
||||
"@fortawesome/fontawesome-free": "^5.15.1",
|
||||
"@joplin/lib": "^1.0.9",
|
||||
"@joplin/renderer": "^1.7.4",
|
||||
"bcryptjs": "^2.4.3",
|
||||
"bulma": "^0.9.1",
|
||||
"bulma-prefers-dark": "^0.1.0-beta.0",
|
||||
|
@ -18,8 +18,19 @@ input.form-control {
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.navbar .logo-text {
|
||||
font-size: 2.2em;
|
||||
font-weight: bold;
|
||||
margin-left: 0.5em;
|
||||
}
|
||||
|
||||
/*
|
||||
.navbar .logo {
|
||||
height: 50px;
|
||||
} */
|
||||
|
||||
.navbar .navbar-item img {
|
||||
max-height: 3em;
|
||||
}
|
||||
|
||||
.main {
|
||||
|
@ -8,12 +8,12 @@ import Logger, { LoggerWrapper, TargetType } from '@joplin/lib/Logger';
|
||||
import config, { initConfig, runningInDocker, EnvVariables } from './config';
|
||||
import { createDb, dropDb } from './tools/dbTools';
|
||||
import { dropTables, connectDb, disconnectDb, migrateDb, waitForConnection, sqliteFilePath } from './db';
|
||||
import modelFactory from './models/factory';
|
||||
import { AppContext, Env } from './utils/types';
|
||||
import FsDriverNode from '@joplin/lib/fs-driver-node';
|
||||
import routeHandler from './middleware/routeHandler';
|
||||
import notificationHandler from './middleware/notificationHandler';
|
||||
import ownerHandler from './middleware/ownerHandler';
|
||||
import setupAppContext from './utils/setupAppContext';
|
||||
|
||||
const nodeEnvFile = require('node-env-file');
|
||||
const { shimInit } = require('@joplin/lib/shim-init-node.js');
|
||||
@ -77,6 +77,8 @@ async function main() {
|
||||
});
|
||||
|
||||
await fs.mkdirp(config().logDir);
|
||||
await fs.mkdirp(config().tempDir);
|
||||
|
||||
Logger.fsDriver_ = new FsDriverNode();
|
||||
const globalLogger = new Logger();
|
||||
// globalLogger.addTarget(TargetType.File, { path: `${config().logDir}/app.txt` });
|
||||
@ -114,8 +116,6 @@ async function main() {
|
||||
appLogger().info('DB Config:', markPasswords(config().database));
|
||||
if (config().database.client === 'sqlite3') appLogger().info('DB file:', sqliteFilePath(config().database.name));
|
||||
|
||||
const appContext = app.context as AppContext;
|
||||
|
||||
appLogger().info('Trying to connect to database...');
|
||||
const connectionCheck = await waitForConnection(config().database);
|
||||
|
||||
@ -123,10 +123,9 @@ async function main() {
|
||||
delete connectionCheckLogInfo.connection;
|
||||
|
||||
appLogger().info('Connection check:', connectionCheckLogInfo);
|
||||
appContext.env = env;
|
||||
appContext.db = connectionCheck.connection;
|
||||
appContext.models = modelFactory(appContext.db, config().baseUrl);
|
||||
appContext.appLogger = appLogger;
|
||||
const appContext = app.context as AppContext;
|
||||
|
||||
await setupAppContext(appContext, env, connectionCheck.connection, appLogger);
|
||||
|
||||
appLogger().info('Migrating database...');
|
||||
await migrateDb(appContext.db);
|
||||
|
267
packages/server/src/apps/joplin/Application.ts
Normal file
267
packages/server/src/apps/joplin/Application.ts
Normal file
@ -0,0 +1,267 @@
|
||||
import JoplinDatabase from '@joplin/lib/JoplinDatabase';
|
||||
import Logger from '@joplin/lib/Logger';
|
||||
import BaseModel, { ModelType } from '@joplin/lib/BaseModel';
|
||||
import BaseItem from '@joplin/lib/models/BaseItem';
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
import { File, Share, Uuid } from '../../db';
|
||||
import { NoteEntity } from '@joplin/lib/services/database/types';
|
||||
import { MarkupToHtml } from '@joplin/renderer';
|
||||
import Setting from '@joplin/lib/models/Setting';
|
||||
import Resource from '@joplin/lib/models/Resource';
|
||||
import FileModel from '../../models/FileModel';
|
||||
import { ErrorNotFound } from '../../utils/errors';
|
||||
import BaseApplication from '../../services/BaseApplication';
|
||||
import { formatDateTime } from '../../utils/time';
|
||||
const { DatabaseDriverNode } = require('@joplin/lib/database-driver-node.js');
|
||||
const { themeStyle } = require('@joplin/lib/theme');
|
||||
|
||||
const logger = Logger.create('JoplinApp');
|
||||
|
||||
export interface FileViewerResponse {
|
||||
body: any;
|
||||
mime: string;
|
||||
size: number;
|
||||
}
|
||||
|
||||
interface ResourceInfo {
|
||||
localState: any;
|
||||
item: any;
|
||||
}
|
||||
|
||||
interface LinkedItemInfo {
|
||||
item: any;
|
||||
file: File;
|
||||
}
|
||||
|
||||
type LinkedItemInfos = Record<Uuid, LinkedItemInfo>;
|
||||
|
||||
type ResourceInfos = Record<Uuid, ResourceInfo>;
|
||||
|
||||
export default class Application extends BaseApplication {
|
||||
|
||||
// Although we don't use the database to store data, we still need to setup
|
||||
// so that its schema can be accessed. This is needed for example by
|
||||
// Note.unserialize to know what fields are valid for a note, and to format
|
||||
// the field values correctly.
|
||||
private db_: JoplinDatabase;
|
||||
|
||||
private pluginAssetRootDir_: string;
|
||||
|
||||
public async initialize() {
|
||||
this.mustache.prefersDarkEnabled = false;
|
||||
this.pluginAssetRootDir_ = require('path').resolve(__dirname, '../../..', 'node_modules/@joplin/renderer/assets');
|
||||
|
||||
const filePath = `${this.config.tempDir}/joplin.sqlite`;
|
||||
|
||||
this.db_ = new JoplinDatabase(new DatabaseDriverNode());
|
||||
this.db_.setLogger(logger as Logger);
|
||||
await this.db_.open({ name: filePath });
|
||||
|
||||
BaseModel.setDb(this.db_);
|
||||
|
||||
// Only load the classes that will be needed to render the notes and
|
||||
// resources.
|
||||
BaseItem.loadClass('Note', Note);
|
||||
BaseItem.loadClass('Resource', Resource);
|
||||
}
|
||||
|
||||
public async localFileFromUrl(url: string): Promise<string> {
|
||||
const pluginAssetPrefix = 'apps/joplin/pluginAssets/';
|
||||
|
||||
if (url.indexOf(pluginAssetPrefix) === 0) {
|
||||
return `${this.pluginAssetRootDir_}/${url.substr(pluginAssetPrefix.length)}`;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private itemIdFilename(itemId: string): string {
|
||||
return `${itemId}.md`;
|
||||
}
|
||||
|
||||
private async itemMetadataFile(parentId: Uuid, itemId: string): Promise<File> {
|
||||
const file = await this.models.file().fileByName(parentId, this.itemIdFilename(itemId), { skipPermissionCheck: true });
|
||||
return this.models.file().loadWithContent(file.id, { skipPermissionCheck: true });
|
||||
}
|
||||
|
||||
private async unserializeItem(file: File): Promise<any> {
|
||||
const content = file.content.toString();
|
||||
return BaseItem.unserialize(content);
|
||||
}
|
||||
|
||||
private async resourceInfos(linkedItemInfos: LinkedItemInfos): Promise<ResourceInfos> {
|
||||
const output: Record<string, any> = {};
|
||||
|
||||
for (const itemId of Object.keys(linkedItemInfos)) {
|
||||
const info = linkedItemInfos[itemId];
|
||||
|
||||
if (info.item.type_ !== ModelType.Resource) continue;
|
||||
|
||||
output[info.item.id] = {
|
||||
item: info.item,
|
||||
localState: {
|
||||
fetch_status: Resource.FETCH_STATUS_DONE,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
private async noteLinkedItemInfos(noteFileParentId: string, note: NoteEntity): Promise<LinkedItemInfos> {
|
||||
const itemIds = await Note.linkedItemIds(note.body);
|
||||
const output: LinkedItemInfos = {};
|
||||
|
||||
for (const itemId of itemIds) {
|
||||
const itemFile = await this.itemMetadataFile(noteFileParentId, itemId);
|
||||
output[itemId] = {
|
||||
item: await this.unserializeItem(itemFile),
|
||||
file: itemFile,
|
||||
};
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
private async resourceDir(fileModel: FileModel, parentId: Uuid): Promise<File> {
|
||||
const parent = await fileModel.load(parentId);
|
||||
const parentFullPath = await fileModel.itemFullPath(parent);
|
||||
const dirPath = fileModel.resolve(parentFullPath, '.resource');
|
||||
return fileModel.pathToFile(dirPath);
|
||||
}
|
||||
|
||||
private async itemFile(fileModel: FileModel, parentId: Uuid, itemType: ModelType, itemId: string): Promise<File> {
|
||||
let output: File = null;
|
||||
|
||||
if (itemType === ModelType.Resource) {
|
||||
const resourceDir = await this.resourceDir(fileModel, parentId);
|
||||
output = await fileModel.fileByName(resourceDir.id, itemId);
|
||||
} else if (itemType === ModelType.Note) {
|
||||
output = await fileModel.fileByName(parentId, this.itemIdFilename(itemId));
|
||||
} else {
|
||||
throw new Error(`Unsupported type: ${itemType}`);
|
||||
}
|
||||
|
||||
return fileModel.loadWithContent(output.id);
|
||||
}
|
||||
|
||||
private async renderResource(file: File): Promise<FileViewerResponse> {
|
||||
return {
|
||||
body: file.content,
|
||||
mime: file.mime_type,
|
||||
size: file.size,
|
||||
};
|
||||
}
|
||||
|
||||
private async renderNote(share: Share, note: NoteEntity, resourceInfos: ResourceInfos, linkedItemInfos: LinkedItemInfos): Promise<FileViewerResponse> {
|
||||
const markupToHtml = new MarkupToHtml({
|
||||
ResourceModel: Resource,
|
||||
});
|
||||
|
||||
const renderOptions: any = {
|
||||
resources: resourceInfos,
|
||||
|
||||
itemIdToUrl: (itemId: Uuid) => {
|
||||
const item = linkedItemInfos[itemId].item;
|
||||
if (!item) throw new Error(`No such item in this note: ${itemId}`);
|
||||
|
||||
if (item.type_ === ModelType.Note) {
|
||||
return '#';
|
||||
} else if (item.type_ === ModelType.Resource) {
|
||||
return `${this.models.share().shareUrl(share.id)}?resource_id=${item.id}&t=${item.updated_time}`;
|
||||
} else {
|
||||
throw new Error(`Unsupported item type: ${item.type_}`);
|
||||
}
|
||||
},
|
||||
|
||||
// Switch-off the media players because there's no option to toggle
|
||||
// them on and off.
|
||||
audioPlayerEnabled: false,
|
||||
videoPlayerEnabled: false,
|
||||
pdfViewerEnabled: false,
|
||||
};
|
||||
|
||||
const result = await markupToHtml.render(note.markup_language, note.body, themeStyle(Setting.THEME_LIGHT), renderOptions);
|
||||
|
||||
const bodyHtml = await this.mustache.renderView({
|
||||
cssFiles: ['note'],
|
||||
jsFiles: ['note'],
|
||||
name: 'note',
|
||||
path: 'note',
|
||||
content: {
|
||||
note: {
|
||||
...note,
|
||||
bodyHtml: result.html,
|
||||
updatedDateTime: formatDateTime(note.updated_time),
|
||||
},
|
||||
cssStrings: result.cssStrings.join('\n'),
|
||||
assetsJs: `
|
||||
const joplinNoteViewer = {
|
||||
pluginAssets: ${JSON.stringify(result.pluginAssets)},
|
||||
appBaseUrl: ${JSON.stringify(this.appBaseUrl)},
|
||||
};
|
||||
`,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
body: bodyHtml,
|
||||
mime: 'text/html',
|
||||
size: bodyHtml.length,
|
||||
};
|
||||
}
|
||||
|
||||
public async renderFile(file: File, share: Share, query: Record<string, any>): Promise<FileViewerResponse> {
|
||||
const fileModel = this.models.file({ userId: file.owner_id });
|
||||
|
||||
const rootNote: NoteEntity = await this.unserializeItem(file);
|
||||
const linkedItemInfos = await this.noteLinkedItemInfos(file.parent_id, rootNote);
|
||||
const resourceInfos = await this.resourceInfos(linkedItemInfos);
|
||||
|
||||
const fileToRender = {
|
||||
file: file,
|
||||
itemId: rootNote.id,
|
||||
};
|
||||
|
||||
if (query.resource_id) {
|
||||
fileToRender.file = await this.itemFile(fileModel, file.parent_id, ModelType.Resource, query.resource_id);
|
||||
fileToRender.itemId = query.resource_id;
|
||||
}
|
||||
|
||||
// No longer supported - need to decide what to do about note links.
|
||||
|
||||
// if (query.note_id) {
|
||||
// fileToRender.file = await this.itemFile(fileModel, file.parent_id, ModelType.Note, query.note_id);
|
||||
// fileToRender.itemId = query.note_id;
|
||||
// }
|
||||
|
||||
if (fileToRender.file !== file && !linkedItemInfos[fileToRender.itemId]) {
|
||||
throw new ErrorNotFound(`Item "${fileToRender.itemId}" does not belong to this note`);
|
||||
}
|
||||
|
||||
const itemToRender = fileToRender.file === file ? rootNote : linkedItemInfos[fileToRender.itemId].item;
|
||||
const itemType: ModelType = itemToRender.type_;
|
||||
|
||||
if (itemType === ModelType.Resource) {
|
||||
return this.renderResource(fileToRender.file);
|
||||
} else if (itemType === ModelType.Note) {
|
||||
return this.renderNote(share, itemToRender, resourceInfos, linkedItemInfos);
|
||||
} else {
|
||||
throw new Error(`Cannot render item with type "${itemType}"`);
|
||||
}
|
||||
}
|
||||
|
||||
public async isItemFile(file: File): Promise<boolean> {
|
||||
if (file.mime_type !== 'text/markdown') return false;
|
||||
|
||||
try {
|
||||
await this.unserializeItem(file);
|
||||
} catch (error) {
|
||||
// No need to log - it means it's not a note file
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
28
packages/server/src/apps/joplin/css/note.css
Normal file
28
packages/server/src/apps/joplin/css/note.css
Normal file
@ -0,0 +1,28 @@
|
||||
.main {
|
||||
padding-left: 0;
|
||||
padding-right: 0;
|
||||
}
|
||||
|
||||
.navbar {
|
||||
padding: .5em;
|
||||
border-bottom: 1px solid #dddddd;
|
||||
box-shadow: 0px 2px 8px #cccccc;
|
||||
}
|
||||
|
||||
.page-note .note-main {
|
||||
margin-top: 2em;
|
||||
max-width: 840px;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.page-note .last-updated {
|
||||
font-size: 1.1em;
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.page-note h1.title {
|
||||
font-size: 2.2em;
|
||||
margin-top: 0.4em;
|
||||
border-bottom: none;
|
||||
}
|
44
packages/server/src/apps/joplin/js/note.js
Normal file
44
packages/server/src/apps/joplin/js/note.js
Normal file
@ -0,0 +1,44 @@
|
||||
/* global joplinNoteViewer */
|
||||
|
||||
function addPluginAssets(appBaseUrl, assets) {
|
||||
if (!assets) return;
|
||||
|
||||
const pluginAssetsContainer = document.getElementById('joplin-container-pluginAssetsContainer');
|
||||
|
||||
for (let i = 0; i < assets.length; i++) {
|
||||
const asset = assets[i];
|
||||
|
||||
if (asset.mime === 'application/javascript') {
|
||||
const script = document.createElement('script');
|
||||
script.src = `${appBaseUrl}/${asset.path}`;
|
||||
pluginAssetsContainer.appendChild(script);
|
||||
} else if (asset.mime === 'text/css') {
|
||||
const link = document.createElement('link');
|
||||
link.rel = 'stylesheet';
|
||||
link.href = `${appBaseUrl}/${asset.path}`;
|
||||
pluginAssetsContainer.appendChild(link);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function docReady(fn) {
|
||||
if (document.readyState === 'complete' || document.readyState === 'interactive') {
|
||||
setTimeout(fn, 1);
|
||||
} else {
|
||||
document.addEventListener('DOMContentLoaded', fn);
|
||||
}
|
||||
}
|
||||
|
||||
docReady(() => {
|
||||
addPluginAssets(joplinNoteViewer.appBaseUrl, joplinNoteViewer.pluginAssets);
|
||||
|
||||
document.addEventListener('click', event => {
|
||||
const element = event.target;
|
||||
|
||||
// Detects if it's a note link and, if so, display a message
|
||||
if (element && element.getAttribute('href') === '#' && element.getAttribute('data-resource-id')) {
|
||||
event.preventDefault();
|
||||
alert('This note has not been shared');
|
||||
}
|
||||
});
|
||||
});
|
19
packages/server/src/apps/joplin/views/note.mustache
Normal file
19
packages/server/src/apps/joplin/views/note.mustache
Normal file
@ -0,0 +1,19 @@
|
||||
<script>{{{assetsJs}}}</script>
|
||||
|
||||
<style>{{{cssStrings}}}</style>
|
||||
|
||||
<div id="joplin-container-pluginAssetsContainer"></div>
|
||||
|
||||
<nav class="navbar" role="navigation" aria-label="main navigation">
|
||||
<div class="navbar-brand logo-container">
|
||||
<a class="navbar-item" href="{{{global.baseUrl}}}/home">
|
||||
<img class="logo" src="{{{global.baseUrl}}}/images/Logo.png"/><span class="logo-text">Joplin</span>
|
||||
</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="note-main">
|
||||
<p class="last-updated">Last updated: {{note.updatedDateTime}}</p>
|
||||
<h1 class="title">{{note.title}}</h1>
|
||||
{{{note.bodyHtml}}}
|
||||
</div>
|
@ -78,6 +78,7 @@ export function initConfig(env: EnvVariables) {
|
||||
rootDir: rootDir,
|
||||
viewDir: viewDir,
|
||||
layoutDir: `${viewDir}/layouts`,
|
||||
tempDir: `${rootDir}/temp`,
|
||||
logDir: `${rootDir}/logs`,
|
||||
database: databaseConfigFromEnv(runningInDocker_, env),
|
||||
port: appPort,
|
||||
|
@ -211,6 +211,11 @@ export enum ChangeType {
|
||||
Delete = 3,
|
||||
}
|
||||
|
||||
export enum ShareType {
|
||||
Link = 1, // When a note is shared via a public link
|
||||
App = 2, // When a note is shared with another user on the same server instance
|
||||
}
|
||||
|
||||
export interface WithDates {
|
||||
updated_time?: number;
|
||||
created_time?: number;
|
||||
@ -289,6 +294,12 @@ export interface Notification extends WithDates, WithUuid {
|
||||
canBeDismissed?: number;
|
||||
}
|
||||
|
||||
export interface Share extends WithDates, WithUuid {
|
||||
owner_id?: Uuid;
|
||||
file_id?: Uuid;
|
||||
type?: ShareType;
|
||||
}
|
||||
|
||||
export const databaseSchema: DatabaseTables = {
|
||||
users: {
|
||||
id: { type: 'string' },
|
||||
@ -359,5 +370,13 @@ export const databaseSchema: DatabaseTables = {
|
||||
updated_time: { type: 'string' },
|
||||
created_time: { type: 'string' },
|
||||
},
|
||||
shares: {
|
||||
id: { type: 'string' },
|
||||
owner_id: { type: 'string' },
|
||||
file_id: { type: 'string' },
|
||||
type: { type: 'number' },
|
||||
updated_time: { type: 'string' },
|
||||
created_time: { type: 'string' },
|
||||
},
|
||||
};
|
||||
// AUTO-GENERATED-TYPES
|
||||
|
@ -2,7 +2,16 @@ import routes from '../routes/routes';
|
||||
import { ErrorForbidden, ErrorNotFound } from '../utils/errors';
|
||||
import { routeResponseFormat, findMatchingRoute, Response, RouteResponseFormat, MatchedRoute } from '../utils/routeUtils';
|
||||
import { AppContext, Env, HttpMethod } from '../utils/types';
|
||||
import mustacheService, { isView, View } from '../services/MustacheService';
|
||||
import MustacheService, { isView, View } from '../services/MustacheService';
|
||||
import config from '../config';
|
||||
|
||||
let mustache_: MustacheService = null;
|
||||
function mustache(): MustacheService {
|
||||
if (!mustache_) {
|
||||
mustache_ = new MustacheService(config().viewDir, config().baseUrl);
|
||||
}
|
||||
return mustache_;
|
||||
}
|
||||
|
||||
export default async function(ctx: AppContext) {
|
||||
ctx.appLogger().info(`${ctx.request.method} ${ctx.path}`);
|
||||
@ -28,7 +37,7 @@ export default async function(ctx: AppContext) {
|
||||
ctx.response = responseObject.response;
|
||||
} else if (isView(responseObject)) {
|
||||
ctx.response.status = 200;
|
||||
ctx.response.body = await mustacheService.renderView(responseObject, {
|
||||
ctx.response.body = await mustache().renderView(responseObject, {
|
||||
notifications: ctx.notifications || [],
|
||||
hasNotifications: !!ctx.notifications && !!ctx.notifications.length,
|
||||
owner: ctx.owner,
|
||||
@ -61,7 +70,7 @@ export default async function(ctx: AppContext) {
|
||||
stack: ctx.env === Env.Dev ? error.stack : '',
|
||||
},
|
||||
};
|
||||
ctx.response.body = await mustacheService.renderView(view);
|
||||
ctx.response.body = await mustache().renderView(view);
|
||||
} else { // JSON
|
||||
ctx.response.set('Content-Type', 'application/json');
|
||||
const r: any = { error: error.message };
|
||||
|
17
packages/server/src/migrations/20203012152842_shares.ts
Normal file
17
packages/server/src/migrations/20203012152842_shares.ts
Normal file
@ -0,0 +1,17 @@
|
||||
import * as Knex from 'knex';
|
||||
import { DbConnection } from '../db';
|
||||
|
||||
export async function up(db: DbConnection): Promise<any> {
|
||||
await db.schema.createTable('shares', function(table: Knex.CreateTableBuilder) {
|
||||
table.string('id', 32).unique().primary().notNullable();
|
||||
table.string('owner_id', 32).notNullable();
|
||||
table.string('file_id', 32).notNullable();
|
||||
table.integer('type').notNullable();
|
||||
table.bigInteger('updated_time').notNullable();
|
||||
table.bigInteger('created_time').notNullable();
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(db: DbConnection): Promise<any> {
|
||||
await db.schema.dropTable('shares');
|
||||
}
|
@ -1,6 +1,7 @@
|
||||
import BaseModel from './BaseModel';
|
||||
import { ApiClient } from '../db';
|
||||
|
||||
export default class ApiClientModel extends BaseModel {
|
||||
export default class ApiClientModel extends BaseModel<ApiClient> {
|
||||
|
||||
protected get tableName(): string {
|
||||
return 'api_clients';
|
||||
|
@ -1,12 +1,9 @@
|
||||
import { WithDates, WithUuid, File, User, Session, Permission, databaseSchema, ApiClient, DbConnection, Change, ItemType, ChangeType, Notification } from '../db';
|
||||
import { WithDates, WithUuid, databaseSchema, DbConnection, ItemType, ChangeType } from '../db';
|
||||
import TransactionHandler from '../utils/TransactionHandler';
|
||||
import uuidgen from '../utils/uuidgen';
|
||||
import { ErrorUnprocessableEntity, ErrorBadRequest } from '../utils/errors';
|
||||
import { Models } from './factory';
|
||||
|
||||
export type AnyItemType = File | User | Session | Permission | ApiClient | Change | Notification;
|
||||
export type AnyItemTypes = File[] | User[] | Session[] | Permission[] | ApiClient[] | Change[] | Notification[];
|
||||
|
||||
export interface ModelOptions {
|
||||
userId?: string;
|
||||
}
|
||||
@ -27,7 +24,7 @@ export interface ValidateOptions {
|
||||
rules?: any;
|
||||
}
|
||||
|
||||
export default abstract class BaseModel {
|
||||
export default abstract class BaseModel<T> {
|
||||
|
||||
private options_: ModelOptions = null;
|
||||
private defaultFields_: string[] = [];
|
||||
@ -156,52 +153,61 @@ export default abstract class BaseModel {
|
||||
await this.transactionHandler_.commit(txIndex);
|
||||
}
|
||||
|
||||
public async all(): Promise<AnyItemTypes> {
|
||||
public async all(): Promise<T[]> {
|
||||
return this.db(this.tableName).select(...this.defaultFields);
|
||||
}
|
||||
|
||||
public fromApiInput(object: AnyItemType): AnyItemType {
|
||||
return object;
|
||||
public fromApiInput(object: T): T {
|
||||
const blackList = ['updated_time', 'created_time', 'owner_id'];
|
||||
const whiteList = Object.keys(databaseSchema[this.tableName]);
|
||||
const output: any = { ...object };
|
||||
|
||||
for (const f in object) {
|
||||
if (blackList.includes(f)) delete output[f];
|
||||
if (!whiteList.includes(f)) delete output[f];
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
public toApiOutput(object: any): any {
|
||||
return { ...object };
|
||||
}
|
||||
|
||||
protected async validate(object: AnyItemType, options: ValidateOptions = {}): Promise<AnyItemType> {
|
||||
protected async validate(object: T, options: ValidateOptions = {}): Promise<T> {
|
||||
if (!options.isNew && !(object as WithUuid).id) throw new ErrorUnprocessableEntity('id is missing');
|
||||
return object;
|
||||
}
|
||||
|
||||
protected async isNew(object: AnyItemType, options: SaveOptions): Promise<boolean> {
|
||||
protected async isNew(object: T, options: SaveOptions): Promise<boolean> {
|
||||
if (options.isNew === false) return false;
|
||||
if (options.isNew === true) return true;
|
||||
return !(object as WithUuid).id;
|
||||
}
|
||||
|
||||
private async handleChangeTracking(options: SaveOptions, item: AnyItemType, changeType: ChangeType): Promise<void> {
|
||||
private async handleChangeTracking(options: SaveOptions, item: T, changeType: ChangeType): Promise<void> {
|
||||
const trackChanges = this.trackChanges && options.trackChanges !== false;
|
||||
if (!trackChanges) return;
|
||||
|
||||
let parentId = null;
|
||||
if (this.hasParentId) {
|
||||
if (!('parent_id' in item)) {
|
||||
const temp: any = await this.db(this.tableName).select(['parent_id']).where('id', '=', item.id).first();
|
||||
const temp: any = await this.db(this.tableName).select(['parent_id']).where('id', '=', (item as WithUuid).id).first();
|
||||
parentId = temp.parent_id;
|
||||
} else {
|
||||
parentId = item.parent_id;
|
||||
parentId = (item as any).parent_id;
|
||||
}
|
||||
}
|
||||
|
||||
// Sanity check - shouldn't happen
|
||||
// Parent ID can be an empty string for root folders, but it shouldn't be null or undefined
|
||||
if (this.hasParentId && !parentId && parentId !== '') throw new Error(`Could not find parent ID for item: ${item.id}`);
|
||||
if (this.hasParentId && !parentId && parentId !== '') throw new Error(`Could not find parent ID for item: ${(item as WithUuid).id}`);
|
||||
|
||||
const changeModel = this.models().change({ userId: this.userId });
|
||||
await changeModel.add(this.itemType, parentId, (item as WithUuid).id, (item as any).name || '', changeType);
|
||||
}
|
||||
|
||||
public async save(object: AnyItemType, options: SaveOptions = {}): Promise<AnyItemType> {
|
||||
public async save(object: T, options: SaveOptions = {}): Promise<T> {
|
||||
if (!object) throw new Error('Object cannot be empty');
|
||||
|
||||
const toSave = Object.assign({}, object);
|
||||
@ -231,7 +237,7 @@ export default abstract class BaseModel {
|
||||
if (!objectId) throw new Error('Missing "id" property');
|
||||
delete (toSave as WithUuid).id;
|
||||
const updatedCount: number = await this.db(this.tableName).update(toSave).where({ id: objectId });
|
||||
toSave.id = objectId;
|
||||
(toSave as WithUuid).id = objectId;
|
||||
|
||||
await this.handleChangeTracking(options, toSave, ChangeType.Update);
|
||||
|
||||
@ -243,12 +249,12 @@ export default abstract class BaseModel {
|
||||
return toSave;
|
||||
}
|
||||
|
||||
public async loadByIds(ids: string[]): Promise<AnyItemType[]> {
|
||||
public async loadByIds(ids: string[]): Promise<T[]> {
|
||||
if (!ids.length) return [];
|
||||
return this.db(this.tableName).select(this.defaultFields).whereIn('id', ids);
|
||||
}
|
||||
|
||||
public async load(id: string): Promise<AnyItemType> {
|
||||
public async load(id: string): Promise<T> {
|
||||
if (!id) throw new Error('id cannot be empty');
|
||||
|
||||
return this.db(this.tableName).select(this.defaultFields).where({ id: id }).first();
|
||||
@ -263,7 +269,7 @@ export default abstract class BaseModel {
|
||||
|
||||
const trackChanges = this.trackChanges;
|
||||
|
||||
let itemsWithParentIds: AnyItemType[] = null;
|
||||
let itemsWithParentIds: T[] = null;
|
||||
if (trackChanges) {
|
||||
itemsWithParentIds = await this.db(this.tableName).select(['id', 'parent_id', 'name']).whereIn('id', ids);
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user