Mirror of https://github.com/laurent22/joplin.git (synced 2025-09-05 20:56:22 +02:00)

Compare commits: v2.5.3 ... db_collate (2 commits)

Author | SHA1 | Date
---|---|---
 | 030d718a59 |
 | 7ad1ec3246 |
@@ -261,9 +261,6 @@ packages/app-desktop/gui/MainScreen/commands/hideModalMessage.js.map
packages/app-desktop/gui/MainScreen/commands/index.d.ts
packages/app-desktop/gui/MainScreen/commands/index.js
packages/app-desktop/gui/MainScreen/commands/index.js.map
packages/app-desktop/gui/MainScreen/commands/leaveSharedFolder.d.ts
packages/app-desktop/gui/MainScreen/commands/leaveSharedFolder.js
packages/app-desktop/gui/MainScreen/commands/leaveSharedFolder.js.map
packages/app-desktop/gui/MainScreen/commands/moveToFolder.d.ts
packages/app-desktop/gui/MainScreen/commands/moveToFolder.js
packages/app-desktop/gui/MainScreen/commands/moveToFolder.js.map
@@ -732,9 +729,6 @@ packages/app-desktop/services/plugins/hooks/useViewIsReady.js.map
packages/app-desktop/services/plugins/hooks/useWebviewToPluginMessages.d.ts
packages/app-desktop/services/plugins/hooks/useWebviewToPluginMessages.js
packages/app-desktop/services/plugins/hooks/useWebviewToPluginMessages.js.map
packages/app-desktop/services/share/invitationRespond.d.ts
packages/app-desktop/services/share/invitationRespond.js
packages/app-desktop/services/share/invitationRespond.js.map
packages/app-desktop/services/spellChecker/SpellCheckerServiceDriverNative.d.ts
packages/app-desktop/services/spellChecker/SpellCheckerServiceDriverNative.js
packages/app-desktop/services/spellChecker/SpellCheckerServiceDriverNative.js.map
@@ -945,12 +939,6 @@ packages/lib/TaskQueue.js.map
packages/lib/array.d.ts
packages/lib/array.js
packages/lib/array.js.map
packages/lib/callbackUrlUtils.d.ts
packages/lib/callbackUrlUtils.js
packages/lib/callbackUrlUtils.js.map
packages/lib/callbackUrlUtils.test.d.ts
packages/lib/callbackUrlUtils.test.js
packages/lib/callbackUrlUtils.test.js.map
packages/lib/commands/historyBackward.d.ts
packages/lib/commands/historyBackward.js
packages/lib/commands/historyBackward.js.map
@@ -1314,12 +1302,6 @@ packages/lib/services/interop/InteropService_Exporter_Jex.js.map
packages/lib/services/interop/InteropService_Exporter_Md.d.ts
packages/lib/services/interop/InteropService_Exporter_Md.js
packages/lib/services/interop/InteropService_Exporter_Md.js.map
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.d.ts
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.js
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.js.map
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.test.d.ts
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.test.js
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.test.js.map
packages/lib/services/interop/InteropService_Exporter_Raw.d.ts
packages/lib/services/interop/InteropService_Exporter_Raw.js
packages/lib/services/interop/InteropService_Exporter_Raw.js.map
@@ -1344,12 +1326,6 @@ packages/lib/services/interop/InteropService_Importer_Md.js.map
packages/lib/services/interop/InteropService_Importer_Md.test.d.ts
packages/lib/services/interop/InteropService_Importer_Md.test.js
packages/lib/services/interop/InteropService_Importer_Md.test.js.map
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.d.ts
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.js
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.js.map
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.test.d.ts
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.test.js
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.test.js.map
packages/lib/services/interop/InteropService_Importer_Raw.d.ts
packages/lib/services/interop/InteropService_Importer_Raw.js
packages/lib/services/interop/InteropService_Importer_Raw.js.map
@@ -1659,9 +1635,6 @@ packages/lib/services/synchronizer/migrations/2.js.map
packages/lib/services/synchronizer/migrations/3.d.ts
packages/lib/services/synchronizer/migrations/3.js
packages/lib/services/synchronizer/migrations/3.js.map
packages/lib/services/synchronizer/syncDebugLog.d.ts
packages/lib/services/synchronizer/syncDebugLog.js
packages/lib/services/synchronizer/syncDebugLog.js.map
packages/lib/services/synchronizer/syncInfoUtils.d.ts
packages/lib/services/synchronizer/syncInfoUtils.js
packages/lib/services/synchronizer/syncInfoUtils.js.map
7 .github/scripts/run_ci.sh (vendored)
@@ -62,13 +62,6 @@ npm install
if [ "$IS_PULL_REQUEST" == "1" ] || [ "$IS_DEV_BRANCH" = "1" ]; then
echo "Step: Running tests..."

# Need this because we're getting this error:
#
# @joplin/lib: FATAL ERROR: Ineffective mark-compacts near heap limit
# Allocation failed - JavaScript heap out of memory
#
# https://stackoverflow.com/questions/38558989
export NODE_OPTIONS="--max-old-space-size=4096"
npm run test-ci
testResult=$?
if [ $testResult -ne 0 ]; then
27 .gitignore (vendored)
@@ -244,9 +244,6 @@ packages/app-desktop/gui/MainScreen/commands/hideModalMessage.js.map
packages/app-desktop/gui/MainScreen/commands/index.d.ts
packages/app-desktop/gui/MainScreen/commands/index.js
packages/app-desktop/gui/MainScreen/commands/index.js.map
packages/app-desktop/gui/MainScreen/commands/leaveSharedFolder.d.ts
packages/app-desktop/gui/MainScreen/commands/leaveSharedFolder.js
packages/app-desktop/gui/MainScreen/commands/leaveSharedFolder.js.map
packages/app-desktop/gui/MainScreen/commands/moveToFolder.d.ts
packages/app-desktop/gui/MainScreen/commands/moveToFolder.js
packages/app-desktop/gui/MainScreen/commands/moveToFolder.js.map
@@ -715,9 +712,6 @@ packages/app-desktop/services/plugins/hooks/useViewIsReady.js.map
packages/app-desktop/services/plugins/hooks/useWebviewToPluginMessages.d.ts
packages/app-desktop/services/plugins/hooks/useWebviewToPluginMessages.js
packages/app-desktop/services/plugins/hooks/useWebviewToPluginMessages.js.map
packages/app-desktop/services/share/invitationRespond.d.ts
packages/app-desktop/services/share/invitationRespond.js
packages/app-desktop/services/share/invitationRespond.js.map
packages/app-desktop/services/spellChecker/SpellCheckerServiceDriverNative.d.ts
packages/app-desktop/services/spellChecker/SpellCheckerServiceDriverNative.js
packages/app-desktop/services/spellChecker/SpellCheckerServiceDriverNative.js.map
@@ -928,12 +922,6 @@ packages/lib/TaskQueue.js.map
packages/lib/array.d.ts
packages/lib/array.js
packages/lib/array.js.map
packages/lib/callbackUrlUtils.d.ts
packages/lib/callbackUrlUtils.js
packages/lib/callbackUrlUtils.js.map
packages/lib/callbackUrlUtils.test.d.ts
packages/lib/callbackUrlUtils.test.js
packages/lib/callbackUrlUtils.test.js.map
packages/lib/commands/historyBackward.d.ts
packages/lib/commands/historyBackward.js
packages/lib/commands/historyBackward.js.map
@@ -1297,12 +1285,6 @@ packages/lib/services/interop/InteropService_Exporter_Jex.js.map
packages/lib/services/interop/InteropService_Exporter_Md.d.ts
packages/lib/services/interop/InteropService_Exporter_Md.js
packages/lib/services/interop/InteropService_Exporter_Md.js.map
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.d.ts
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.js
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.js.map
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.test.d.ts
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.test.js
packages/lib/services/interop/InteropService_Exporter_Md_frontmatter.test.js.map
packages/lib/services/interop/InteropService_Exporter_Raw.d.ts
packages/lib/services/interop/InteropService_Exporter_Raw.js
packages/lib/services/interop/InteropService_Exporter_Raw.js.map
@@ -1327,12 +1309,6 @@ packages/lib/services/interop/InteropService_Importer_Md.js.map
packages/lib/services/interop/InteropService_Importer_Md.test.d.ts
packages/lib/services/interop/InteropService_Importer_Md.test.js
packages/lib/services/interop/InteropService_Importer_Md.test.js.map
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.d.ts
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.js
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.js.map
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.test.d.ts
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.test.js
packages/lib/services/interop/InteropService_Importer_Md_frontmatter.test.js.map
packages/lib/services/interop/InteropService_Importer_Raw.d.ts
packages/lib/services/interop/InteropService_Importer_Raw.js
packages/lib/services/interop/InteropService_Importer_Raw.js.map
@@ -1642,9 +1618,6 @@ packages/lib/services/synchronizer/migrations/2.js.map
packages/lib/services/synchronizer/migrations/3.d.ts
packages/lib/services/synchronizer/migrations/3.js
packages/lib/services/synchronizer/migrations/3.js.map
packages/lib/services/synchronizer/syncDebugLog.d.ts
packages/lib/services/synchronizer/syncDebugLog.js
packages/lib/services/synchronizer/syncDebugLog.js.map
packages/lib/services/synchronizer/syncInfoUtils.d.ts
packages/lib/services/synchronizer/syncInfoUtils.js
packages/lib/services/synchronizer/syncInfoUtils.js.map
2 BUILD.md
@@ -45,7 +45,7 @@ You can also run it under WSL 2. To do so, [follow these instructions](https://w

## Testing the Mobile application

First you need to setup React Native to build projects with native code. For this, follow the instructions in the [Setting up the development environment](https://reactnative.dev/docs/environment-setup) tutorial, in the "React Native CLI Quickstart" tab.
First you need to setup React Native to build projects with native code. For this, follow the instructions on the [Get Started](https://facebook.github.io/react-native/docs/getting-started.html) tutorial, in the "React Native CLI Quickstart" tab.

Then, for **Android**:
@@ -187,7 +187,7 @@ if command -v lsb_release &> /dev/null; then
# Linux Mint 4 Debbie is based on Debian 10 and requires the same param handling.
if [[ $DISTVER =~ Debian1. ]] || [ "$DISTVER" = "Linuxmint4" ] && [ "$DISTCODENAME" = "debbie" ] || [ "$DISTVER" = "CentOS" ] && [[ "$DISTMAJOR" =~ 6|7 ]]
then
SANDBOXPARAM="--no-sandbox"
SANDBOXPARAM=" --no-sandbox"
fi
fi

@@ -206,21 +206,7 @@ then

# On some systems this directory doesn't exist by default
mkdir -p ~/.local/share/applications

# Tabs specifically, and not spaces, are needed for indentation with Bash heredocs
cat >> ~/.local/share/applications/appimagekit-joplin.desktop <<-EOF
[Desktop Entry]
Encoding=UTF-8
Name=Joplin
Comment=Joplin for Desktop
Exec=${HOME}/.joplin/Joplin.AppImage ${SANDBOXPARAM}
Icon=joplin
StartupWMClass=Joplin
Type=Application
Categories=Office;
MimeType=x-scheme-handler/joplin;
EOF

echo -e "[Desktop Entry]\nEncoding=UTF-8\nName=Joplin\nComment=Joplin for Desktop\nExec=${HOME}/.joplin/Joplin.AppImage${SANDBOXPARAM}\nIcon=joplin\nStartupWMClass=Joplin\nType=Application\nCategories=Office;" >> ~/.local/share/applications/appimagekit-joplin.desktop
# Update application icons
[[ `command -v update-desktop-database` ]] && update-desktop-database ~/.local/share/applications && update-desktop-database ~/.local/share/icons
print "${COLOR_GREEN}OK${COLOR_RESET}"
@@ -98,7 +98,6 @@ The Web Clipper is a browser extension that allows you to save web pages and scr
- [What is a conflict?](https://github.com/laurent22/joplin/blob/dev/readme/conflict.md)
- [How to enable debug mode](https://github.com/laurent22/joplin/blob/dev/readme/debugging.md)
- [About the Rich Text editor limitations](https://github.com/laurent22/joplin/blob/dev/readme/rich_text_editor.md)
- [External links](https://github.com/laurent22/joplin/blob/dev/readme/external_links.md)
- [FAQ](https://github.com/laurent22/joplin/blob/dev/readme/faq.md)

- Joplin Cloud
@@ -7,7 +7,6 @@ version: '3'
services:
db:
image: postgres:13.1
command: postgres -c work_mem=100000
ports:
- "5432:5432"
environment:
@@ -9,7 +9,7 @@ const Tag = require('@joplin/lib/models/Tag').default;
const Setting = require('@joplin/lib/models/Setting').default;
const { reg } = require('@joplin/lib/registry.js');
const { fileExtension } = require('@joplin/lib/path-utils');
const { splitCommandString, splitCommandBatch } = require('@joplin/lib/string-utils');
const { splitCommandString } = require('@joplin/lib/string-utils');
const { _ } = require('@joplin/lib/locale');
const fs = require('fs-extra');
const { cliUtils } = require('./cli-utils.js');
@@ -390,8 +390,7 @@ class Application extends BaseApplication {
async commandList(argv) {
if (argv.length && argv[0] === 'batch') {
const commands = [];
const commandLines = splitCommandBatch(await fs.readFile(argv[1], 'utf-8'));

const commandLines = (await fs.readFile(argv[1], 'utf-8')).split('\n');
for (const commandLine of commandLines) {
if (!commandLine.trim()) continue;
const splitted = splitCommandString(commandLine.trim());
@@ -1,9 +0,0 @@
---
title: ddd
tags:
- banana
- banana
- banana
---

And a link to note [full](./full.md), and link to the [same note](./full.md).
@@ -1,18 +0,0 @@
---
title: Test Note Title
updated: 2019-05-01 16:54
created: 2019-05-01 16:54
Source: https://joplinapp.org
author: Joplin
latitude: 37.084021
longitude: -94.51350100
altitude: 0.0000
Completed?: false
due: 2021-08-22 00:00
tags:
- Joplin
- nOte
- pencil
---

This is the note body
@@ -1,6 +0,0 @@
---
Title: Inline Tags
Tags: [inline, tag]
---

Body
@@ -1,9 +0,0 @@
---
title: norm
tags:
- tag1
- tag2
- tag3
---

note body
@@ -1,5 +0,0 @@
---
title: 001
---

note body
@@ -1,20 +0,0 @@
---
title: "YAML metadata for R Markdown with examples"
subtitle: "YAML header"
author: Hao Liang
date: "2021-06-10"
output:
md_document:
toc: yes
toc_depth: 2
abstract: YAML is a human-readable and easy to write language to define data structures.
keywords: ["YAML", "Rmd"]
subject: Medicine
description: Rmd makes it possible to use a YAML header to specify certain parameters right at the beginning of the document.
category:
- Rmd
- Medicine
lang: "en-US"
---

Example taken from https://github.com/hao203/rmarkdown-YAML
@@ -1,10 +0,0 @@
---
title: "Distill for R Markdown"
author:
- name: "JJ Allaire"
url: https://github.com/jjallaire
affiliation: RStudio
affiliation_url: https://www.rstudio.com
---

Example taken from https://github.com/hao203/rmarkdown-YAML
@@ -1,7 +0,0 @@
---
title: Date
created: 2017-01-01
updated: 2021-01-01
---

I hope the dates are imported correctly
@@ -1,9 +0,0 @@
---
title: xxx
---

---
author: xxx
---

note body
@@ -1,12 +0,0 @@
---
Title: |-
First
Second
Updated: 28/09/2021 20:57
Created: 28/09/2021 20:57
Latitude: 0.00000000
Longitude: 0.00000000
Altitude: 0.0000
---

This note has a newline in the title
@@ -1,8 +0,0 @@
---
Title: Unquoted
Longitude: -94.51350100
Completed?: No
DUE: 2022-04-04 13:00
---

note body
@@ -1,7 +0,0 @@
---
title: Test Note Title
updated: 2019-05-01T16:54:00Z
created: 2019-05-01 16:54-07:00
---

This is the note body
@@ -1,7 +1,6 @@
import Logger from '@joplin/lib/Logger';
import { PluginMessage } from './services/plugins/PluginRunner';
import shim from '@joplin/lib/shim';
import { isCallbackUrl } from '@joplin/lib/callbackUrlUtils';

const { BrowserWindow, Tray, screen } = require('electron');
const url = require('url');
@@ -31,14 +30,12 @@ export default class ElectronAppWrapper {
private buildDir_: string = null;
private rendererProcessQuitReply_: RendererProcessQuitReply = null;
private pluginWindows_: PluginWindows = {};
private initialCallbackUrl_: string = null;

constructor(electronApp: any, env: string, profilePath: string, isDebugMode: boolean, initialCallbackUrl: string) {
constructor(electronApp: any, env: string, profilePath: string, isDebugMode: boolean) {
this.electronApp_ = electronApp;
this.env_ = env;
this.isDebugMode_ = isDebugMode;
this.profilePath_ = profilePath;
this.initialCallbackUrl_ = initialCallbackUrl;
}

electronApp() {
@@ -61,10 +58,6 @@ export default class ElectronAppWrapper {
return this.env_;
}

initialCallbackUrl() {
return this.initialCallbackUrl_;
}

createWindow() {
// Set to true to view errors if the application does not start
const debugEarlyBugs = this.env_ === 'dev' || this.isDebugMode_;
@@ -243,7 +236,7 @@ export default class ElectronAppWrapper {
async waitForElectronAppReady() {
if (this.electronApp().isReady()) return Promise.resolve();

return new Promise<void>((resolve) => {
return new Promise((resolve) => {
const iid = setInterval(() => {
if (this.electronApp().isReady()) {
clearInterval(iid);
@@ -330,18 +323,12 @@ export default class ElectronAppWrapper {
}

// Someone tried to open a second instance - focus our window instead
this.electronApp_.on('second-instance', (_e: any, argv: string[]) => {
this.electronApp_.on('second-instance', () => {
const win = this.window();
if (!win) return;
if (win.isMinimized()) win.restore();
win.show();
win.focus();
if (process.platform !== 'darwin') {
const url = argv.find((arg) => isCallbackUrl(arg));
if (url) {
void this.openCallbackUrl(url);
}
}
});

return false;
@@ -368,17 +355,6 @@ export default class ElectronAppWrapper {
this.electronApp_.on('activate', () => {
this.win_.show();
});

this.electronApp_.on('open-url', (event: any, url: string) => {
event.preventDefault();
void this.openCallbackUrl(url);
});
}

async openCallbackUrl(url: string) {
this.win_.webContents.send('asynchronous-message', 'openCallbackUrl', {
url: url,
});
}

}
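The ElectronAppWrapper hunks above carry the desktop app's joplin:// x-callback-url plumbing on one side of this comparison: the URL arrives in the main process (the initial launch arguments, a second instance's argv, or macOS's open-url event) and is forwarded to the renderer over IPC. Below is a minimal sketch of that main-process half; the function name and the bare-bones argument handling are illustrative assumptions, while the isCallbackUrl filter and the 'asynchronous-message' / 'openCallbackUrl' channel come directly from the hunks above.

```typescript
// Main-process sketch: pick a joplin:// callback URL out of a second instance's
// argv and forward it to the renderer window, which parses and executes it
// (see the MainScreen ipcRenderer handler further down in this diff).
import { BrowserWindow } from 'electron';
import { isCallbackUrl } from '@joplin/lib/callbackUrlUtils';

function forwardCallbackUrlFromArgv(win: BrowserWindow, argv: string[]) {
	// On Windows/Linux the URL shows up as a plain argument; macOS instead
	// delivers it through the 'open-url' event, as shown above.
	const url = argv.find(arg => isCallbackUrl(arg));
	if (!url) return;
	win.webContents.send('asynchronous-message', 'openCallbackUrl', { url: url });
}
```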
@@ -43,7 +43,6 @@ import noteListControlsCommands from './gui/NoteListControls/commands/index';
import sidebarCommands from './gui/Sidebar/commands/index';
import appCommands from './commands/index';
import libCommands from '@joplin/lib/commands/index';
import { homedir } from 'os';
const electronContextMenu = require('./services/electron-context-menu');
// import populateDatabase from '@joplin/lib/services/debug/populateDatabase';

@@ -61,7 +60,6 @@ import editorCommandDeclarations from './gui/NoteEditor/editorCommandDeclaration
import ShareService from '@joplin/lib/services/share/ShareService';
import checkForUpdates from './checkForUpdates';
import { AppState } from './app.reducer';
import syncDebugLog from '../lib/services/synchronizer/syncDebugLog';
// import { runIntegrationTests } from '@joplin/lib/services/e2ee/ppkTestUtils';

const pluginClasses = [
@@ -357,17 +355,7 @@ class Application extends BaseApplication {

reg.logger().info('app.start: doing regular boot');

const dir: string = Setting.value('profileDir');

syncDebugLog.enabled = false;

if (dir.endsWith('dev-desktop-2')) {
syncDebugLog.addTarget(TargetType.File, {
path: `${homedir()}/synclog.txt`,
});
syncDebugLog.enabled = true;
syncDebugLog.info(`Profile dir: ${dir}`);
}
const dir = Setting.value('profileDir');

// Loads app-wide styles. (Markdown preview-specific styles loaded in app.js)
const filename = Setting.custom_css_files.JOPLIN_APP;
@@ -32,13 +32,13 @@ import removeItem from '../ResizableLayout/utils/removeItem';
import EncryptionService from '@joplin/lib/services/e2ee/EncryptionService';
import ShareFolderDialog from '../ShareFolderDialog/ShareFolderDialog';
import { ShareInvitation } from '@joplin/lib/services/share/reducer';
import ShareService from '@joplin/lib/services/share/ShareService';
import { reg } from '@joplin/lib/registry';
import removeKeylessItems from '../ResizableLayout/utils/removeKeylessItems';
import { localSyncInfoFromState } from '@joplin/lib/services/synchronizer/syncInfoUtils';
import { parseCallbackUrl } from '@joplin/lib/callbackUrlUtils';
import ElectronAppWrapper from '../../ElectronAppWrapper';
import { showMissingMasterKeyMessage } from '@joplin/lib/services/e2ee/utils';
import commands from './commands/index';
import invitationRespond from '../../services/share/invitationRespond';

const { connect } = require('react-redux');
const { PromptDialog } = require('../PromptDialog.min.js');
const NotePropertiesDialog = require('../NotePropertiesDialog.min.js');
@@ -156,23 +156,6 @@ class MainScreenComponent extends React.Component<Props, State> {
this.layoutModeListenerKeyDown = this.layoutModeListenerKeyDown.bind(this);

window.addEventListener('resize', this.window_resize);

ipcRenderer.on('asynchronous-message', (_event: any, message: string, args: any) => {
if (message === 'openCallbackUrl') {
this.openCallbackUrl(args.url);
}
});

const initialCallbackUrl = (bridge().electronApp() as ElectronAppWrapper).initialCallbackUrl();
if (initialCallbackUrl) {
this.openCallbackUrl(initialCallbackUrl);
}
}

private openCallbackUrl(url: string) {
console.log(`openUrl ${url}`);
const { command, params } = parseCallbackUrl(url);
void CommandService.instance().execute(command.toString(), params.id);
}

private updateLayoutPluginViews(layout: LayoutItem, plugins: PluginStates) {
@@ -564,8 +547,18 @@ class MainScreenComponent extends React.Component<Props, State> {
bridge().restart();
};

const onInvitationRespond = async (shareUserId: string, folderId: string, accept: boolean) => {
await invitationRespond(shareUserId, folderId, accept);
const onInvitationRespond = async (shareUserId: string, accept: boolean) => {
// The below functions can take a bit of time to complete so in the
// meantime we hide the notification so that the user doesn't click
// multiple times on the Accept link.
ShareService.instance().setProcessingShareInvitationResponse(true);
try {
await ShareService.instance().respondInvitation(shareUserId, accept);
await ShareService.instance().refreshShareInvitations();
} finally {
ShareService.instance().setProcessingShareInvitationResponse(false);
}
void reg.scheduleSync(1000);
};

let msg = null;
@@ -610,9 +603,9 @@ class MainScreenComponent extends React.Component<Props, State> {
msg = this.renderNotificationMessage(
_('%s (%s) would like to share a notebook with you.', sharer.full_name, sharer.email),
_('Accept'),
() => onInvitationRespond(invitation.id, invitation.share.folder_id, true),
() => onInvitationRespond(invitation.id, true),
_('Reject'),
() => onInvitationRespond(invitation.id, invitation.share.folder_id, false)
() => onInvitationRespond(invitation.id, false)
);
} else if (this.props.hasDisabledSyncItems) {
msg = this.renderNotificationMessage(
@@ -4,7 +4,6 @@ import * as editAlarm from './editAlarm';
import * as exportPdf from './exportPdf';
import * as gotoAnything from './gotoAnything';
import * as hideModalMessage from './hideModalMessage';
import * as leaveSharedFolder from './leaveSharedFolder';
import * as moveToFolder from './moveToFolder';
import * as newFolder from './newFolder';
import * as newNote from './newNote';
@@ -37,7 +36,6 @@ const index:any[] = [
exportPdf,
gotoAnything,
hideModalMessage,
leaveSharedFolder,
moveToFolder,
newFolder,
newNote,
@@ -1,19 +0,0 @@
import { CommandRuntime, CommandDeclaration, CommandContext } from '@joplin/lib/services/CommandService';
import { _ } from '@joplin/lib/locale';
import ShareService from '@joplin/lib/services/share/ShareService';

export const declaration: CommandDeclaration = {
name: 'leaveSharedFolder',
label: () => _('Leave notebook...'),
};

export const runtime = (): CommandRuntime => {
return {
execute: async (_context: CommandContext, folderId: string = null) => {
const answer = confirm(_('This will remove the notebook from your collection and you will no longer have access to its content. Do you wish to continue?'));
if (!answer) return;
await ShareService.instance().leaveSharedFolder(folderId);
},
enabledCondition: 'joplinServerConnected && folderIsShareRootAndNotOwnedByUser',
};
};
@@ -9,7 +9,7 @@ import ShareService from '@joplin/lib/services/share/ShareService';
import styled from 'styled-components';
import StyledFormLabel from '../style/StyledFormLabel';
import StyledInput from '../style/StyledInput';
import Button, { ButtonSize } from '../Button/Button';
import Button from '../Button/Button';
import Logger from '@joplin/lib/Logger';
import StyledMessage from '../style/StyledMessage';
import { ShareUserStatus, StateShare, StateShareUser } from '@joplin/lib/services/share/reducer';
@@ -64,7 +64,7 @@ const StyledRecipientStatusIcon = styled.i`
`;

const StyledRecipients = styled.div`
margin-bottom: 10px;

`;

const StyledRecipientList = styled.div`
@@ -194,13 +194,7 @@ function ShareFolderDialog(props: Props) {
async function recipient_delete(event: RecipientDeleteEvent) {
if (!confirm(_('Delete this invitation? The recipient will no longer have access to this shared notebook.'))) return;

try {
await ShareService.instance().deleteShareRecipient(event.shareUserId);
} catch (error) {
logger.error(error);
alert(_('The recipient could not be removed from the list. Please try again.\n\nThe error was: "%s"', error.message));
}

await ShareService.instance().deleteShareRecipient(event.shareUserId);
await ShareService.instance().refreshShareUsers(share.id);
}

@@ -219,7 +213,7 @@ function ShareFolderDialog(props: Props) {
<StyledFormLabel>{_('Add recipient:')}</StyledFormLabel>
<StyledRecipientControls>
<StyledRecipientInput disabled={disabled} type="email" placeholder="example@domain.com" value={recipientEmail} onChange={recipientEmail_change} />
<Button size={ButtonSize.Small} disabled={disabled} title={_('Share')} onClick={shareRecipient_click}></Button>
<Button disabled={disabled} title={_('Share')} onClick={shareRecipient_click}></Button>
</StyledRecipientControls>
</StyledAddRecipient>
);
@@ -242,7 +236,7 @@ function ShareFolderDialog(props: Props) {
<StyledRecipient key={shareUser.user.email} index={index}>
<StyledRecipientName>{shareUser.user.email}</StyledRecipientName>
<StyledRecipientStatusIcon title={statusToMessage[shareUser.status]} className={statusToIcon[shareUser.status]}></StyledRecipientStatusIcon>
<Button size={ButtonSize.Small} iconName="far fa-times-circle" onClick={() => recipient_delete({ shareUserId: shareUser.id })}/>
<Button iconName="far fa-times-circle" onClick={() => recipient_delete({ shareUserId: shareUser.id })}/>
</StyledRecipient>
);
}
@@ -288,14 +282,6 @@ function ShareFolderDialog(props: Props) {
);
}

const renderInfo = () => {
return (
<p className="info-text -small">
{_('Please note that if it is a large notebook, it may take a few minutes for all the notes to show up on the recipient\'s device.')}
</p>
);
};

async function buttonRow_click(event: ClickEvent) {
if (event.buttonName === 'unshare') {
if (!confirm(_('Unshare this notebook? The recipients will no longer have access to its content.'))) return;
@@ -315,7 +301,6 @@ function ShareFolderDialog(props: Props) {
{renderShareState()}
{renderError()}
{renderRecipients()}
{renderInfo()}
<DialogButtonRow
themeId={props.themeId}
onClick={buttonRow_click}
@@ -20,7 +20,6 @@ import Logger from '@joplin/lib/Logger';
import { FolderEntity } from '@joplin/lib/services/database/types';
import stateToWhenClauseContext from '../../services/commands/stateToWhenClauseContext';
import { store } from '@joplin/lib/reducer';
import { getFolderCallbackUrl, getTagCallbackUrl } from '@joplin/lib/callbackUrlUtils';
const { connect } = require('react-redux');
const shared = require('@joplin/lib/components/shared/side-menu-shared.js');
const { themeStyle } = require('@joplin/lib/theme');
@@ -29,7 +28,6 @@ const Menu = bridge().Menu;
const MenuItem = bridge().MenuItem;
const { substrWithEllipsis } = require('@joplin/lib/string-utils');
const { ALL_NOTES_FILTER_ID } = require('@joplin/lib/reserved-ids');
const { clipboard } = require('electron');

const logger = Logger.create('Sidebar');

@@ -316,16 +314,10 @@ class SidebarComponent extends React.Component<Props, State> {
// that are within a shared notebook. If user wants to do this,
// they'd have to move the notebook out of the shared notebook
// first.
const whenClause = stateToWhenClauseContext(state, { commandFolderId: itemId });

if (CommandService.instance().isEnabled('showShareFolderDialog', whenClause)) {
if (CommandService.instance().isEnabled('showShareFolderDialog', stateToWhenClauseContext(state, { commandFolderId: itemId }))) {
menu.append(new MenuItem(menuUtils.commandToStatefulMenuItem('showShareFolderDialog', itemId)));
}

if (CommandService.instance().isEnabled('leaveSharedFolder', whenClause)) {
menu.append(new MenuItem(menuUtils.commandToStatefulMenuItem('leaveSharedFolder', itemId)));
}

menu.append(
new MenuItem({
label: _('Export'),
@@ -334,29 +326,10 @@ class SidebarComponent extends React.Component<Props, State> {
);
}

if (itemType === BaseModel.TYPE_FOLDER) {
menu.append(
new MenuItem({
label: _('Copy external link'),
click: () => {
clipboard.writeText(getFolderCallbackUrl(itemId));
},
})
);
}

if (itemType === BaseModel.TYPE_TAG) {
menu.append(new MenuItem(
menuUtils.commandToStatefulMenuItem('renameTag', itemId)
));
menu.append(
new MenuItem({
label: _('Copy external link'),
click: () => {
clipboard.writeText(getTagCallbackUrl(itemId));
},
})
);
}

const pluginViews = pluginUtils.viewsByType(this.pluginsRef.current, 'menuItem');
@@ -47,7 +47,6 @@ export default function() {
'toggleSafeMode',
'showShareNoteDialog',
'showShareFolderDialog',
'leaveSharedFolder',
'gotoAnything',
'commandPalette',
'openMasterPasswordDialog',
@@ -357,17 +357,6 @@
return m ? contentElement.scrollTop / m : 0;
}

contentElement.addEventListener('wheel', webviewLib.logEnabledEventHandler(e => {
// When zoomFactor is not 1 (using an HD display is a typical case),
// DOM element's scrollTop is incorrectly calculated after wheel scroll events
// in the layer of Electron/Chromium, as of 2021-09-23.
// To avoid this problem, prevent the upstream from calculating scrollTop and
// calculate by yourself by accumulating wheel events.
// https://github.com/laurent22/joplin/pull/5496
contentElement.scrollTop = Math.max(0, Math.min(maxScrollTop(), contentElement.scrollTop + e.deltaY));
e.preventDefault();
}));

contentElement.addEventListener('scroll', webviewLib.logEnabledEventHandler(e => {
// If the last scroll event was done by the user, lastScrollEventTime is set and
// we can use that to skip the event handling. We skip it because in that case
@@ -6,7 +6,6 @@ import MenuUtils from '@joplin/lib/services/commands/MenuUtils';
import InteropServiceHelper from '../../InteropServiceHelper';
import { _ } from '@joplin/lib/locale';
import { MenuItemLocation } from '@joplin/lib/services/plugins/api/types';
import { getNoteCallbackUrl } from '@joplin/lib/callbackUrlUtils';

import BaseModel from '@joplin/lib/BaseModel';
const bridge = require('@electron/remote').require('./bridge').default;
@@ -14,7 +13,6 @@ const Menu = bridge().Menu;
const MenuItem = bridge().MenuItem;
import Note from '@joplin/lib/models/Note';
import Setting from '@joplin/lib/models/Setting';
const { clipboard } = require('electron');

interface ContextMenuProps {
notes: any[];
@@ -123,6 +121,7 @@ export default class NoteListUtils {
new MenuItem({
label: _('Copy Markdown link'),
click: async () => {
const { clipboard } = require('electron');
const links = [];
for (let i = 0; i < noteIds.length; i++) {
const note = await Note.load(noteIds[i]);
@@ -133,17 +132,6 @@ export default class NoteListUtils {
})
);

if (noteIds.length === 1) {
menu.append(
new MenuItem({
label: _('Copy external link'),
click: () => {
clipboard.writeText(getNoteCallbackUrl(noteIds[0]));
},
})
);
}

if ([9, 10].includes(Setting.value('sync.target'))) {
menu.append(
new MenuItem(
@@ -8,7 +8,6 @@ const Logger = require('@joplin/lib/Logger').default;
const FsDriverNode = require('@joplin/lib/fs-driver-node').default;
const envFromArgs = require('@joplin/lib/envFromArgs');
const packageInfo = require('./packageInfo.js');
const { isCallbackUrl } = require('@joplin/lib/callbackUrlUtils');

// Electron takes the application name from package.json `name` and
// displays this in the tray icon toolip and message box titles, however in
@@ -38,11 +37,7 @@ const env = envFromArgs(process.argv);
const profilePath = profileFromArgs(process.argv);
const isDebugMode = !!process.argv && process.argv.indexOf('--debug') >= 0;

electronApp.setAsDefaultProtocolClient('joplin');

const initialCallbackUrl = process.argv.find((arg) => isCallbackUrl(arg));

const wrapper = new ElectronAppWrapper(electronApp, env, profilePath, isDebugMode, initialCallbackUrl);
const wrapper = new ElectronAppWrapper(electronApp, env, profilePath, isDebugMode);

initBridge(wrapper);
@@ -188,12 +188,6 @@ div.form,
margin-bottom: 20px;
}

p {
&.-small {
font-size: 13px;
}
}

.form > .form-input-group:last-child {
margin-bottom: 0;
}
@@ -210,11 +204,6 @@ a {
color: var(--joplin-url-color);
}

p.info-text {
font-style: italic;
color: var(--joplin-color-faded);
}

/* =========================================================================================
Component-specific classes
========================================================================================= */
@@ -224,6 +213,8 @@ Component-specific classes
padding-bottom: 20px;
}

.master-password-dialog .dialog-root {
min-width: 500px;
max-width: 600px;
4 packages/app-desktop/package-lock.json (generated)
@@ -1,12 +1,12 @@
{
"name": "@joplin/app-desktop",
"version": "2.5.3",
"version": "2.5.1",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@joplin/app-desktop",
"version": "2.5.3",
"version": "2.5.1",
"license": "MIT",
"dependencies": {
"@electron/remote": "^2.0.1",
@@ -1,6 +1,6 @@
{
"name": "@joplin/app-desktop",
"version": "2.5.3",
"version": "2.5.1",
"description": "Joplin for Desktop",
"main": "main.js",
"private": true,
@@ -37,7 +37,7 @@
"asar": true,
"asarUnpack": "./node_modules/node-notifier/vendor/**",
"win": {
"rfc3161TimeStampServer": "http://sha256timestamp.ws.symantec.com/sha256/timestamp",
"rfc3161TimeStampServer": "http://timestamp.comodoca.com/rfc3161",
"icon": "../../Assets/ImageSources/Joplin.ico",
"target": [
{
@@ -77,25 +77,13 @@
"icon": "../../Assets/macOs.icns",
"target": "dmg",
"hardenedRuntime": true,
"entitlements": "./build-mac/entitlements.mac.inherit.plist",
"extendInfo": {
"CFBundleURLTypes": [
{
"CFBundleURLSchemes": [
"joplin"
],
"CFBundleTypeRole": "Editor",
"CFBundleURLName": "org.joplinapp.x-callback-url"
}
]
}
"entitlements": "./build-mac/entitlements.mac.inherit.plist"
},
"linux": {
"icon": "../../Assets/LinuxIcons",
"category": "Office",
"desktop": {
"Icon": "joplin",
"MimeType": "x-scheme-handler/joplin;"
"Icon": "joplin"
},
"target": "AppImage"
},
@@ -8,10 +8,6 @@

# ./runForTesting.sh 1 createUsers,createData,reset,e2ee,sync && ./runForTesting.sh 2 reset,e2ee,sync && ./runForTesting.sh 1

# Without E2EE:

# ./runForTesting.sh 1 createUsers,createData,reset,sync && ./runForTesting.sh 2 reset,sync && ./runForTesting.sh 1

set -e

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
@@ -67,7 +63,7 @@ do

elif [[ $CMD == "sync" ]]; then

echo "sync --use-lock 0" >> "$CMD_FILE"
echo "sync" >> "$CMD_FILE"

# elif [[ $CMD == "generatePpk" ]]; then
@@ -1,57 +0,0 @@
import ShareService from '@joplin/lib/services/share/ShareService';
import Logger from '@joplin/lib/Logger';
import Folder from '@joplin/lib/models/Folder';
import { reg } from '@joplin/lib/registry';
import { _ } from '@joplin/lib/locale';

const logger = Logger.create('invitationRespond');

export default async function(shareUserId: string, folderId: string, accept: boolean) {
// The below functions can take a bit of time to complete so in the
// meantime we hide the notification so that the user doesn't click
// multiple times on the Accept link.
ShareService.instance().setProcessingShareInvitationResponse(true);

try {
await ShareService.instance().respondInvitation(shareUserId, accept);
} catch (error) {
logger.error(error);
alert(_('Could not respond to the invitation. Please try again, or check with the notebook owner if they are still sharing it.\n\nThe error was: "%s"', error.message));
}

// This is to handle an edge case that can happen if:
//
// - The user is a recipient of a share.
// - The sender removes the recipient from the share, then add him again.
// - The recipient gets the invitation, but reply "Reject" to it.
//
// If we don't handle this case, it would kind of work but would create
// conflicts because the shared notes would be converted to local ones, then
// during sync the synchronizer would try to delete them. Since they've been
// changed, they'll all be marked as conflicts.
//
// So the simplest thing to do is to leave the folder, which is most likely
// what the user wants. And if not, it's always possible to ask the sender
// to share again.
//
// NOTE: DOESN'T WORK. Because Folder.updateAllShareIds() would still run
// and change the notes share_id property, thus creating conflicts again.
// Leaving it as it is for now, as it's an unlikely scenario and it won't
// cause any data loss.

if (!accept) {
const existingFolder = await Folder.load(folderId);
if (existingFolder) {
logger.warn('Rejected an invitation, but the folder was already there. Conflicts are likely to happen. ShareUserId:', shareUserId, 'Folder ID:', folderId);
// await ShareService.instance().leaveSharedFolder(folderId);
}
}

try {
await ShareService.instance().refreshShareInvitations();
} finally {
ShareService.instance().setProcessingShareInvitationResponse(false);
}

void reg.scheduleSync(1000);
}
@@ -106,7 +106,7 @@ const EncryptionConfigScreen = (props: Props) => {
<View style={{ flex: 1, flexDirection: 'row', alignItems: 'center' }}>
<TextInput selectionColor={theme.textSelectionColor} keyboardAppearance={theme.keyboardAppearance} secureTextEntry={true} value={password} onChangeText={(text: string) => onInputPasswordChange(mk, text)} style={inputStyle}></TextInput>
<Text style={{ fontSize: theme.fontSize, marginRight: 10, color: theme.color }}>{passwordOk}</Text>
<Button title={_('Save')} onPress={() => onSavePasswordClick(mk, inputPasswords)}></Button>
<Button title={_('Save')} onPress={() => onSavePasswordClick(mk, props.passwords)}></Button>
</View>
);
}
@@ -15,8 +15,8 @@
"postinstall": "jetify && npm run build"
},
"dependencies": {
"@joplin/lib": "~2.5",
"@joplin/renderer": "~2.5",
"@joplin/lib": "~2.4",
"@joplin/renderer": "~2.4",
"@react-native-community/clipboard": "^1.5.0",
"@react-native-community/datetimepicker": "^3.0.3",
"@react-native-community/geolocation": "^2.0.2",
@@ -73,7 +73,7 @@
"@codemirror/lang-markdown": "^0.18.4",
"@codemirror/state": "^0.18.7",
"@codemirror/view": "^0.18.19",
"@joplin/tools": "~2.5",
"@joplin/tools": "~2.4",
"@rollup/plugin-node-resolve": "^13.0.0",
"@rollup/plugin-typescript": "^8.2.1",
"@types/node": "^14.14.6",
@@ -476,7 +476,7 @@ async function initialize(dispatch: Function) {
if (Setting.value('env') == 'prod') {
await db.open({ name: 'joplin.sqlite' });
} else {
await db.open({ name: 'joplin-107.sqlite' });
await db.open({ name: 'joplin-104.sqlite' });

// await db.clearForTesting();
}
@@ -492,10 +492,6 @@ async function initialize(dispatch: Function) {
// Setting.setValue('sync.10.userContentPath', 'https://joplinusercontent.com');
Setting.setValue('sync.10.path', 'http://api.joplincloud.local:22300');
Setting.setValue('sync.10.userContentPath', 'http://joplinusercontent.local:22300');

Setting.setValue('sync.target', 10);
Setting.setValue('sync.10.username', 'user1@example.com');
Setting.setValue('sync.10.password', 'hunter1hunter2hunter3');
}

if (!Setting.value('clientId')) Setting.setValue('clientId', uuid.create());
@@ -26,19 +26,6 @@ export enum ModelType {
Command = 16,
}

export interface DeleteOptions {
idFieldName?: string;
changeSource?: number;
deleteChildren?: boolean;

// By default the application tracks item deletions, so that they can be
// applied to the remote items during synchronisation. However, in some
// cases, we don't want this. In particular when an item is deleted via
// sync, we don't need to track the deletion, because the operation doesn't
// need to applied again on next sync.
trackDeleted?: boolean;
}

class BaseModel {

// TODO: This ancient part of Joplin about model types is a bit of a
@@ -645,7 +632,7 @@ class BaseModel {
return this.db().exec(`DELETE FROM ${this.tableName()} WHERE id = ?`, [id]);
}

static async batchDelete(ids: string[], options: DeleteOptions = null) {
static async batchDelete(ids: string[], options: any = null) {
if (!ids.length) return;
options = this.modOptions(options);
const idFieldName = options.idFieldName ? options.idFieldName : 'id';
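The DeleteOptions interface above gives callers a typed way to opt out of deletion tracking: trackDeleted: false tells the model layer not to record the deletion for the next sync, which is what the Synchronizer hunks further down pass when an item is removed because its remote copy is already gone. Below is a small sketch of such a call; the import paths and the Note/ItemChange choice are assumptions for illustration, the option shape itself comes from the hunks in this diff.

```typescript
import Note from '@joplin/lib/models/Note';
import ItemChange from '@joplin/lib/models/ItemChange';

// Delete a note locally because the remote copy no longer exists. Passing
// trackDeleted: false keeps the deletion out of the sync queue, so the
// synchronizer will not try to delete the already-gone remote item again.
async function deleteLocalCopyAfterRemoteDeletion(noteId: string) {
	await Note.delete(noteId, {
		changeSource: ItemChange.SOURCE_SYNC,
		trackDeleted: false,
	});
}
```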
@@ -20,7 +20,8 @@ export enum LogLevel {
Debug = 40,
}

interface TargetOptions {
interface Target {
type: TargetType;
level?: LogLevel;
database?: any;
console?: any;
@@ -35,10 +36,6 @@ interface TargetOptions {
formatInfo?: string;
}

interface Target extends TargetOptions {
type: TargetType;
}

export interface LoggerWrapper {
debug: Function;
info: Function;
@@ -106,11 +103,11 @@ class Logger {
return this.targets_;
}

addTarget(type: TargetType, options: TargetOptions = null) {
addTarget(type: TargetType, options: any = null) {
const target = { type: type };
for (const n in options) {
if (!options.hasOwnProperty(n)) continue;
(target as any)[n] = (options as any)[n];
(target as any)[n] = options[n];
}

this.targets_.push(target);
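The Logger hunk above swaps an untyped options bag for the TargetOptions interface, so addTarget() calls are checked at compile time, and Target then only adds the mandatory type field. A short usage sketch under that typed signature, mirroring the syncDebugLog.addTarget call in the desktop app.ts hunk earlier; the log file path is only an example and the import path is assumed.

```typescript
import Logger, { TargetType } from '@joplin/lib/Logger';
import { homedir } from 'os';

const log = new Logger();
// With TargetOptions, an unknown key or a wrongly typed 'path' is now a compile error.
log.addTarget(TargetType.Console);
log.addTarget(TargetType.File, { path: `${homedir()}/synclog.txt` });
log.info('Console and file targets configured');
```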
@@ -1,6 +1,5 @@
/* eslint-disable no-unused-vars */

const { splitCommandBatch } = require('./string-utils');
const StringUtils = require('./string-utils');

describe('StringUtils', function() {
@@ -54,26 +53,4 @@ describe('StringUtils', function() {
});
}));

it('should split the command batch by newlines not inside quotes', (async () => {
const eol = '\n';
const testCases = [
['',
['']],
['command1',
['command1']],
['command1 arg1 arg2 arg3',
['command1 arg1 arg2 arg3']],
[`command1 arg1 'arg2${eol}continue' arg3`,
[`command1 arg1 'arg2${eol}continue' arg3`]],
[`command1 arg1 'arg2${eol}continue'${eol}command2${eol}command3 'arg1${eol}continue${eol}continue' arg2 arg3`,
[`command1 arg1 'arg2${eol}continue'`, 'command2', `command3 'arg1${eol}continue${eol}continue' arg2 arg3`]],
[`command1 arg\\1 'arg2${eol}continue\\'continue' arg3`,
[`command1 arg\\1 'arg2${eol}continue\\'continue' arg3`]],
];

testCases.forEach((t) => {
expect(splitCommandBatch(t[0])).toEqual(t[1]);
});
}));

});
@@ -20,17 +20,34 @@ import JoplinError from './JoplinError';
import ShareService from './services/share/ShareService';
import TaskQueue from './TaskQueue';
import ItemUploader from './services/synchronizer/ItemUploader';
import { FileApi, RemoteItem } from './file-api';
import { FileApi } from './file-api';
import JoplinDatabase from './JoplinDatabase';
import { fetchSyncInfo, getActiveMasterKey, localSyncInfo, mergeSyncInfos, saveLocalSyncInfo, SyncInfo, syncInfoEquals, uploadSyncInfo } from './services/synchronizer/syncInfoUtils';
import { getMasterPassword, setupAndDisableEncryption, setupAndEnableEncryption } from './services/e2ee/utils';
import { generateKeyPair } from './services/e2ee/ppk';
import syncDebugLog from './services/synchronizer/syncDebugLog';
const { sprintf } = require('sprintf-js');
const { Dirnames } = require('./services/synchronizer/utils/types');

const logger = Logger.create('Synchronizer');

interface RemoteItem {
id: string;
path?: string;
type_?: number;
isDeleted?: boolean;

// This the time when the file was created on the server. It is used for
// example for the locking mechanim or any file that's not an actual Joplin
// item.
updated_time?: number;

// This is the time that corresponds to the actual Joplin item updated_time
// value. A note is always uploaded with a delay so the server updated_time
// value will always be ahead. However for synchronising we need to know the
// exact Joplin item updated_time value.
jop_updated_time?: number;
}

function isCannotSyncError(error: any): boolean {
if (!error) return false;
if (['rejectedByTarget', 'fileNotFound'].indexOf(error.code) >= 0) return true;
@@ -180,7 +197,7 @@ export default class Synchronizer {
return lines;
}

logSyncOperation(action: string, local: any = null, remote: RemoteItem = null, message: string = null, actionCount: number = 1) {
logSyncOperation(action: any, local: any = null, remote: RemoteItem = null, message: string = null, actionCount: number = 1) {
const line = ['Sync'];
line.push(action);
if (message) line.push(message);
@@ -208,8 +225,6 @@ export default class Synchronizer {
logger.debug(line.join(': '));
}

if (!['fetchingProcessed', 'fetchingTotal'].includes(action)) syncDebugLog.info(line.join(': '));

if (!this.progressReport_[action]) this.progressReport_[action] = 0;
this.progressReport_[action] += actionCount;
this.progressReport_.state = this.state();
@@ -532,7 +547,7 @@ export default class Synchronizer {
if (this.cancelling()) break;

let local = locals[i];
const ItemClass: typeof BaseItem = BaseItem.itemClass(local);
const ItemClass = BaseItem.itemClass(local);
const path = BaseItem.systemPath(local);

// Safety check to avoid infinite loops.
@@ -726,10 +741,7 @@ export default class Synchronizer {
const syncTimeQueries = BaseItem.updateSyncTimeQueries(syncTargetId, local, time.unixMs());
await ItemClass.save(local, { autoTimestamp: false, changeSource: ItemChange.SOURCE_SYNC, nextQueries: syncTimeQueries });
} else {
await ItemClass.delete(local.id, {
changeSource: ItemChange.SOURCE_SYNC,
trackDeleted: false,
});
await ItemClass.delete(local.id, { changeSource: ItemChange.SOURCE_SYNC });
}
} else if (action == 'noteConflict') {
// ------------------------------------------------------------------------------
@@ -782,7 +794,7 @@ export default class Synchronizer {
if (local.encryption_applied) this.dispatch({ type: 'SYNC_GOT_ENCRYPTED_ITEM' });
} else {
// Remote no longer exists (note deleted) so delete local one too
await ItemClass.delete(local.id, { changeSource: ItemChange.SOURCE_SYNC, trackDeleted: false });
await ItemClass.delete(local.id, { changeSource: ItemChange.SOURCE_SYNC });
}
}
@@ -1,64 +0,0 @@
import * as callbackUrlUtils from './callbackUrlUtils';

describe('callbackUrlUtils', function() {

it('should identify valid callback urls', () => {
const url = 'joplin://x-callback-url/123?a=b';
expect(callbackUrlUtils.isCallbackUrl(url)).toBe(true);
});

it('should identify invalid callback urls', () => {
expect(callbackUrlUtils.isCallbackUrl('not-joplin://x-callback-url/123?a=b')).toBe(false);
expect(callbackUrlUtils.isCallbackUrl('joplin://xcallbackurl/123?a=b')).toBe(false);
});

it('should build valid note callback urls', () => {
const noteUrl = callbackUrlUtils.getNoteCallbackUrl('123456');
expect(callbackUrlUtils.isCallbackUrl(noteUrl)).toBe(true);
expect(noteUrl).toBe('joplin://x-callback-url/openNote?id=123456');
});

it('should build valid folder callback urls', () => {
const folderUrl = callbackUrlUtils.getFolderCallbackUrl('123456');
expect(callbackUrlUtils.isCallbackUrl(folderUrl)).toBe(true);
expect(folderUrl).toBe('joplin://x-callback-url/openFolder?id=123456');
});

it('should build valid tag callback urls', () => {
const tagUrl = callbackUrlUtils.getTagCallbackUrl('123456');
expect(callbackUrlUtils.isCallbackUrl(tagUrl)).toBe(true);
expect(tagUrl).toBe('joplin://x-callback-url/openTag?id=123456');
});

it('should parse note callback urls', () => {
const parsed = callbackUrlUtils.parseCallbackUrl('joplin://x-callback-url/openNote?id=123456');
expect(parsed.command).toBe(callbackUrlUtils.CallbackUrlCommand.OpenNote);
expect(parsed.params).toStrictEqual({ id: '123456' });
});

it('should parse folder callback urls', () => {
const parsed = callbackUrlUtils.parseCallbackUrl('joplin://x-callback-url/openFolder?id=123456');
expect(parsed.command).toBe(callbackUrlUtils.CallbackUrlCommand.OpenFolder);
expect(parsed.params).toStrictEqual({ id: '123456' });
});

it('should parse tag callback urls', () => {
const parsed = callbackUrlUtils.parseCallbackUrl('joplin://x-callback-url/openTag?id=123456');
expect(parsed.command).toBe(callbackUrlUtils.CallbackUrlCommand.OpenTag);
expect(parsed.params).toStrictEqual({ id: '123456' });
});

it('should throw an error on invalid input', () => {
expect(() => callbackUrlUtils.parseCallbackUrl('not-a-url'))
.toThrowError('Invalid callback url not-a-url');

expect(() => callbackUrlUtils.parseCallbackUrl('not-joplin://x-callback-url/123?a=b'))
.toThrowError('Invalid callback url not-joplin://x-callback-url/123?a=b');

expect(() => callbackUrlUtils.parseCallbackUrl('joplin://xcallbackurl/123?a=b'))
.toThrowError('Invalid callback url joplin://xcallbackurl/123?a=b');
});

});
@@ -1,37 +0,0 @@
const URL = require('url-parse');

export function isCallbackUrl(s: string) {
return s.startsWith('joplin://x-callback-url/');
}

export function getNoteCallbackUrl(noteId: string) {
return `joplin://x-callback-url/openNote?id=${encodeURIComponent(noteId)}`;
}

export function getFolderCallbackUrl(folderId: string) {
return `joplin://x-callback-url/openFolder?id=${encodeURIComponent(folderId)}`;
}

export function getTagCallbackUrl(tagId: string) {
return `joplin://x-callback-url/openTag?id=${encodeURIComponent(tagId)}`;
}

export const enum CallbackUrlCommand {
OpenNote = 'openNote',
OpenFolder = 'openFolder',
OpenTag = 'openTag',
}

export interface CallbackUrlInfo {
command: CallbackUrlCommand;
params: Record<string, string>;
}

export function parseCallbackUrl(s: string): CallbackUrlInfo {
if (!isCallbackUrl(s)) throw new Error(`Invalid callback url ${s}`);
const url = new URL(s, true);
return {
command: url.pathname.substring(url.pathname.lastIndexOf('/') + 1) as CallbackUrlCommand,
params: url.query,
};
}
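For reference, a minimal sketch of how the callback URL helpers above can be consumed; the round trip mirrors the unit tests in this compare, and the console output is only an illustration:

import { parseCallbackUrl, isCallbackUrl, getNoteCallbackUrl, CallbackUrlCommand } from './callbackUrlUtils';

// Build a URL for a note, then round-trip it through the parser.
const url = getNoteCallbackUrl('0123456789abcdef');
if (isCallbackUrl(url)) {
	const info = parseCallbackUrl(url); // { command: 'openNote', params: { id: '0123456789abcdef' } }
	if (info.command === CallbackUrlCommand.OpenNote) {
		console.info('Would open note', info.params.id);
	}
}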
@@ -96,13 +96,6 @@ export const onSavePasswordClick = (mk: MasterKeyEntity, passwords: Record<strin
} else {
Setting.setObjectValue('encryption.passwordCache', mk.id, password);
}

// When setting a master key password, if the master password is not set, we
// assume that this password is the master password. If it turns out it's
// not, it's always possible to change it in the UI.
if (password && !Setting.value('encryption.masterPassword')) {
Setting.setValue('encryption.masterPassword', password);
}
};

export const onMasterPasswordSave = (masterPasswordInput: string) => {
@@ -148,11 +141,6 @@ export const useInputPasswords = (propsPasswords: Record<string, string>) => {

export const usePasswordChecker = (masterKeys: MasterKeyEntity[], activeMasterKeyId: string, masterPassword: string, passwords: Record<string, string>) => {
const [passwordChecks, setPasswordChecks] = useState<PasswordChecks>({});

// "masterPasswordKeys" are the master key that can be decrypted with the
// master password. It should be all of them normally, but in previous
// versions it was possible to have different passwords for different keys,
// so we need this for backward compatibility.
const [masterPasswordKeys, setMasterPasswordKeys] = useState<PasswordChecks>({});
const [masterPasswordStatus, setMasterPasswordStatus] = useState<MasterPasswordStatus>(MasterPasswordStatus.Unknown);

@@ -179,6 +167,7 @@ export const usePasswordChecker = (masterKeys: MasterKeyEntity[], activeMasterKe
setMasterPasswordKeys(masterPasswordKeys => {
if (JSON.stringify(newMasterPasswordKeys) === JSON.stringify(masterPasswordKeys)) return masterPasswordKeys;
console.info('====', JSON.stringify(newMasterPasswordKeys), JSON.stringify(masterPasswordKeys));
return newMasterPasswordKeys;
});
@@ -16,30 +16,6 @@ export interface MultiPutItem {
body: string;
}

export interface RemoteItem {
id: string;
path?: string;
type_?: number;
isDeleted?: boolean;

// This the time when the file was created on the server. It is used for
// example for the locking mechanim or any file that's not an actual Joplin
// item.
updated_time?: number;

// This is the time that corresponds to the actual Joplin item updated_time
// value. A note is always uploaded with a delay so the server updated_time
// value will always be ahead. However for synchronising we need to know the
// exact Joplin item updated_time value.
jop_updated_time?: number;
}

export interface PaginatedList {
items: RemoteItem[];
has_more: boolean;
context: any;
}

function requestCanBeRepeated(error: any) {
const errorCode = typeof error === 'object' && error.code ? error.code : null;

@@ -250,7 +226,7 @@ class FileApi {

// DRIVER MUST RETURN PATHS RELATIVE TO `path`
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
public async list(path = '', options: any = null): Promise<PaginatedList> {
async list(path = '', options: any = null) {
if (!options) options = {};
if (!('includeHidden' in options)) options.includeHidden = false;
if (!('context' in options)) options.context = null;
@@ -259,7 +235,7 @@ class FileApi {

logger.debug(`list ${this.baseDir()}`);

const result: PaginatedList = await tryAndRepeat(() => this.driver_.list(this.fullPath(path), options), this.requestRepeatCount());
const result = await tryAndRepeat(() => this.driver_.list(this.fullPath(path), options), this.requestRepeatCount());

if (!options.includeHidden) {
const temp = [];
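The PaginatedList shape above is what paging against the sync target is built on: callers pass the returned context back in until has_more is false. A rough usage sketch, assuming an already-initialised FileApi instance:

import { PaginatedList, RemoteItem } from '@joplin/lib/file-api';

async function listAllItems(fileApi: any): Promise<RemoteItem[]> {
	const output: RemoteItem[] = [];
	let context: any = null;
	while (true) {
		// Each call returns one page of items plus an opaque context for the next call.
		const page: PaginatedList = await fileApi.list('', { context });
		output.push(...page.items);
		if (!page.has_more) break;
		context = page.context;
	}
	return output;
}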
@@ -301,38 +301,28 @@ interface NoteResourceRecognition {
}

const preProcessFile = async (filePath: string): Promise<string> => {
// Disabled pre-processing for now because it runs out of memory:
// https://github.com/laurent22/joplin/issues/5543
const content: string = await shim.fsDriver().readFile(filePath, 'utf8');

// The note content in an ENEX file is wrapped in a CDATA block so it means
// that any "]]>" inside the note must be somehow escaped, or else the CDATA
// block would be closed at the wrong point.
//
// It could be fixed by not loading the whole file in memory, but there are
// other issues because people import 1GB+ files so pre-processing
// everything means creating a new copy of that file, and that has its own
// problems.

return filePath;

// const content: string = await shim.fsDriver().readFile(filePath, 'utf8');

// // The note content in an ENEX file is wrapped in a CDATA block so it means
// // that any "]]>" inside the note must be somehow escaped, or else the CDATA
// // block would be closed at the wrong point.
// //
// // The problem is that Evernote appears to encode "]]>" as "]]<![CDATA[>]]>"
// // instead of the more sensible "]]>", or perhaps they have nothing in
// // place to properly escape data imported from their web clipper. In any
// // case it results in invalid XML that Evernote cannot even import back.
// //
// // Handling that invalid XML with SAX would also be very tricky, so instead
// // we add a pre-processing step that converts this tags to just ">". It
// // should be safe to do so because such content can only be within the body
// // of a note - and ">" or ">" is equivalent.
// //
// // Ref: https://discourse.joplinapp.org/t/20470/4
// const newContent = content.replace(/<!\[CDATA\[>\]\]>/g, '>');
// if (content === newContent) return filePath;
// const newFilePath = `${Setting.value('tempDir')}/${md5(Date.now() + Math.random())}.enex`;
// await shim.fsDriver().writeFile(newFilePath, newContent, 'utf8');
// return newFilePath;
// The problem is that Evernote appears to encode "]]>" as "]]<![CDATA[>]]>"
// instead of the more sensible "]]>", or perhaps they have nothing in
// place to properly escape data imported from their web clipper. In any
// case it results in invalid XML that Evernote cannot even import back.
//
// Handling that invalid XML with SAX would also be very tricky, so instead
// we add a pre-processing step that converts this tags to just ">". It
// should be safe to do so because such content can only be within the body
// of a note - and ">" or ">" is equivalent.
//
// Ref: https://discourse.joplinapp.org/t/20470/4
const newContent = content.replace(/<!\[CDATA\[>\]\]>/g, '>');
if (content === newContent) return filePath;
const newFilePath = `${Setting.value('tempDir')}/${md5(Date.now() + Math.random())}.enex`;
await shim.fsDriver().writeFile(newFilePath, newContent, 'utf8');
return newFilePath;
};

export default async function importEnex(parentFolderId: string, filePath: string, importOptions: ImportOptions = null) {
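The disabled pre-processing above boils down to one regex replacement over the file content. A mechanical illustration of what that replacement does to a small string; the sample input is made up, only the regex comes from the code above:

// Evernote writes the escaped sequence "]]<![CDATA[>]]>" inside the note CDATA.
// The replacement collapses the "<![CDATA[>]]>" part of it to a plain ">".
const sample = 'before ]]<![CDATA[>]]> after';
const fixed = sample.replace(/<!\[CDATA\[>\]\]>/g, '>');
console.info(fixed); // "before ]]> after"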
@@ -1,4 +1,4 @@
import { ModelType, DeleteOptions } from '../BaseModel';
import { ModelType } from '../BaseModel';
import { BaseItemEntity, NoteEntity } from '../services/database/types';
import Setting from './Setting';
import BaseModel from '../BaseModel';
@@ -236,11 +236,11 @@ export default class BaseItem extends BaseModel {
return ItemClass.delete(id);
}

static async delete(id: string, options: DeleteOptions = null) {
static async delete(id: string, options: any = null) {
return this.batchDelete([id], options);
}

static async batchDelete(ids: string[], options: DeleteOptions = null) {
static async batchDelete(ids: string[], options: any = null) {
if (!options) options = {};
let trackDeleted = true;
if (options && options.trackDeleted !== null && options.trackDeleted !== undefined) trackDeleted = options.trackDeleted;
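One side of this hunk types the delete options as a DeleteOptions interface from BaseModel, the other keeps them as any. The declaration itself is not shown in this compare; a sketch of the fields that the hunks above and the Synchronizer/Folder changes actually use (inferred from those call sites, not the real declaration):

// Inferred from the call sites in this diff, not the actual declaration.
interface DeleteOptions {
	// Where the deletion originates from, e.g. ItemChange.SOURCE_SYNC.
	changeSource?: number;
	// When false, the deletion is not recorded in deleted_items, so it is not
	// pushed back to the sync target (used for deletions that arrived via sync).
	trackDeleted?: boolean;
	// Folder.delete() only: whether child notes and sub-notebooks are deleted too.
	deleteChildren?: boolean;
}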
@@ -1,5 +1,5 @@
import { FolderEntity } from '../services/database/types';
import BaseModel, { DeleteOptions } from '../BaseModel';
import BaseModel from '../BaseModel';
import time from '../time';
import { _ } from '../locale';
import Note from './Note';
@@ -8,7 +8,6 @@ import BaseItem from './BaseItem';
import Resource from './Resource';
import { isRootSharedFolder } from '../services/share/reducer';
import Logger from '../Logger';
import syncDebugLog from '../services/synchronizer/syncDebugLog';
const { substrWithEllipsis } = require('../string-utils.js');

const logger = Logger.create('models/Folder');
@@ -79,7 +78,7 @@ export default class Folder extends BaseItem {
return this.db().exec(query);
}

public static async delete(folderId: string, options: DeleteOptions = null) {
static async delete(folderId: string, options: any = null) {
options = {
deleteChildren: true,
...options,
@@ -652,8 +651,6 @@ export default class Folder extends BaseItem {
if (o.title == Folder.conflictFolderTitle()) throw new Error(_('Notebooks cannot be named "%s", which is a reserved title.', o.title));
}

syncDebugLog.info('Folder Save:', o);

return super.save(o, options).then((folder: FolderEntity) => {
this.dispatch({
type: 'FOLDER_UPDATE_ONE',
@@ -10,7 +10,6 @@ import Tag from './Tag';

const { sprintf } = require('sprintf-js');
import Resource from './Resource';
import syncDebugLog from '../services/synchronizer/syncDebugLog';
const { pregQuote, substrWithEllipsis } = require('../string-utils.js');
const { _ } = require('../locale');
const ArrayUtils = require('../ArrayUtils.js');
@@ -665,8 +664,6 @@ export default class Note extends BaseItem {
// Trying to fix: https://github.com/laurent22/joplin/issues/3893
const oldNote = !isNew && o.id ? await Note.load(o.id) : null;

syncDebugLog.info('Save Note: P:', oldNote);

let beforeNoteJson = null;
if (oldNote && this.revisionService().isOldNote(o.id)) {
beforeNoteJson = JSON.stringify(oldNote);
@@ -683,8 +680,6 @@ export default class Note extends BaseItem {
}
}

syncDebugLog.info('Save Note: N:', o);

const note = await super.save(o, options);

const changeSource = options && options.changeSource ? options.changeSource : null;
packages/lib/package-lock.json (generated, 90 lines changed)
@@ -29,7 +29,6 @@
|
||||
"image-data-uri": "^2.0.0",
|
||||
"image-type": "^3.0.0",
|
||||
"immer": "^7.0.14",
|
||||
"js-yaml": "^4.1.0",
|
||||
"levenshtein": "^1.0.5",
|
||||
"lodash": "^4.17.20",
|
||||
"markdown-it": "^10.0.0",
|
||||
@@ -68,7 +67,6 @@
|
||||
"devDependencies": {
|
||||
"@types/fs-extra": "^9.0.6",
|
||||
"@types/jest": "^26.0.15",
|
||||
"@types/js-yaml": "^4.0.2",
|
||||
"@types/node": "^14.14.6",
|
||||
"@types/node-rsa": "^1.1.1",
|
||||
"@types/react": "^17.0.20",
|
||||
@@ -724,19 +722,6 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": {
|
||||
"version": "3.14.1",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
|
||||
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"argparse": "^1.0.7",
|
||||
"esprima": "^4.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"js-yaml": "bin/js-yaml.js"
|
||||
}
|
||||
},
|
||||
"node_modules/@istanbuljs/schema": {
|
||||
"version": "0.1.3",
|
||||
"integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
|
||||
@@ -1112,12 +1097,6 @@
|
||||
"pretty-format": "^26.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/js-yaml": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.3.tgz",
|
||||
"integrity": "sha512-5t9BhoORasuF5uCPr+d5/hdB++zRFUTMIZOzbNkr+jZh3yQht4HYbRDyj9fY8n2TZT30iW9huzav73x4NikqWg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "14.17.20",
|
||||
"integrity": "sha512-gI5Sl30tmhXsqkNvopFydP7ASc4c2cLfGNQrVKN3X90ADFWFsPEsotm/8JHSUJQKTHbwowAHtcJPeyVhtKv0TQ==",
|
||||
@@ -1125,6 +1104,7 @@
|
||||
},
|
||||
"node_modules/@types/node-rsa": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node-rsa/-/node-rsa-1.1.1.tgz",
|
||||
"integrity": "sha512-itzxtaBgk4OMbrCawVCvas934waMZWjW17v7EYgFVlfYS/cl0/P7KZdojWCq9SDJMI5cnLQLUP8ayhVCTY8TEg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
@@ -3074,6 +3054,18 @@
|
||||
"version": "1.0.0",
|
||||
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
"version": "2.3.2",
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/fstream": {
|
||||
"version": "1.0.12",
|
||||
"integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==",
|
||||
@@ -4661,21 +4653,17 @@
|
||||
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
|
||||
},
|
||||
"node_modules/js-yaml": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
|
||||
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
|
||||
"version": "3.14.1",
|
||||
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"argparse": "^2.0.1"
|
||||
"argparse": "^1.0.7",
|
||||
"esprima": "^4.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"js-yaml": "bin/js-yaml.js"
|
||||
}
|
||||
},
|
||||
"node_modules/js-yaml/node_modules/argparse": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
|
||||
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
|
||||
},
|
||||
"node_modules/jsbn": {
|
||||
"version": "0.1.1",
|
||||
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
|
||||
@@ -5549,6 +5537,7 @@
|
||||
},
|
||||
"node_modules/node-rsa": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/node-rsa/-/node-rsa-1.1.1.tgz",
|
||||
"integrity": "sha512-Jd4cvbJMryN21r5HgxQOpMEqv+ooke/korixNNK3mGqfGJmy0M77WDDzo/05969+OkMy3XW1UuZsSmW9KQm7Fw==",
|
||||
"dependencies": {
|
||||
"asn1": "^0.2.4"
|
||||
@@ -8904,18 +8893,6 @@
|
||||
"get-package-type": "^0.1.0",
|
||||
"js-yaml": "^3.13.1",
|
||||
"resolve-from": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"js-yaml": {
|
||||
"version": "3.14.1",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
|
||||
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"argparse": "^1.0.7",
|
||||
"esprima": "^4.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@istanbuljs/schema": {
|
||||
@@ -9240,12 +9217,6 @@
|
||||
"pretty-format": "^26.0.0"
|
||||
}
|
||||
},
|
||||
"@types/js-yaml": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.3.tgz",
|
||||
"integrity": "sha512-5t9BhoORasuF5uCPr+d5/hdB++zRFUTMIZOzbNkr+jZh3yQht4HYbRDyj9fY8n2TZT30iW9huzav73x4NikqWg==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/node": {
|
||||
"version": "14.17.20",
|
||||
"integrity": "sha512-gI5Sl30tmhXsqkNvopFydP7ASc4c2cLfGNQrVKN3X90ADFWFsPEsotm/8JHSUJQKTHbwowAHtcJPeyVhtKv0TQ==",
|
||||
@@ -9253,6 +9224,7 @@
|
||||
},
|
||||
"@types/node-rsa": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node-rsa/-/node-rsa-1.1.1.tgz",
|
||||
"integrity": "sha512-itzxtaBgk4OMbrCawVCvas934waMZWjW17v7EYgFVlfYS/cl0/P7KZdojWCq9SDJMI5cnLQLUP8ayhVCTY8TEg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
@@ -10755,6 +10727,11 @@
|
||||
"version": "1.0.0",
|
||||
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
|
||||
},
|
||||
"fsevents": {
|
||||
"version": "2.3.2",
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"optional": true
|
||||
},
|
||||
"fstream": {
|
||||
"version": "1.0.12",
|
||||
"integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==",
|
||||
@@ -11984,18 +11961,12 @@
|
||||
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
|
||||
},
|
||||
"js-yaml": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
|
||||
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
|
||||
"version": "3.14.1",
|
||||
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"argparse": "^2.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"argparse": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
|
||||
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
|
||||
}
|
||||
"argparse": "^1.0.7",
|
||||
"esprima": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"jsbn": {
|
||||
@@ -12682,6 +12653,7 @@
|
||||
},
|
||||
"node-rsa": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/node-rsa/-/node-rsa-1.1.1.tgz",
|
||||
"integrity": "sha512-Jd4cvbJMryN21r5HgxQOpMEqv+ooke/korixNNK3mGqfGJmy0M77WDDzo/05969+OkMy3XW1UuZsSmW9KQm7Fw==",
|
||||
"requires": {
|
||||
"asn1": "^0.2.4"
|
||||
|
@@ -18,7 +18,6 @@
|
||||
"devDependencies": {
|
||||
"@types/fs-extra": "^9.0.6",
|
||||
"@types/jest": "^26.0.15",
|
||||
"@types/js-yaml": "^4.0.2",
|
||||
"@types/node": "^14.14.6",
|
||||
"@types/node-rsa": "^1.1.1",
|
||||
"@types/react": "^17.0.20",
|
||||
@@ -54,7 +53,6 @@
|
||||
"image-data-uri": "^2.0.0",
|
||||
"image-type": "^3.0.0",
|
||||
"immer": "^7.0.14",
|
||||
"js-yaml": "^4.1.0",
|
||||
"levenshtein": "^1.0.5",
|
||||
"lodash": "^4.17.20",
|
||||
"markdown-it": "^10.0.0",
|
||||
|
@@ -25,10 +25,8 @@ export interface WhenClauseContext {
noteTodoCompleted: boolean;
noteIsMarkdown: boolean;
noteIsHtml: boolean;
folderIsShareRootAndNotOwnedByUser: boolean;
folderIsShareRootAndOwnedByUser: boolean;
folderIsShared: boolean;
folderIsShareRoot: boolean;
joplinServerConnected: boolean;
}

@@ -76,8 +74,6 @@ export default function stateToWhenClauseContext(state: State, options: WhenClau
noteIsHtml: selectedNote ? selectedNote.markup_language === MarkupToHtml.MARKUP_LANGUAGE_HTML : false,

// Current context folder
folderIsShareRoot: commandFolder ? isRootSharedFolder(commandFolder) : false,
folderIsShareRootAndNotOwnedByUser: commandFolder ? isRootSharedFolder(commandFolder) && !isSharedFolderOwner(state, commandFolder.id) : false,
folderIsShareRootAndOwnedByUser: commandFolder ? isRootSharedFolder(commandFolder) && isSharedFolderOwner(state, commandFolder.id) : false,
folderIsShared: commandFolder ? !!commandFolder.share_id : false,
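These flags feed the declarative "enabledCondition" expressions that gate commands. A sketch of a command runtime relying on the share-related flags above; the declaration/runtime shape follows Joplin's command pattern and is shown only as an illustration:

import ShareService from '@joplin/lib/services/share/ShareService';

export const declaration = {
	name: 'leaveSharedFolder', // one of the commands added/removed elsewhere in this compare
	label: () => 'Leave notebook',
};

export const runtime = () => ({
	// Only enabled on the root of a shared notebook that the current user does not own.
	enabledCondition: 'joplinServerConnected && folderIsShareRootAndNotOwnedByUser',
	execute: async (_context: any, folderId: string) => {
		await ShareService.instance().leaveSharedFolder(folderId);
	},
});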
@@ -58,14 +58,6 @@ export default class InteropService {
isNoteArchive: false, // Tells whether the file can contain multiple notes (eg. Enex or Jex format)
description: _('Markdown'),
},
{
...defaultImportExportModule(ModuleType.Importer),
format: 'md_frontmatter',
fileExtensions: ['md', 'markdown', 'txt', 'html'],
sources: [FileSystemItem.File, FileSystemItem.Directory],
isNoteArchive: false, // Tells whether the file can contain multiple notes (eg. Enex or Jex format)
description: _('Markdown + Front Matter'),
},
{
...defaultImportExportModule(ModuleType.Importer),
format: 'raw',
@@ -113,12 +105,6 @@ export default class InteropService {
target: FileSystemItem.Directory,
description: _('Markdown'),
},
{
...defaultImportExportModule(ModuleType.Exporter),
format: 'md_frontmatter',
target: FileSystemItem.Directory,
description: _('Markdown + Front Matter'),
},
{
...defaultImportExportModule(ModuleType.Exporter),
format: 'html',
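Once registered, the front matter exporter is selected purely by its format string. A sketch of triggering such an export programmatically, mirroring the exporter test later in this compare (the output path is a placeholder):

import InteropService from '@joplin/lib/services/interop/InteropService';

async function exportAllToFrontMatter() {
	// Export every notebook to a directory of Markdown files with YAML front matter.
	const result = await InteropService.instance().export({
		path: '/tmp/joplin-export', // placeholder output directory
		format: 'md_frontmatter',
	});
	console.info('Warnings:', result.warnings);
}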
@@ -110,7 +110,7 @@ export default class InteropService_Exporter_Md extends InteropService_Exporter_
}
}

protected async getNoteExportContent_(modNote: NoteEntity) {
private async getNoteExportContent_(modNote: NoteEntity) {
return await Note.replaceResourceInternalToExternalLinks(await Note.serialize(modNote, ['body']));
}
@@ -1,134 +0,0 @@
|
||||
import InteropService from '../../services/interop/InteropService';
|
||||
import { setupDatabaseAndSynchronizer, switchClient, exportDir } from '../../testing/test-utils';
|
||||
import Folder from '../../models/Folder';
|
||||
import Note from '../../models/Note';
|
||||
import Tag from '../../models/Tag';
|
||||
import time from '../../time';
|
||||
import { fieldOrder } from './InteropService_Exporter_Md_frontmatter';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
async function recreateExportDir() {
|
||||
const dir = exportDir();
|
||||
await fs.remove(dir);
|
||||
await fs.mkdirp(dir);
|
||||
}
|
||||
|
||||
describe('interop/InteropService_Exporter_Md_frontmatter', function() {
|
||||
async function exportAndLoad(path: string): Promise<string> {
|
||||
const service = InteropService.instance();
|
||||
|
||||
await service.export({
|
||||
path: exportDir(),
|
||||
format: 'md_frontmatter',
|
||||
});
|
||||
|
||||
return await fs.readFile(path, 'utf8');
|
||||
}
|
||||
|
||||
beforeEach(async (done) => {
|
||||
await setupDatabaseAndSynchronizer(1);
|
||||
await switchClient(1);
|
||||
await recreateExportDir();
|
||||
done();
|
||||
});
|
||||
|
||||
test('should export MD file with YAML header', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'ma', latitude: 58.2222, user_updated_time: 1, user_created_time: 1, body: '**ma note**', parent_id: folder1.id });
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/ma.md`);
|
||||
expect(content.startsWith('---')).toBe(true);
|
||||
expect(content).toContain('title: ma');
|
||||
expect(content).toContain('updated:'); // Will be current time of test run
|
||||
expect(content).toContain(`created: ${time.unixMsToRfc3339Sec(1)}`);
|
||||
expect(content).toContain('latitude: 58.22220000');
|
||||
expect(content).toContain('longitude: 0.00000000');
|
||||
expect(content).toContain('altitude: 0.0000');
|
||||
expect(content).toContain('**ma note**');
|
||||
expect(content).not.toContain('completed?');
|
||||
expect(content).not.toContain('author');
|
||||
expect(content).not.toContain('source');
|
||||
expect(content).not.toContain('due');
|
||||
}));
|
||||
|
||||
test('should export without additional quotes', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: '-60', body: '**ma note**', parent_id: folder1.id });
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/-60.md`);
|
||||
expect(content).toContain('title: -60');
|
||||
}));
|
||||
|
||||
test('should export tags', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note = await Note.save({ title: 'Title', body: '**ma note**', parent_id: folder1.id });
|
||||
await Tag.addNoteTagByTitle(note.id, 'lamp');
|
||||
await Tag.addNoteTagByTitle(note.id, 'moth');
|
||||
await Tag.addNoteTagByTitle(note.id, 'godzilla');
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/Title.md`);
|
||||
expect(content).toContain('tags:\n - godzilla\n - lamp\n - moth');
|
||||
}));
|
||||
|
||||
test('should export todo', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'Todo', is_todo: 1, todo_due: 1, body: '**ma note**', parent_id: folder1.id });
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/Todo.md`);
|
||||
expect(content).toContain(`due: ${time.unixMsToRfc3339Sec(1)}`);
|
||||
expect(content).toContain('completed?: no');
|
||||
}));
|
||||
|
||||
test('should export author', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'Author', author: 'Scott Joplin', body: '**ma note**', parent_id: folder1.id });
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/Author.md`);
|
||||
expect(content).toContain('author: Scott Joplin');
|
||||
}));
|
||||
|
||||
test('should export source', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'Source', source_url: 'https://joplinapp.org', body: '**ma note**', parent_id: folder1.id });
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/Source.md`);
|
||||
expect(content).toContain('source: https://joplinapp.org');
|
||||
}));
|
||||
|
||||
test('should export fields in the correct order', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
|
||||
const note = await Note.save({
|
||||
title: 'Fields',
|
||||
is_todo: 1,
|
||||
todo_due: 1,
|
||||
author: 'Scott Joplin',
|
||||
source_url: 'https://joplinapp.org',
|
||||
body: '**ma note**',
|
||||
parent_id: folder1.id,
|
||||
});
|
||||
await Tag.addNoteTagByTitle(note.id, 'piano');
|
||||
await Tag.addNoteTagByTitle(note.id, 'greatness');
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/Fields.md`);
|
||||
const fieldIndices = fieldOrder.map(field => content.indexOf(field));
|
||||
expect(fieldIndices).toBe(fieldIndices.sort());
|
||||
}));
|
||||
|
||||
test('should export title with a newline encoded', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'Source\ntitle', body: '**ma note**', parent_id: folder1.id });
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/Source_title.md`);
|
||||
expect(content).toContain('title: |-\n Source\n title');
|
||||
}));
|
||||
test('should not export coordinates if they\'re not available', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'Coordinates', body: '**ma note**', parent_id: folder1.id });
|
||||
|
||||
const content = await exportAndLoad(`${exportDir()}/folder1/Coordinates.md`);
|
||||
expect(content).not.toContain('latitude');
|
||||
expect(content).not.toContain('longitude');
|
||||
expect(content).not.toContain('altitude');
|
||||
}));
|
||||
});
|
@@ -1,155 +0,0 @@
|
||||
import InteropService_Exporter_Md from './InteropService_Exporter_Md';
|
||||
import BaseModel from '../../BaseModel';
|
||||
import Note from '../../models/Note';
|
||||
import NoteTag from '../../models/NoteTag';
|
||||
import Tag from '../../models/Tag';
|
||||
import time from '../../time';
|
||||
import { NoteEntity } from '../database/types';
|
||||
import { MdFrontMatterExport } from './types';
|
||||
|
||||
import * as yaml from 'js-yaml';
|
||||
|
||||
interface NoteTagContext {
|
||||
noteTags: Record<string, string[]>;
|
||||
}
|
||||
|
||||
interface TagContext {
|
||||
tagTitles: Record<string, string>;
|
||||
}
|
||||
|
||||
interface FrontMatterContext extends NoteTagContext, TagContext {}
|
||||
|
||||
// There is a special case (negative numbers) where the yaml library will force quotations
|
||||
// These need to be stripped
|
||||
function trimQuotes(rawOutput: string): string {
|
||||
return rawOutput.split('\n').map(line => {
|
||||
const index = line.indexOf(': \'-');
|
||||
if (index >= 0) {
|
||||
// The plus 2 eats the : and space characters
|
||||
const start = line.substring(0, index + 2);
|
||||
// The plus 3 eats the quote character
|
||||
const end = line.substring(index + 3, line.length - 1);
|
||||
return start + end;
|
||||
}
|
||||
return line;
|
||||
}).join('\n');
|
||||
}
|
||||
|
||||
export const fieldOrder = ['title', 'updated', 'created', 'source', 'author', 'latitude', 'longitude', 'altitude', 'completed?', 'due', 'tags'];
|
||||
|
||||
export default class InteropService_Exporter_Md_frontmatter extends InteropService_Exporter_Md {
|
||||
|
||||
public async prepareForProcessingItemType(itemType: number, itemsToExport: any[]) {
|
||||
await super.prepareForProcessingItemType(itemType, itemsToExport);
|
||||
|
||||
if (itemType === BaseModel.TYPE_NOTE_TAG) {
|
||||
// Get tag list for each note
|
||||
const context: NoteTagContext = {
|
||||
noteTags: {},
|
||||
};
|
||||
for (let i = 0; i < itemsToExport.length; i++) {
|
||||
const it = itemsToExport[i].type;
|
||||
|
||||
if (it !== itemType) continue;
|
||||
|
||||
const itemOrId = itemsToExport[i].itemOrId;
|
||||
const noteTag = typeof itemOrId === 'object' ? itemOrId : await NoteTag.load(itemOrId);
|
||||
|
||||
if (!noteTag) continue;
|
||||
|
||||
if (!context.noteTags[noteTag.note_id]) context.noteTags[noteTag.note_id] = [];
|
||||
context.noteTags[noteTag.note_id].push(noteTag.tag_id);
|
||||
}
|
||||
|
||||
this.updateContext(context);
|
||||
} else if (itemType === BaseModel.TYPE_TAG) {
|
||||
// Map tag ID to title
|
||||
const context: TagContext = {
|
||||
tagTitles: {},
|
||||
};
|
||||
for (let i = 0; i < itemsToExport.length; i++) {
|
||||
const it = itemsToExport[i].type;
|
||||
|
||||
if (it !== itemType) continue;
|
||||
|
||||
const itemOrId = itemsToExport[i].itemOrId;
|
||||
const tag = typeof itemOrId === 'object' ? itemOrId : await Tag.load(itemOrId);
|
||||
|
||||
if (!tag) continue;
|
||||
|
||||
context.tagTitles[tag.id] = tag.title;
|
||||
}
|
||||
|
||||
this.updateContext(context);
|
||||
}
|
||||
}
|
||||
|
||||
private convertDate(datetime: number): string {
|
||||
return time.unixMsToRfc3339Sec(datetime);
|
||||
}
|
||||
|
||||
private extractMetadata(note: NoteEntity) {
|
||||
const md: MdFrontMatterExport = {};
|
||||
// Every variable needs to be converted seperately, so they will be handles in groups
|
||||
//
|
||||
// title
|
||||
if (note.title) { md['title'] = note.title; }
|
||||
|
||||
// source, author
|
||||
if (note.source_url) { md['source'] = note.source_url; }
|
||||
if (note.author) { md['author'] = note.author; }
|
||||
|
||||
// locations
|
||||
// non-strict inequality is used here to interpret the location strings
|
||||
// as numbers i.e 0.000000 is the same as 0.
|
||||
// This is necessary because these fields are officially numbers, but often
|
||||
// contain strings.
|
||||
if (note.latitude != 0 || note.longitude != 0 || note.altitude != 0) {
|
||||
md['latitude'] = note.latitude;
|
||||
md['longitude'] = note.longitude;
|
||||
md['altitude'] = note.altitude;
|
||||
}
|
||||
|
||||
// todo
|
||||
if (note.is_todo) {
|
||||
// boolean is not support by the yaml FAILSAFE_SCHEMA
|
||||
md['completed?'] = note.todo_completed ? 'yes' : 'no';
|
||||
}
|
||||
if (note.todo_due) { md['due'] = this.convertDate(note.todo_due); }
|
||||
|
||||
// time
|
||||
if (note.user_updated_time) { md['updated'] = this.convertDate(note.user_updated_time); }
|
||||
if (note.user_created_time) { md['created'] = this.convertDate(note.user_created_time); }
|
||||
|
||||
// tags
|
||||
const context: FrontMatterContext = this.context();
|
||||
if (context.noteTags[note.id]) {
|
||||
const tagIds = context.noteTags[note.id];
|
||||
const tags = tagIds.map((id: string) => context.tagTitles[id]).sort();
|
||||
md['tags'] = tags;
|
||||
}
|
||||
|
||||
// This guarentees that fields will always be ordered the same way
|
||||
// which can be useful if users are using this for generating diffs
|
||||
const sort = (a: string, b: string) => {
|
||||
return fieldOrder.indexOf(a) - fieldOrder.indexOf(b);
|
||||
};
|
||||
|
||||
// The FAILSAFE_SCHEMA along with noCompatMode allows this to export strings that look
|
||||
// like numbers (or yes/no) without the added '' quotes around the text
|
||||
const rawOutput = yaml.dump(md, { sortKeys: sort, noCompatMode: true, schema: yaml.FAILSAFE_SCHEMA });
|
||||
// The additional trimming is the unfortunate result of the yaml library insisting on
|
||||
// quoting negative numbers.
|
||||
// For now the trimQuotes function only trims quotes associated with a negative number
|
||||
// but it can be extended to support more special cases in the future if necessary.
|
||||
return trimQuotes(rawOutput);
|
||||
}
|
||||
|
||||
|
||||
protected async getNoteExportContent_(modNote: NoteEntity) {
|
||||
const noteContent = await Note.replaceResourceInternalToExternalLinks(await Note.serialize(modNote, ['body']));
|
||||
const metadata = this.extractMetadata(modNote);
|
||||
return `---\n${metadata}---\n\n${noteContent}`;
|
||||
}
|
||||
|
||||
}
|
@@ -1,135 +0,0 @@
|
||||
import InteropService_Importer_Md_frontmatter from '../../services/interop/InteropService_Importer_Md_frontmatter';
|
||||
import Note from '../../models/Note';
|
||||
import Tag from '../../models/Tag';
|
||||
import time from '../../time';
|
||||
import { setupDatabaseAndSynchronizer, supportDir, switchClient } from '../../testing/test-utils';
|
||||
|
||||
|
||||
describe('InteropService_Importer_Md_frontmatter: importMetadata', function() {
|
||||
async function importNote(path: string) {
|
||||
const importer = new InteropService_Importer_Md_frontmatter();
|
||||
importer.setMetadata({ fileExtensions: ['md', 'html'] });
|
||||
return await importer.importFile(path, 'notebook');
|
||||
}
|
||||
|
||||
beforeEach(async (done) => {
|
||||
await setupDatabaseAndSynchronizer(1);
|
||||
await switchClient(1);
|
||||
done();
|
||||
});
|
||||
it('should import file and set all metadata correctly', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/full.md`);
|
||||
const format = 'DD/MM/YYYY HH:mm';
|
||||
|
||||
expect(note.title).toBe('Test Note Title');
|
||||
expect(time.formatMsToLocal(note.user_updated_time, format)).toBe('01/05/2019 16:54');
|
||||
expect(time.formatMsToLocal(note.user_created_time, format)).toBe('01/05/2019 16:54');
|
||||
expect(note.source_url).toBe('https://joplinapp.org');
|
||||
expect(note.author).toBe('Joplin');
|
||||
expect(note.latitude).toBe('37.08402100');
|
||||
expect(note.longitude).toBe('-94.51350100');
|
||||
expect(note.altitude).toBe('0.0000');
|
||||
expect(note.is_todo).toBe(1);
|
||||
expect(note.todo_completed).toBeUndefined();
|
||||
expect(time.formatMsToLocal(note.todo_due, format)).toBe('22/08/2021 00:00');
|
||||
expect(note.body).toBe('This is the note body\n');
|
||||
|
||||
const tags = await Tag.tagsByNoteId(note.id);
|
||||
expect(tags.length).toBe(3);
|
||||
|
||||
const tagTitles = tags.map(tag => tag.title);
|
||||
expect(tagTitles).toContain('joplin');
|
||||
expect(tagTitles).toContain('note');
|
||||
expect(tagTitles).toContain('pencil');
|
||||
});
|
||||
it('should only import data from the first yaml block', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/split.md`);
|
||||
|
||||
expect(note.title).toBe('xxx');
|
||||
expect(note.author).not.toBe('xxx');
|
||||
expect(note.body).toBe('---\nauthor: xxx\n---\n\nnote body\n');
|
||||
});
|
||||
it('should only import, duplicate notes and tags are not created', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/duplicates.md`);
|
||||
|
||||
expect(note.title).toBe('ddd');
|
||||
const itemIds = await Note.linkedItemIds(note.body);
|
||||
expect(itemIds.length).toBe(1);
|
||||
|
||||
const tags = await Tag.tagsByNoteId(note.id);
|
||||
expect(tags.length).toBe(1);
|
||||
});
|
||||
it('should not import items as numbers', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/numbers.md`);
|
||||
|
||||
expect(note.title).toBe('001');
|
||||
expect(note.body).toBe('note body\n');
|
||||
});
|
||||
it('should normalize whitespace and load correctly', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/normalize.md`);
|
||||
|
||||
expect(note.title).toBe('norm');
|
||||
expect(note.body).toBe('note body\n');
|
||||
|
||||
const tags = await Tag.tagsByNoteId(note.id);
|
||||
expect(tags.length).toBe(3);
|
||||
});
|
||||
it('should load unquoted special forms correctly', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/unquoted.md`);
|
||||
|
||||
expect(note.title).toBe('Unquoted');
|
||||
expect(note.body).toBe('note body\n');
|
||||
|
||||
expect(note.longitude).toBe('-94.51350100');
|
||||
expect(note.is_todo).toBe(1);
|
||||
expect(note.todo_completed).toBeUndefined();
|
||||
});
|
||||
it('should load notes with newline in the title', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/title_newline.md`);
|
||||
|
||||
expect(note.title).toBe('First\nSecond');
|
||||
});
|
||||
it('should import dates (without time) correctly', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/short_date.md`);
|
||||
const format = 'YYYY-MM-DD HH:mm';
|
||||
|
||||
expect(time.formatMsToLocal(note.user_updated_time, format)).toBe('2021-01-01 00:00');
|
||||
expect(time.formatMsToLocal(note.user_created_time, format)).toBe('2017-01-01 00:00');
|
||||
});
|
||||
it('should load tags even with the inline syntax', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/inline_tags.md`);
|
||||
|
||||
expect(note.title).toBe('Inline Tags');
|
||||
|
||||
const tags = await Tag.tagsByNoteId(note.id);
|
||||
expect(tags.length).toBe(2);
|
||||
});
|
||||
it('should import r-markdown files correctly and set what metadata it can', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/r-markdown.md`);
|
||||
const format = 'YYYY-MM-DD HH:mm';
|
||||
|
||||
expect(note.title).toBe('YAML metadata for R Markdown with examples');
|
||||
expect(time.formatMsToLocal(note.user_updated_time, format)).toBe('2021-06-10 00:00');
|
||||
expect(time.formatMsToLocal(note.user_created_time, format)).toBe('2021-06-10 00:00');
|
||||
expect(note.author).toBe('Hao Liang');
|
||||
|
||||
const tags = await Tag.tagsByNoteId(note.id);
|
||||
expect(tags.length).toBe(2);
|
||||
|
||||
const tagTitles = tags.map(tag => tag.title);
|
||||
expect(tagTitles).toContain('yaml');
|
||||
expect(tagTitles).toContain('rmd');
|
||||
});
|
||||
it('should import r-markdown files with alternative author syntax', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/r-markdown_author.md`);
|
||||
|
||||
expect(note.title).toBe('Distill for R Markdown');
|
||||
expect(note.author).toBe('JJ Allaire');
|
||||
});
|
||||
it('should handle date formats with timezone information', async function() {
|
||||
const note = await importNote(`${supportDir}/test_notes/yaml/utc.md`);
|
||||
|
||||
expect(note.user_updated_time).toBe(1556729640000);
|
||||
expect(note.user_created_time).toBe(1556754840000);
|
||||
});
|
||||
});
|
@@ -1,162 +0,0 @@
|
||||
import InteropService_Importer_Md from './InteropService_Importer_Md';
|
||||
import Note from '../../models/Note';
|
||||
import Tag from '../../models/Tag';
|
||||
import time from '../../time';
|
||||
import { NoteEntity } from '../database/types';
|
||||
|
||||
import * as yaml from 'js-yaml';
|
||||
|
||||
interface ParsedMeta {
|
||||
metadata: NoteEntity;
|
||||
tags: string[];
|
||||
}
|
||||
|
||||
function isTruthy(str: string): boolean {
|
||||
return str.toLowerCase() in ['true', 'yes'];
|
||||
}
|
||||
|
||||
// Enforces exactly 2 spaces in front of list items
|
||||
function normalizeYamlWhitespace(yaml: string[]): string[] {
|
||||
return yaml.map(line => {
|
||||
const l = line.trimStart();
|
||||
if (l.startsWith('-')) {
|
||||
return ` ${l}`;
|
||||
}
|
||||
|
||||
return line;
|
||||
});
|
||||
}
|
||||
|
||||
// This is a helper functon to convert an arbitrary author variable into a string
|
||||
// the use case is for loading from r-markdown/pandoc style notes
|
||||
// references:
|
||||
// https://pandoc.org/MANUAL.html#extension-yaml_metadata_block
|
||||
// https://github.com/hao203/rmarkdown-YAML
|
||||
function extractAuthor(author: any): string {
|
||||
if (!author) return '';
|
||||
|
||||
if (typeof(author) === 'string') {
|
||||
return author;
|
||||
} else if (Array.isArray(author)) {
|
||||
// Joplin only supports a single author, so we take the first one
|
||||
return extractAuthor(author[0]);
|
||||
} else if (typeof(author) === 'object') {
|
||||
if ('name' in author) {
|
||||
return author['name'];
|
||||
}
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
export default class InteropService_Importer_Md_frontmatter extends InteropService_Importer_Md {
|
||||
|
||||
private getNoteHeader(note: string) {
|
||||
// Ignore the leading `---`
|
||||
const lines = note.split('\n').slice(1);
|
||||
let inHeader = true;
|
||||
const headerLines: string[] = [];
|
||||
const bodyLines: string[] = [];
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (inHeader && line.startsWith('---')) {
|
||||
inHeader = false;
|
||||
i++; // Need to eat the extra newline after the yaml block
|
||||
continue;
|
||||
}
|
||||
|
||||
if (inHeader) { headerLines.push(line); } else { bodyLines.push(line); }
|
||||
}
|
||||
|
||||
const normalizedHeaderLines = normalizeYamlWhitespace(headerLines);
|
||||
const header = normalizedHeaderLines.join('\n');
|
||||
const body = bodyLines.join('\n');
|
||||
|
||||
return { header, body };
|
||||
}
|
||||
|
||||
private toLowerCase(obj: Record<string, any>): Record<string, any> {
|
||||
const newObj: Record<string, any> = {};
|
||||
for (const key of Object.keys(obj)) {
|
||||
newObj[key.toLowerCase()] = obj[key];
|
||||
}
|
||||
|
||||
return newObj;
|
||||
}
|
||||
|
||||
private parseYamlNote(note: string): ParsedMeta {
|
||||
if (!note.startsWith('---')) return { metadata: { body: note }, tags: [] };
|
||||
|
||||
const { header, body } = this.getNoteHeader(note);
|
||||
|
||||
const md: Record<string, any> = this.toLowerCase(yaml.load(header, { schema: yaml.FAILSAFE_SCHEMA }));
|
||||
const metadata: NoteEntity = {
|
||||
title: md['title'] || '',
|
||||
source_url: md['source'] || '',
|
||||
is_todo: ('completed?' in md) ? 1 : 0,
|
||||
};
|
||||
|
||||
if ('author' in md) { metadata['author'] = extractAuthor(md['author']); }
|
||||
|
||||
// The date fallback gives support for MultiMarkdown format, r-markdown, and pandoc formats
|
||||
if ('created' in md) {
|
||||
metadata['user_created_time'] = time.anythingToMs(md['created'], Date.now());
|
||||
} else if ('date' in md) {
|
||||
metadata['user_created_time'] = time.anythingToMs(md['date'], Date.now());
|
||||
}
|
||||
|
||||
if ('updated' in md) {
|
||||
metadata['user_updated_time'] = time.anythingToMs(md['updated'], Date.now());
|
||||
} else if ('lastmod' in md) {
|
||||
// Add support for hugo
|
||||
metadata['user_updated_time'] = time.anythingToMs(md['lastmod'], Date.now());
|
||||
} else if ('date' in md) {
|
||||
metadata['user_updated_time'] = time.anythingToMs(md['date'], Date.now());
|
||||
}
|
||||
|
||||
if ('latitude' in md) { metadata['latitude'] = md['latitude']; }
|
||||
if ('longitude' in md) { metadata['longitude'] = md['longitude']; }
|
||||
if ('altitude' in md) { metadata['altitude'] = md['altitude']; }
|
||||
|
||||
if (metadata.is_todo) {
|
||||
if (isTruthy(md['completed?'])) {
|
||||
// Completed time isn't preserved, so we use a sane choice here
|
||||
metadata['todo_completed'] = metadata['user_updated_time'];
|
||||
}
|
||||
if ('due' in md) {
|
||||
const due_date = time.anythingToMs(md['due'], null);
|
||||
if (due_date) { metadata['todo_due'] = due_date; }
|
||||
}
|
||||
}
|
||||
|
||||
// Tags are handled seperately from typical metadata
|
||||
let tags: string[] = [];
|
||||
if ('tags' in md) {
|
||||
// Only create unique tags
|
||||
tags = md['tags'];
|
||||
} else if ('keywords' in md) {
|
||||
// Adding support for r-markdown/pandoc
|
||||
tags = tags.concat(md['keywords']);
|
||||
}
|
||||
|
||||
// Only create unique tags
|
||||
tags = [...new Set(tags)] as string[];
|
||||
|
||||
metadata['body'] = body;
|
||||
|
||||
return { metadata, tags };
|
||||
}
|
||||
|
||||
public async importFile(filePath: string, parentFolderId: string) {
|
||||
const note = await super.importFile(filePath, parentFolderId);
|
||||
const { metadata, tags } = this.parseYamlNote(note.body);
|
||||
|
||||
const updatedNote = Object.assign({}, note, metadata);
|
||||
|
||||
const noteItem = await Note.save(updatedNote, { isNew: false, autoTimestamp: false });
|
||||
|
||||
for (const tag of tags) { await Tag.addNoteTagByTitle(noteItem.id, tag); }
|
||||
|
||||
return noteItem;
|
||||
}
|
||||
}
|
@@ -103,24 +103,8 @@ export interface ImportExportResult {
warnings: string[];
}

// These are the fields that will be included in an exported Md+Front Matter note
export interface MdFrontMatterExport {
'title'?: string;
'source'?: string;
'author'?: string;
'latitude'?: number;
'longitude'?: number;
'altitude'?: number;
'completed?'?: string;
'due'?: string;
'updated'?: string;
'created'?: string;
'tags'?: string[];
}

function moduleFullLabel(moduleSource: FileSystemItem = null): string {
const format = this.format.split('_')[0];
const label = [`${format.toUpperCase()} - ${this.description}`];
const label = [`${this.format.toUpperCase()} - ${this.description}`];
if (moduleSource && this.sources.length > 1) {
label.push(`(${moduleSource === 'file' ? _('File') : _('Directory')})`);
}
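For orientation, a populated MdFrontMatterExport and the kind of note header it serialises to; the values are made up, and the exact formatting comes from js-yaml with the FAILSAFE_SCHEMA as in the exporter shown elsewhere in this compare:

import { MdFrontMatterExport } from '@joplin/lib/services/interop/types';

const md: MdFrontMatterExport = {
	'title': 'Meeting notes',
	'updated': '2021-10-03 10:00:00Z',
	'created': '2021-10-01 09:30:00Z',
	'source': 'https://joplinapp.org',
	'author': 'Scott Joplin',
	'completed?': 'no',
	'tags': ['work', 'meetings'],
};

// Serialised, this becomes a block roughly like:
// ---
// title: Meeting notes
// updated: 2021-10-03 10:00:00Z
// created: 2021-10-01 09:30:00Z
// source: https://joplinapp.org
// author: Scott Joplin
// completed?: no
// tags:
//   - meetings
//   - work
// ---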
@@ -74,8 +74,6 @@ export default class ShareService {
return share;
}

// This allows the notebook owner to stop sharing it. For a recipient to
// leave the shared notebook, see the leaveSharedFolder command.
public async unshareFolder(folderId: string) {
const folder = await Folder.load(folderId);
if (!folder) throw new Error(`No such folder: ${folderId}`);
@@ -117,20 +115,6 @@ export default class ShareService {
await Folder.updateAllShareIds();
}

// This is when a share recipient decides to leave the shared folder.
//
// In that case, we should only delete the folder but none of its children.
// Deleting the folder tells the server that we want to leave the share. The
// server will then proceed to delete all associated user_items. So
// eventually all the notebook content will also be deleted for the current
// user.
//
// We don't delete the children here because that would delete them for the
// other share participants too.
public async leaveSharedFolder(folderId: string): Promise<void> {
await Folder.delete(folderId, { deleteChildren: false });
}

public async shareNote(noteId: string): Promise<StateShare> {
const note = await Note.load(noteId);
if (!note) throw new Error(`No such note: ${noteId}`);
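The distinction between the two operations above, seen from the caller's side (a minimal sketch; folderId is a placeholder):

import ShareService from '@joplin/lib/services/share/ShareService';

async function stopOrLeaveShare(folderId: string, isOwner: boolean) {
	const service = ShareService.instance();
	if (isOwner) {
		// Owner side: stop sharing the notebook with everyone. The notebook
		// and its notes stay in the owner's account.
		await service.unshareFolder(folderId);
	} else {
		// Recipient side: leave a notebook that was shared with you. Only the
		// root folder is deleted locally (deleteChildren: false); the server
		// then removes the associated user_items for this user.
		await service.leaveSharedFolder(folderId);
	}
}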
@@ -394,49 +394,4 @@ describe('Synchronizer.basics', function() {
|
||||
expect((await Note.all()).length).toBe(11);
|
||||
}));
|
||||
|
||||
it('should not sync deletions that came via sync even when there is a conflict', (async () => {
|
||||
// This test is mainly to simulate sharing, unsharing and sharing a note
|
||||
// again. Previously, when doing so, the app would create deleted_items
|
||||
// objects on the recipient when the owner unshares. It means that when
|
||||
// sharing again, the recipient would apply the deletions and delete
|
||||
// everything in the shared notebook.
|
||||
//
|
||||
// Specifically it was happening when a conflict was generated as a
|
||||
// result of the items being deleted.
|
||||
//
|
||||
// - C1 creates a note and sync
|
||||
// - C2 sync and get the note
|
||||
// - C2 deletes the note and sync
|
||||
// - C1 modify the note, and sync
|
||||
//
|
||||
// => A conflict is created. The note is deleted and a copy is created
|
||||
// in the Conflict folder.
|
||||
//
|
||||
// After this, we recreate the note on the sync target (simulates the
|
||||
// note being shared again), and we check that C2 doesn't attempt to
|
||||
// delete that note.
|
||||
|
||||
const note = await Note.save({});
|
||||
await synchronizerStart();
|
||||
const noteSerialized = await fileApi().get(`${note.id}.md`);
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await Note.delete(note.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await Note.save({ id: note.id });
|
||||
await synchronizerStart();
|
||||
expect((await Note.all())[0].is_conflict).toBe(1);
|
||||
await fileApi().put(`${note.id}.md`, noteSerialized); // Recreate the note - simulate sharing again.
|
||||
await synchronizerStart();
|
||||
|
||||
// Check that the client didn't delete the note
|
||||
const remotes = (await fileApi().list()).items;
|
||||
expect(remotes.find(r => r.path === `${note.id}.md`)).toBeTruthy();
|
||||
}));
|
||||
|
||||
});
|
||||
|
@@ -1,9 +0,0 @@
// The sync debug log can be used to view from a single file a sequence of sync
// related events. In particular, it logs notes and folders being saved, and the
// relevant sync operations. Enable it in app.ts

import Logger from '../../Logger';

const syncDebugLog = new Logger();

export default syncDebugLog;
@@ -210,59 +210,6 @@ function splitCommandString(command, options = null) {
|
||||
return args;
|
||||
}
|
||||
|
||||
function splitCommandBatch(commandBatch) {
|
||||
const commandLines = [];
|
||||
const eol = '\n';
|
||||
|
||||
let state = 'command';
|
||||
let current = '';
|
||||
let quote = '';
|
||||
for (let i = 0; i < commandBatch.length; i++) {
|
||||
const c = commandBatch[i];
|
||||
|
||||
if (state === 'command') {
|
||||
if (c === eol) {
|
||||
commandLines.push(current);
|
||||
current = '';
|
||||
} else if (c === '"' || c === '\'') {
|
||||
quote = c;
|
||||
current += c;
|
||||
state = 'quoted';
|
||||
} else if (c === '\\') {
|
||||
current += c;
|
||||
if (i + 1 < commandBatch.length) {
|
||||
current += commandBatch[i + 1];
|
||||
i++;
|
||||
}
|
||||
} else {
|
||||
current += c;
|
||||
}
|
||||
} else if (state === 'quoted') {
|
||||
if (c === quote) {
|
||||
quote = '';
|
||||
current += c;
|
||||
state = 'command';
|
||||
} else if (c === '\\') {
|
||||
current += c;
|
||||
if (i + 1 < commandBatch.length) {
|
||||
current += commandBatch[i + 1];
|
||||
i++;
|
||||
}
|
||||
} else {
|
||||
current += c;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (current.length > 0) {
|
||||
commandLines.push(current);
|
||||
}
|
||||
if (commandLines.length === 0) {
|
||||
commandLines.push('');
|
||||
}
|
||||
|
||||
return commandLines;
|
||||
}
|
||||
|
||||
function padLeft(string, length, padString) {
|
||||
if (!string) return '';
|
||||
|
||||
@@ -360,4 +307,4 @@ function scriptType(s) {
|
||||
return 'en';
|
||||
}
|
||||
|
||||
module.exports = Object.assign({ formatCssSize, camelCaseToDash, removeDiacritics, substrWithEllipsis, nextWhitespaceIndex, escapeFilename, wrap, splitCommandString, splitCommandBatch, padLeft, toTitleCase, urlDecode, escapeHtml, surroundKeywords, scriptType, commandArgumentsToString }, stringUtilsCommon);
|
||||
module.exports = Object.assign({ formatCssSize, camelCaseToDash, removeDiacritics, substrWithEllipsis, nextWhitespaceIndex, escapeFilename, wrap, splitCommandString, padLeft, toTitleCase, urlDecode, escapeHtml, surroundKeywords, scriptType, commandArgumentsToString }, stringUtilsCommon);
|
||||
|
@@ -80,15 +80,6 @@ class Time {
);
}

public unixMsToRfc3339Sec(ms: number) {
return (
`${moment
.unix(ms / 1000)
.utc()
.format('YYYY-MM-DD HH:mm:ss')}Z`
);
}

public unixMsToLocalDateTime(ms: number) {
return moment.unix(ms / 1000).format('DD/MM/YYYY HH:mm');
}
@@ -119,19 +110,6 @@ class Time {
return m.isValid() ? m.toDate() : defaultValue;
}

public anythingToMs(o: any, defaultValue: number = null) {
if (o && o.toDate) return o.toDate();
if (!o) return defaultValue;
// There are a few date formats supported by Joplin that are not supported by
// moment without an explicit format specifier. The typical case is that a user
// has a preferred data format. This means we should try the currently assigned
// date first, and then attempt to load a generic date string.
const m = moment(o, this.dateTimeFormat());
if (m.isValid()) return m.toDate().getTime();
const d = moment(o);
return d.isValid() ? d.toDate().getTime() : defaultValue;
}

public msleep(ms: number) {
return new Promise((resolve: Function) => {
shim.setTimeout(() => {
@@ -166,6 +144,7 @@ class Time {
}, 1000);
});
}

}

const time = new Time();
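A quick illustration of the two helpers above: unixMsToRfc3339Sec formats in UTC, and anythingToMs first tries the user's configured date format before falling back to a generic parse. The values are chosen for the example only:

import time from '@joplin/lib/time';

// 1 ms after the epoch, as used in the front matter exporter tests.
console.info(time.unixMsToRfc3339Sec(1)); // "1970-01-01 00:00:00Z"

// Parse a date-only string back to a millisecond timestamp.
const ms = time.anythingToMs('2021-01-01', Date.now());
console.info(new Date(ms).toISOString());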
@@ -5,7 +5,6 @@
"scripts": {
"start-dev": "nodemon --config nodemon.json --ext ts,js,mustache,css,tsx dist/app.js --env dev",
"start-dev-no-watch": "node dist/app.js --env dev",
"rebuild": "npm run clean && npm run build && npm run tsc",
"build": "gulp build",
"postinstall": "npm run build",
"devCreateDb": "node dist/app.js --env dev --create-db",
@@ -1,25 +0,0 @@
# How to test the complete workflow locally

- In website/build.ts, set the env to "dev", then build the website - `npm run watchWebsite`
- Start the Stripe CLI tool: `npm run stripeListen`
- Copy the webhook secret, and paste it in joplin-credentials/server.env (under STRIPE_WEBHOOK_SECRET)
- Start the local Joplin Server, `npm run start-dev`, running under http://joplincloud.local:22300
- Start the workflow from http://localhost:8077/plans/
- The local setup is usually not configured to send emails, but you can see them in the database, in the "emails" table.

# Simplified workflow

To test without running the main website, use http://joplincloud.local:22300/stripe/checkoutTest

# Stripe config

- The public config is under packages/server/stripeConfig.json
- The private config is in the server .env file

# Failed Stripe CLI login

If the tool shows this error, with code "api_key_expired":

> FATAL Error while authenticating with Stripe: Authorization failed

log out and log back in to refresh the CLI token - `stripe logout && stripe login`
125
packages/server/src/db.perf.ts
Normal file
@@ -0,0 +1,125 @@
import { afterAllTests, beforeAllDb, beforeEachDb, createUserAndSession, db, models } from "./utils/testing/testUtils";
import sqlts from '@rmp135/sql-ts';
import config from "./config";
import { connectDb, DbConnection, disconnectDb, migrateDown, migrateList, migrateUp, nextMigration, Uuid } from "./services/database/types";

const { shimInit } = require('@joplin/lib/shim-init-node.js');
const nodeSqlite = require('sqlite3');

shimInit({ nodeSqlite });
process.env.JOPLIN_IS_TESTING = '1';

// async function makeTestItem(userId: Uuid, num: number): Promise<Item> {
// 	return models().item().saveForUser(userId, {
// 		name: `${num.toString().padStart(32, '0')}.md`,
// 	});
// }

const main = async() => {
	await beforeAllDb('db.perf');

	const { user } = await createUserAndSession(1, true);

	await models().item().makeTestItems(user.id, 10000);

	{
		const startTime = Date.now();
		await db().raw('SELECT id FROM items ORDER BY name DESC');
		console.info('Time:', Date.now() - startTime);

		// With collate C:
		//
		// ASC: 23ms
		// DESC: 114


	}

	// const durations:number[] = [];

	// for (let i = 0; i < 1000; i++) {
	// 	const id = 1 + Math.floor(Math.random() * 10000);
	// 	const item = await models().item().loadByName(user.id, `${id.toString().padStart(32, '0')}.md`);
	// 	const startTime = Date.now();
	// 	await models().item().load(item.id);
	// 	durations.push(Date.now() - startTime);
	// }


	// let sum = 0;
	// for (const d of durations) sum += d;

	// console.info('Time per query: ' + (sum / durations.length));

	await afterAllTests();
}

main().catch(error => {
	console.error(error);
});

// async function dbSchemaSnapshot(db:DbConnection):Promise<any> {
// 	return sqlts.toObject({
// 		client: 'sqlite',
// 		knex: db,
// 		// 'connection': {
// 		// 	'filename': config().database.name,
// 		// },
// 		useNullAsDefault: true,
// 	} as any)

// 	// return JSON.stringify(definitions);
// }

// describe('db', function() {

// 	beforeAll(async () => {
// 		await beforeAllDb('db', { autoMigrate: false });
// 	});

// 	afterAll(async () => {
// 		await afterAllTests();
// 	});

// 	beforeEach(async () => {
// 		await beforeEachDb();
// 	});

// 	it('should allow downgrading schema', async function() {
// 		const ignoreAllBefore = '20210819165350_user_flags';
// 		let startProcessing = false;

// 		//console.info(await dbSchemaSnapshot());

// 		while (true) {
// 			await migrateUp(db());

// 			if (!startProcessing) {
// 				const next = await nextMigration(db());
// 				if (next === ignoreAllBefore) {
// 					startProcessing = true;
// 				} else {
// 					continue;
// 				}
// 			}

// 			if (!(await nextMigration(db()))) break;

// 			// await disconnectDb(db());
// 			// const beforeSchema = await dbSchemaSnapshot(db());
// 			// console.info(beforeSchema);
// 			// await connectDb(db());

// 			// await migrateUp(db());
// 			// await migrateDown(db());

// 			// const afterSchema = await dbSchemaSnapshot(db());

// 			// // console.info(beforeSchema);
// 			// // console.info(afterSchema);

// 			// expect(beforeSchema).toEqual(afterSchema);
// 		}
// 	});

// });
@@ -0,0 +1,39 @@
import { DbConnection, setCollateC } from '../db';

export async function up(db: DbConnection): Promise<any> {
	// await setCollateC(db, 'api_clients', 'id');
	// await setCollateC(db, 'changes', 'id');
	// await setCollateC(db, 'changes', 'item_id');
	// await setCollateC(db, 'changes', 'user_id');
	// await setCollateC(db, 'emails', 'recipient_id');
	// await setCollateC(db, 'item_resources', 'item_id');
	// await setCollateC(db, 'item_resources', 'resource_id');
	// await setCollateC(db, 'items', 'id');
	// await setCollateC(db, 'items', 'jop_id');
	// await setCollateC(db, 'items', 'jop_parent_id');
	// await setCollateC(db, 'items', 'jop_share_id');
	// await setCollateC(db, 'notifications', 'id');
	// await setCollateC(db, 'notifications', 'owner_id');
	// await setCollateC(db, 'sessions', 'id');
	// await setCollateC(db, 'sessions', 'user_id');
	// await setCollateC(db, 'share_users', 'id');
	// await setCollateC(db, 'share_users', 'share_id');
	// await setCollateC(db, 'share_users', 'user_id');
	// await setCollateC(db, 'shares', 'folder_id');
	// await setCollateC(db, 'shares', 'id');
	// await setCollateC(db, 'shares', 'item_id');
	// await setCollateC(db, 'shares', 'note_id');
	// await setCollateC(db, 'shares', 'owner_id');
	// await setCollateC(db, 'subscriptions', 'stripe_subscription_id');
	// await setCollateC(db, 'subscriptions', 'stripe_user_id');
	// await setCollateC(db, 'subscriptions', 'user_id');
	// await setCollateC(db, 'tokens', 'user_id');
	// await setCollateC(db, 'user_flags', 'user_id');
	// await setCollateC(db, 'user_items', 'item_id');
	// await setCollateC(db, 'user_items', 'user_id');
	// await setCollateC(db, 'users', 'id');
}

export async function down(_db: DbConnection): Promise<any> {

}
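The `setCollateC` helper referenced above is imported from `../db` and is not part of this compare. As a rough sketch of what such a helper might do on PostgreSQL (the raw SQL, the `character varying(32)` column type and the Knex-based signature are assumptions, not the actual implementation):

```
// Hypothetical sketch only - the real setCollateC() lives in ../db and is not shown in this compare.
import { Knex } from 'knex';

export async function setCollateC(db: Knex, tableName: string, columnName: string): Promise<void> {
	// COLLATE "C" compares the column byte by byte, which is cheaper than
	// locale-aware collation for opaque IDs and lets ORDER BY use the index directly.
	await db.raw('ALTER TABLE ?? ALTER COLUMN ?? TYPE character varying(32) COLLATE "C"', [tableName, columnName]);
}
```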
@@ -230,7 +230,6 @@ export default class ChangeModel extends BaseModel<Change> {
	// create - delete => NOOP
	// update - update => update
	// update - delete => delete
	// delete - create => create
	//
	// There's one exception for changes that include a "previous_item". This is
	// used to save specific properties about the previous state of the item,
@@ -238,13 +237,6 @@ export default class ChangeModel extends BaseModel<Change> {
	// to know if an item has been moved from one folder to another. In that
	// case, we need to know about each individual change, so they are not
	// compressed.
	//
	// The latest change, when an item goes from DELETE to CREATE seems odd but
	// can happen because we are not checking for "item" changes but for
	// "user_item" changes. When sharing is involved, an item can be shared
	// (CREATED), then unshared (DELETED), then shared again (CREATED). When it
	// happens, we want the user to get the item, thus we generate a CREATE
	// event.
	private compressChanges(changes: Change[]): Change[] {
		const itemChanges: Record<Uuid, Change> = {};

@@ -276,10 +268,6 @@ export default class ChangeModel extends BaseModel<Change> {
				if (previous.type === ChangeType.Update && change.type === ChangeType.Delete) {
					itemChanges[itemId] = change;
				}

				if (previous.type === ChangeType.Delete && change.type === ChangeType.Create) {
					itemChanges[itemId] = change;
				}
			} else {
				itemChanges[itemId] = change;
			}
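To make the compression rules listed in the comment above concrete, here is a minimal, self-contained sketch. It uses a simplified change shape and ignores the `previous_item` exception; it is illustrative only, not the actual `compressChanges()` implementation:

```
// Simplified illustration of the create/update/delete compression rules.
type SimpleChange = { itemId: string; type: 'create' | 'update' | 'delete' };

function compress(changes: SimpleChange[]): SimpleChange[] {
	const byItem: Record<string, SimpleChange> = {};
	for (const change of changes) {
		const previous = byItem[change.itemId];
		if (!previous) {
			byItem[change.itemId] = change;
		} else if (previous.type === 'create' && change.type === 'update') {
			// create - update => create (keep the original create)
		} else if (previous.type === 'create' && change.type === 'delete') {
			delete byItem[change.itemId]; // create - delete => NOOP
		} else {
			byItem[change.itemId] = change; // update-update, update-delete, delete-create
		}
	}
	return Object.values(byItem);
}
```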
@@ -2,7 +2,7 @@ import { ChangeType, Share, ShareType, ShareUser, ShareUserStatus } from '../../
import { beforeAllDb, afterAllTests, beforeEachDb, createUserAndSession, models, createNote, createFolder, updateItem, createItemTree, makeNoteSerializedBody, updateNote, expectHttpError, createResource } from '../../utils/testing/testUtils';
import { postApi, patchApi, getApi, deleteApi } from '../../utils/testing/apiUtils';
import { PaginatedDeltaChanges } from '../../models/ChangeModel';
import { inviteUserToShare, shareFolderWithUser } from '../../utils/testing/shareApiUtils';
import { shareFolderWithUser } from '../../utils/testing/shareApiUtils';
import { msleep } from '../../utils/time';
import { ErrorForbidden } from '../../utils/errors';
import { resourceBlobPath, serializeJoplinItem, unserializeJoplinItem } from '../../utils/joplinUtils';
@@ -860,45 +860,4 @@ describe('shares.folder', function() {
		);
	});

	test('should allow sharing, unsharing and sharing again', async function() {
		// - U1 shares a folder that contains a note
		// - U2 accepts
		// - U2 syncs
		// - U1 removes U2
		// - U1 adds back U2
		// - U2 accepts
		//
		// => Previously, the notebook would be deleted for U2 due to a quirk in
		//    delta sync, that doesn't handle user_items being deleted, then
		//    created again. Instead U2 should end up with both the folder and the
		//    note.
		//
		// Ref: https://discourse.joplinapp.org/t/20977

		const { session: session1 } = await createUserAndSession(1);
		const { user: user2, session: session2 } = await createUserAndSession(2);

		const { shareUser: shareUserA, share } = await shareFolderWithUser(session1.id, session2.id, '000000000000000000000000000000F1', {
			'000000000000000000000000000000F1': {
				'00000000000000000000000000000001': null,
			},
		});

		await models().share().updateSharedItems3();

		await deleteApi(session1.id, `share_users/${shareUserA.id}`);

		await models().share().updateSharedItems3();

		await inviteUserToShare(share, session1.id, user2.email, true);

		await models().share().updateSharedItems3();

		const page = await getApi<PaginatedDeltaChanges>(session2.id, 'items/root/delta', { query: { cursor: '' } });

		expect(page.items.length).toBe(2);
		expect(page.items.find(it => it.item_name === '000000000000000000000000000000F1.md').type).toBe(ChangeType.Create);
		expect(page.items.find(it => it.item_name === '00000000000000000000000000000001.md').type).toBe(ChangeType.Create);
	});

});
@@ -1,6 +1,6 @@
import { SubPath } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { Env, RouteType } from '../../utils/types';
import { RouteType } from '../../utils/types';
import { AppContext } from '../../utils/types';
import { bodyFields } from '../../utils/requestUtils';
import globalConfig from '../../config';
@@ -198,6 +198,32 @@ export const postHandlers: PostHandlers = {
		return { sessionId: session.id };
	},

	// # How to test the complete workflow locally
	//
	// - In website/build.ts, set the env to "dev", then build the website - `npm run watchWebsite`
	// - Start the Stripe CLI tool: `stripe listen --forward-to http://joplincloud.local:22300/stripe/webhook`
	// - Copy the webhook secret, and paste it in joplin-credentials/server.env (under STRIPE_WEBHOOK_SECRET)
	// - Start the local Joplin Server, `npm run start-dev`, running under http://joplincloud.local:22300
	// - Start the workflow from http://localhost:8077/plans/
	// - The local website is usually not configured to send emails, but you can see them in the database, in the "emails" table.
	//
	// # Simplified workflow
	//
	// To test without running the main website, use http://joplincloud.local:22300/stripe/checkoutTest
	//
	// # Stripe config
	//
	// - The public config is under packages/server/stripeConfig.json
	// - The private config is in the server .env file
	//
	// # Failed Stripe CLI login
	//
	// If the tool shows this error, with code "api_key_expired":
	//
	// > FATAL Error while authenticating with Stripe: Authorization failed
	//
	// Log out and log in again to refresh the CLI token - `stripe logout && stripe login`

	webhook: async (stripe: Stripe, _path: SubPath, ctx: AppContext, event: Stripe.Event = null, logErrors: boolean = true) => {
		event = event ? event : await stripeEvent(stripe, ctx.req);

@@ -399,8 +425,6 @@ const getHandlers: Record<string, StripeRouteHandler> = {
	},

	checkoutTest: async (_stripe: Stripe, _path: SubPath, ctx: AppContext) => {
		if (globalConfig().env === Env.Prod) throw new ErrorForbidden();

		const basicPrice = findPrice(stripeConfig().prices, { accountType: 1, period: PricePeriod.Monthly });
		const proPrice = findPrice(stripeConfig().prices, { accountType: 2, period: PricePeriod.Monthly });
@@ -55,7 +55,7 @@ describe('TaskService', function() {
						clearInterval(iid);
						taskHasRan = true;
					}
				}, 1);
			}, 10);
		},
		schedule: '',
	};
@@ -69,11 +69,11 @@ describe('TaskService', function() {
		void service.runTask(taskId, RunType.Manual);
		expect(service.taskState(taskId).running).toBe(true);

		while (!taskHasRan) {
			await msleep(1);
			finishTask = true;
		}
		await msleep(10);
		finishTask = true;
		await msleep(10);

		expect(taskHasRan).toBe(true);
		expect(service.taskState(taskId).running).toBe(false);

		const events = await service.taskLastEvents(taskId);
@@ -150,17 +150,13 @@ export async function shareWithUserAndAccept(sharerSessionId: string, shareeSess

	shareUser = await models().shareUser().load(shareUser.id);

	await respondInvitation(shareeSessionId, shareUser.id, ShareUserStatus.Accepted);
	await patchApi(shareeSessionId, `share_users/${shareUser.id}`, { status: ShareUserStatus.Accepted });

	await models().share().updateSharedItems3();

	return { share, item, shareUser };
}

export async function respondInvitation(recipientSessionId: Uuid, shareUserId: Uuid, status: ShareUserStatus) {
	await patchApi(recipientSessionId, `share_users/${shareUserId}`, { status });
}

export async function postShareContext(sessionId: string, shareType: ShareType, itemId: Uuid): Promise<AppContext> {
	const context = await koaAppContext({
		sessionId: sessionId,
@@ -73,23 +73,23 @@ export async function beforeAllDb(unitName: string, createDbOptions: CreateDbOpt
	//
	// sudo docker compose -f docker-compose.db-dev.yml up

	// await initConfig(Env.Dev, {
	// 	DB_CLIENT: 'pg',
	// 	POSTGRES_DATABASE: unitName,
	// 	POSTGRES_USER: 'joplin',
	// 	POSTGRES_PASSWORD: 'joplin',
	// 	SUPPORT_EMAIL: 'testing@localhost',
	// }, {
	// 	tempDir: tempDir,
	// });

	await initConfig(Env.Dev, {
		SQLITE_DATABASE: createdDbPath_,
		DB_CLIENT: 'pg',
		POSTGRES_DATABASE: unitName,
		POSTGRES_USER: 'joplin',
		POSTGRES_PASSWORD: 'joplin',
		SUPPORT_EMAIL: 'testing@localhost',
	}, {
		tempDir: tempDir,
	});

	// await initConfig(Env.Dev, {
	// 	SQLITE_DATABASE: createdDbPath_,
	// 	SUPPORT_EMAIL: 'testing@localhost',
	// }, {
	// 	tempDir: tempDir,
	// });

	initGlobalLogger();

	await createDb(config().database, { dropIfExists: true, ...createDbOptions });
@@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: Joplin-CLI 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"Last-Translator: ERYpTION\n"
"Last-Translator: Mustafa Al-Dailemi <dailemi@hotmail.com>Language-Team:\n"
"Language-Team: \n"
"Language: da_DK\n"
"MIME-Version: 1.0\n"
@@ -1,15 +0,0 @@
# External URL links

This feature allows the creation of links to notes, folders, and tags. When such a link is opened, Joplin will start (unless it is already running) and open the corresponding item.

To create a link, right-click a note, a folder, or a tag in the sidebar and select "Copy external link". The link will be copied to the clipboard.

## Link format

* `joplin://x-callback-url/openNote?id=<note id>` for a note
* `joplin://x-callback-url/openFolder?id=<folder id>` for a folder
* `joplin://x-callback-url/openTag?id=<tag id>` for a tag

## Known problems

On macOS, if Joplin isn't running, it will start but it won't open the note.
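For illustration, building such a link programmatically could look like the sketch below. The helper name is hypothetical; only the URL format comes from the list above:

```
// Hypothetical helper, shown only to illustrate the link format described above.
function makeOpenNoteUrl(noteId: string): string {
	return `joplin://x-callback-url/openNote?id=${encodeURIComponent(noteId)}`;
}

// makeOpenNoteUrl('0123456789abcdef0123456789abcdef')
// => 'joplin://x-callback-url/openNote?id=0123456789abcdef0123456789abcdef'
```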
@@ -1,110 +0,0 @@
# Markdown with Front Matter Exporter/Importer

This exporter/importer is built around the MD exporter/importer. It functions identically, but includes a block of YAML front matter that contains note metadata.

YAML front matter is represented simply as a block of YAML between `---` delimiters. An illustrative example can be seen below.

```
---
title: Joplin Interop
created: 1970-01-01 00:00Z
tags:
  - export
  - import
---
```

## Supported Metadata Fields

All of the below fields are supported by both the exporter and the importer.

- `title`: Title of the note
- `updated`: Time of last note update (corresponds to `user_updated_time`)
- `created`: Creation time of note (corresponds to `user_created_time`)
- `source`: The source URL for a note that comes from the web clipper
- `author`: Author's name
- `latitude`: Latitude where note was created
- `longitude`: Longitude where note was created
- `altitude`: Altitude where note was created
- `completed?`: Exists if the note is a todo, indicates if the todo is completed
- `due`: Exists if the note is a todo, due date (alarm time) of note
- `tags`: List of all associated tag names

### Exporter

The exporter will export all the above fields that hold values in the database. So `due` and `completed?` will only be included for "todo" notes, `tags` will only exist for notes that include tags, etc.

### Importer

The importer will import the metadata corresponding to all of the above fields. Missing data will be filled in as if the note was just created. Extra fields will be ignored.

There are other tools that use similar YAML front matter blocks, notably [pandoc](https://pandoc.org/MANUAL.html#extension-yaml_metadata_block) and [r-markdown](https://github.com/hao203/rmarkdown-YAML). The importer attempts to provide compatibility with these formats where possible.

## Dates
### Exporter

All dates are exported in the ISO 8601 format (substituting the 'T' for a space, based on RFC 3339, for readability) in the UTC time zone.

e.g. `1970-01-01 00:00:00Z`

### Importer

The importer is more flexible with dates. It will handle ISO 8601 dates with or without a timezone; if no timezone is specified, local time will be used, and if a timezone is specified (Z notation or +00:00 notation) that timezone will be used. If the format is not ISO 8601, the importer will attempt to read it based on the user's configured date and time preferences (Tools -> Options -> General or Joplin -> Preferences -> General). The importer will fall back on the JavaScript [Date](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date) functionality if the format cannot be read.
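As a rough sketch of that fallback chain (illustrative only; the function name and exact behaviour are assumptions, not the importer's actual code):

```
// Illustrative sketch of the date-parsing fallback described above.
const moment = require('moment');

function parseFrontMatterDate(value: string, userDateTimeFormat: string): number | null {
	// 1. ISO 8601, with or without a timezone.
	const iso = moment(value, moment.ISO_8601);
	if (iso.isValid()) return iso.toDate().getTime();
	// 2. The user's configured date and time format.
	const fromUserFormat = moment(value, userDateTimeFormat);
	if (fromUserFormat.isValid()) return fromUserFormat.toDate().getTime();
	// 3. Fall back on native JavaScript Date parsing.
	const fallback = new Date(value);
	return isNaN(fallback.getTime()) ? null : fallback.getTime();
}
```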
## Examples

Below is a collection of examples that represent valid notes that may have been exported by the exporter, and can be imported by the importer.

```
---
title: Frogs
source: https://en.wikipedia.org/wiki/Frog
created: 2021-05-01 16:40:00Z
updated: 2021-05-01 16:40:00Z
tags:
  - Reference
  - Cool
---

This article is about the group of amphibians. For other uses, see [Frog (disambiguation)](https://en.wikipedia.org/wiki/Frog_%28disambiguation%29 "Frog (disambiguation)").
...
```

```
---
title: Take Home Quiz
created: 2021-05-01 16:40:00Z
updated: 2021-06-17 23:59:00Z
tags:
  - school
  - math
  - homework
completed?: no
due: 2021-06-18 08:00:00Z
---

**Prove or give a counter-example of the following statement:**

> In three space dimensions and time, given an initial velocity field, there exists a vector velocity and a scalar pressure field, which are both smooth and globally defined, that solve the Navier–Stokes equations.
```

```
---
title: All Fields
updated: 2019-05-01 16:54:00Z
created: 2019-05-01 16:54:00Z
source: https://joplinapp.org
author: Joplin
latitude: 37.084021
longitude: -94.51350100
altitude: 0.0000
completed?: no
due: 2021-08-22 00:00:00Z
tags:
  - joplin
  - note
  - pencil
---

All of this metadata is available to be imported/exported.
```
@@ -1,33 +1,16 @@
# Joplin Server sharing feature

## Sharing a notebook with a user
## Sharing a file via a public URL

Sharing a notebook is done via synchronisation using the following API objects:
Joplin Server is essentially a file hosting service and it allows sharing files via public URLs. To do so, an API call is made to `/api/shares` with the ID or path of the file that needs to be shared. This call returns a SHAREID that is then used to access the file via URL. When viewing the file, it will be displayed according to its mime type. Thus, by default, a Markdown file will be displayed as plain text.

- `item`: any Joplin item such as a note or notebook.
- `user_item`: owned by a user and points to an item. Multiple user_items can point to the same item, which is important to enable sharing.
- `share`: associated with a notebook ID, it specifies which notebook should be shared and by whom.
- `share_user`: associated with a share and a user. This is essentially an invitation that the sharer sent to recipients. There can be multiple such objects, and they can be accepted or rejected by the recipient.
## Sharing a note via a public URL

The process to share is then:
It is built on top of the file sharing feature. The file corresponding to the note is shared via the above API. Then a separate application, specific to Joplin, reads and parses the Markdown file, and displays it as a note.

- First, the sharer calls `POST /api/shares` with the notebook ID that needs to be shared.
- Then invitations can be sent by calling `POST /api/share_users` and providing the share ID and recipient email.
- The recipient accepts or rejects the invitation by setting the status on the `share_users` object (which corresponds to an invitation).
That application works as a viewer - instead of displaying the Markdown file as plain text (by default), it renders it and displays it as HTML.

Once the share is set up, the client recursively goes through all notes, sub-notebooks and resources within the shared notebook, and sets their `share_id` property. Basically any item within the notebook should have this property set. Then all these items are synchronized.

On the server, a service runs at regular intervals to check the `share_id` property and generate `user_item` objects for each recipient. Once these objects have been created, the recipient will start receiving the shared notebooks and notes.

### Why is the share_id set on the client and not the server?

Technically, the server would only need to know the root shared folder, and from that it could find out its children. This approach was tried but it makes the system much more complex because some information is lost after sync - in particular when notes or notebooks are moved out of folders, when resources are attached or removed, etc. Keeping track of all this is possible but complex and inefficient.

On the other hand, all that information is present on the client. Whenever a note is moved out of a shared folder, or whenever a resource is attached, the changes are tracked, and that can be used to easily assign a `share_id` property. Once this is set, it makes the whole system simpler and more reliable.

## Publishing a note via a public URL

This is done by posting a note ID to `/api/shares`.
The rendering engine is the same as in the main applications, which allows us to use the same plugins and settings.
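For illustration, publishing a note from a client could look roughly like the sketch below; the request body field name, auth header and response shape are assumptions for the example, not taken from this spec:

```
// Hypothetical client-side sketch of publishing a note via POST /api/shares.
async function publishNote(baseUrl: string, sessionId: string, noteId: string): Promise<string> {
	const response = await fetch(`${baseUrl}/api/shares`, {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			'X-API-AUTH': sessionId, // assumed session header
		},
		body: JSON.stringify({ note_id: noteId }), // assumed field name
	});
	if (!response.ok) throw new Error(`Could not create share: ${response.status}`);
	const share = await response.json();
	return `${baseUrl}/shares/${share.id}`; // assumed public URL shape
}
```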
### Attached resources

@@ -46,3 +29,7 @@ Any linked note will **not** be shared, due to the following reasons:
It should be possible to have multiple share links for a given note. For example: I share a note with one person, then the same note with a different person. I revoke the share for one person, but I still want the other person to access the note.

So when a share link is created for a note, the API always returns a new link.

## Sharing a note with a user

TBD