
Compare commits


33 Commits

Author | SHA1 | Message | Date
---|---|---|---
Laurent Cozic | a5b5ef1886 | Merge branch 'dev' into server_content_drivers | 2021-11-09 15:33:34 +00:00
Laurent Cozic | 7b3ad32103 | Update translations | 2021-11-09 15:28:38 +00:00
Laurent Cozic | 3745cd7cb0 | Tools: Do not process context when running build-translation tool | 2021-11-09 15:26:45 +00:00
Laurent Cozic | 920f2d9655 | Revert "Update translations" (reverts commit f800ca0269; reverting for now due to some translations being incorrectly marked as fuzzy) | 2021-11-09 13:49:20 +00:00
Laurent Cozic | f800ca0269 | Update translations | 2021-11-09 13:16:09 +00:00
Laurent Cozic | 33be306d01 | Tools: Fixed missing translations when running build-translation tool | 2021-11-09 13:14:41 +00:00
Laurent Cozic | 3782255c27 | Tools: Fixed build-translation script to handle .ts and .tsx files directly | 2021-11-09 12:20:07 +00:00
Laurent Cozic | 68f77f6bbc | fix | 2021-11-08 17:46:29 +00:00
Laurent | 0ab235273b | Tools: Detect missing translation strings on CI (#5688) | 2021-11-08 17:10:33 +00:00
Laurent Cozic | 75256613cc | Security: Ensure Markdown links that contain single quotes are correctly escaped | 2021-11-08 15:39:45 +00:00
Helmut K. C. Tessarek | b328094033 | update en_US.po | 2021-11-08 10:30:18 -05:00
Helmut K. C. Tessarek | 4f0f1af5d1 | Update translations | 2021-11-08 10:22:16 -05:00
Laurent Cozic | b402bc7ff7 | Merge branch 'dev' into server_content_drivers | 2021-11-08 15:06:06 +00:00
Laurent Cozic | 0ed0690bf8 | rename | 2021-11-08 14:58:10 +00:00
Laurent Cozic | 467b1156cc | s3 | 2021-11-08 14:24:42 +00:00
Laurent Cozic | 6a9d9f6542 | Merge branch 'dev' into server_content_drivers | 2021-11-07 17:30:04 +00:00
Laurent Cozic | 69b413ce2b | storage table | 2021-11-07 11:46:25 +00:00
Laurent Cozic | e3d6334372 | id | 2021-11-06 19:51:32 +00:00
Laurent Cozic | cc4c50c219 | rename | 2021-11-06 16:23:43 +00:00
Laurent Cozic | 5d646f7ced | env | 2021-11-06 15:33:07 +00:00
Laurent Cozic | fa3612405c | tests | 2021-11-05 12:05:03 +00:00
Laurent Cozic | 20df46c066 | fallback | 2021-11-05 11:43:27 +00:00
Laurent Cozic | 9b0a659416 | connection string | 2021-11-04 18:00:37 +00:00
Laurent Cozic | a00e0e7043 | Merge branch 'dev' into server_content_drivers | 2021-11-04 15:46:55 +00:00
Laurent Cozic | 560523bdc2 | tests | 2021-10-26 19:09:33 +01:00
Laurent Cozic | a13242e803 | Merge branch 'dev' into server_content_drivers | 2021-10-26 17:58:47 +01:00
Laurent Cozic | 72834fcfc4 | clean up | 2021-10-22 14:46:54 +01:00
Laurent Cozic | 731142218b | comment | 2021-10-22 14:33:03 +01:00
Laurent Cozic | 17b580b71b | support fallback driver | 2021-10-21 11:41:01 +01:00
Laurent Cozic | f7be45c236 | tests | 2021-10-20 12:18:56 +01:00
Laurent Cozic | b298861dc3 | db driver | 2021-10-19 19:44:43 +01:00
Laurent Cozic | 2343de3763 | Merge branch 'dev' into server_content_drivers | 2021-10-19 17:52:46 +01:00
Laurent Cozic | abb37258d0 | Content drivers | 2021-10-12 18:56:42 +01:00
127 changed files with 219371 additions and 186822 deletions

View File

@@ -81,7 +81,7 @@ fi
# release randomly fail.
# =============================================================================
if [ "$IS_PULL_REQUEST" == "1" ]; then
if [ "$IS_PULL_REQUEST" == "1" ] || [ "$IS_DEV_BRANCH" = "1" ]; then
echo "Step: Running linter..."
npm run linter-ci ./
@@ -109,6 +109,27 @@ if [ "$IS_PULL_REQUEST" == "1" ]; then
fi
fi
# =============================================================================
# Check that we didn't lose any strings due to gettext not being able to parse
# newly modified or added scripts. This makes it easy to see on GitHub which
# commit may have broken translation building. We run this on macOS because
# we need the latest version of gettext (and stable Ubuntu doesn't have it).
# =============================================================================
if [ "$IS_PULL_REQUEST" == "1" ] || [ "$IS_DEV_BRANCH" = "1" ]; then
if [ "$IS_MACOS" == "1" ]; then
echo "Step: Checking for lost translation strings..."
xgettext --version
node packages/tools/build-translation.js --missing-strings-check-only
testResult=$?
if [ $testResult -ne 0 ]; then
exit $testResult
fi
fi
fi
# =============================================================================
# Find out if we should run the build or not. Electron-builder gets stuck when
# building PRs so we disable it in this case. The Linux build should provide
@@ -124,13 +145,12 @@ if [ "$IS_PULL_REQUEST" == "1" ]; then
fi
# =============================================================================
# Prepare the Electron app and build it
# Build the Electron app or Docker image depending on the current tag.
#
# If the current tag is a desktop release tag (starts with "v", such as
# "v1.4.7"), we build and publish to github
#
# Otherwise we only build but don't publish to GitHub. It helps finding
# out any issue in pull requests and dev branch.
# "v1.4.7"), we build and publish to GitHub. Otherwise we only build but don't
# publish to GitHub. It helps finding out any issue in pull requests and dev
# branch.
# =============================================================================
cd "$ROOT_DIR/packages/app-desktop"

View File

@@ -19,6 +19,14 @@ jobs:
sudo apt-get update || true
sudo apt-get install -y gettext
sudo apt-get install -y libsecret-1-dev
sudo apt-get install -y translate-toolkit
- name: Install macOS dependencies
if: runner.os == 'macOS'
run: |
brew update
brew install gettext
brew install translate-toolkit
- name: Install Docker Engine
if: runner.os == 'Linux' && startsWith(github.ref, 'refs/tags/server-v')

View File

@@ -505,47 +505,47 @@ Current translations:
<!-- LOCALE-TABLE-AUTO-GENERATED -->
&nbsp; | Language | Po File | Last translator | Percent done
---|---|---|---|---
<img src="https://joplinapp.org/images/flags/country-4x3/arableague.png" width="16px"/> | Arabic | [ar](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ar.po) | [Whaell O](mailto:Whaell@protonmail.com) | 99%
<img src="https://joplinapp.org/images/flags/es/basque_country.png" width="16px"/> | Basque | [eu](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/eu.po) | juan.abasolo@ehu.eus | 28%
<img src="https://joplinapp.org/images/flags/country-4x3/ba.png" width="16px"/> | Bosnian (Bosna i Hercegovina) | [bs_BA](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/bs_BA.po) | [Derviš T.](mailto:dervis.t@pm.me) | 71%
<img src="https://joplinapp.org/images/flags/country-4x3/bg.png" width="16px"/> | Bulgarian (България) | [bg_BG](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/bg_BG.po) | | 55%
<img src="https://joplinapp.org/images/flags/es/catalonia.png" width="16px"/> | Catalan | [ca](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ca.po) | [Xavi Ivars](mailto:xavi.ivars@gmail.com) | 99%
<img src="https://joplinapp.org/images/flags/country-4x3/arableague.png" width="16px"/> | Arabic | [ar](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ar.po) | [Whaell O](mailto:Whaell@protonmail.com) | 95%
<img src="https://joplinapp.org/images/flags/es/basque_country.png" width="16px"/> | Basque | [eu](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/eu.po) | juan.abasolo@ehu.eus | 27%
<img src="https://joplinapp.org/images/flags/country-4x3/ba.png" width="16px"/> | Bosnian (Bosna i Hercegovina) | [bs_BA](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/bs_BA.po) | [Derviš T.](mailto:dervis.t@pm.me) | 68%
<img src="https://joplinapp.org/images/flags/country-4x3/bg.png" width="16px"/> | Bulgarian (България) | [bg_BG](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/bg_BG.po) | | 54%
<img src="https://joplinapp.org/images/flags/es/catalonia.png" width="16px"/> | Catalan | [ca](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ca.po) | [Xavi Ivars](mailto:xavi.ivars@gmail.com) | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/hr.png" width="16px"/> | Croatian (Hrvatska) | [hr_HR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/hr_HR.po) | [Milo Ivir](mailto:mail@milotype.de) | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/cz.png" width="16px"/> | Czech (Česká republika) | [cs_CZ](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/cs_CZ.po) | [Michal Stanke](mailto:michal@stanke.cz) | 94%
<img src="https://joplinapp.org/images/flags/country-4x3/dk.png" width="16px"/> | Dansk (Danmark) | [da_DK](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/da_DK.po) | Mustafa Al-Dailemi (dailemi@hotmail.com)Language-Team: | 99%
<img src="https://joplinapp.org/images/flags/country-4x3/de.png" width="16px"/> | Deutsch (Deutschland) | [de_DE](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/de_DE.po) | [marph91](mailto:martin.d@andix.de) | 99%
<img src="https://joplinapp.org/images/flags/country-4x3/ee.png" width="16px"/> | Eesti Keel (Eesti) | [et_EE](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/et_EE.po) | | 54%
<img src="https://joplinapp.org/images/flags/country-4x3/cz.png" width="16px"/> | Czech (Česká republika) | [cs_CZ](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/cs_CZ.po) | [Michal Stanke](mailto:michal@stanke.cz) | 91%
<img src="https://joplinapp.org/images/flags/country-4x3/dk.png" width="16px"/> | Dansk (Danmark) | [da_DK](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/da_DK.po) | ERYpTION | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/de.png" width="16px"/> | Deutsch (Deutschland) | [de_DE](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/de_DE.po) | [MrKanister](mailto:s.robin@tutanota.de) | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/ee.png" width="16px"/> | Eesti Keel (Eesti) | [et_EE](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/et_EE.po) | | 52%
<img src="https://joplinapp.org/images/flags/country-4x3/gb.png" width="16px"/> | English (United Kingdom) | [en_GB](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/en_GB.po) | | 100%
<img src="https://joplinapp.org/images/flags/country-4x3/us.png" width="16px"/> | English (United States of America) | [en_US](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/en_US.po) | | 100%
<img src="https://joplinapp.org/images/flags/country-4x3/es.png" width="16px"/> | Español (España) | [es_ES](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/es_ES.po) | [Francisco Mora](mailto:francisco.m.collao@gmail.com) | 95%
<img src="https://joplinapp.org/images/flags/esperanto.png" width="16px"/> | Esperanto | [eo](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/eo.po) | Marton Paulo | 31%
<img src="https://joplinapp.org/images/flags/country-4x3/fi.png" width="16px"/> | Finnish (Suomi) | [fi_FI](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/fi_FI.po) | mrkaato | 99%
<img src="https://joplinapp.org/images/flags/country-4x3/fr.png" width="16px"/> | Français (France) | [fr_FR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/fr_FR.po) | Nicolas Viviani | 100%
<img src="https://joplinapp.org/images/flags/es/galicia.png" width="16px"/> | Galician (España) | [gl_ES](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/gl_ES.po) | [Marcos Lans](mailto:marcoslansgarza@gmail.com) | 36%
<img src="https://joplinapp.org/images/flags/country-4x3/id.png" width="16px"/> | Indonesian (Indonesia) | [id_ID](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/id_ID.po) | [eresytter](mailto:42007357+eresytter@users.noreply.github.com) | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/it.png" width="16px"/> | Italiano (Italia) | [it_IT](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/it_IT.po) | [Albano Battistella](mailto:albano_battistella@hotmail.com) | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/hu.png" width="16px"/> | Magyar (Magyarország) | [hu_HU](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/hu_HU.po) | [Magyari Balázs](mailto:balmag@gmail.com) | 83%
<img src="https://joplinapp.org/images/flags/country-4x3/be.png" width="16px"/> | Nederlands (België, Belgique, Belgien) | [nl_BE](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/nl_BE.po) | | 86%
<img src="https://joplinapp.org/images/flags/country-4x3/nl.png" width="16px"/> | Nederlands (Nederland) | [nl_NL](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/nl_NL.po) | [MetBril](mailto:metbril@users.noreply.github.com) | 90%
<img src="https://joplinapp.org/images/flags/country-4x3/no.png" width="16px"/> | Norwegian (Norge, Noreg) | [nb_NO](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/nb_NO.po) | Alexander Dawson | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/ir.png" width="16px"/> | Persian | [fa](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/fa.po) | [Kourosh Firoozbakht](mailto:kourox@protonmail.com) | 67%
<img src="https://joplinapp.org/images/flags/country-4x3/pl.png" width="16px"/> | Polski (Polska) | [pl_PL](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/pl_PL.po) | [konhi](mailto:hello.konhi@gmail.com) | 89%
<img src="https://joplinapp.org/images/flags/country-4x3/br.png" width="16px"/> | Português (Brasil) | [pt_BR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/pt_BR.po) | [Nicolas Suzuki](mailto:nicolas.suzuki@pm.me) | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/pt.png" width="16px"/> | Português (Portugal) | [pt_PT](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/pt_PT.po) | [Diogo Caveiro](mailto:dcaveiro@yahoo.com) | 89%
<img src="https://joplinapp.org/images/flags/country-4x3/ro.png" width="16px"/> | Română | [ro](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ro.po) | [Cristi Duluta](mailto:cristi.duluta@gmail.com) | 62%
<img src="https://joplinapp.org/images/flags/country-4x3/si.png" width="16px"/> | Slovenian (Slovenija) | [sl_SI](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/sl_SI.po) | [Martin Korelič](mailto:martin.korelic@protonmail.com) | 90%
<img src="https://joplinapp.org/images/flags/country-4x3/se.png" width="16px"/> | Svenska | [sv](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/sv.po) | [Jonatan Nyberg](mailto:jonatan@autistici.org) | 99%
<img src="https://joplinapp.org/images/flags/country-4x3/th.png" width="16px"/> | Thai (ประเทศไทย) | [th_TH](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/th_TH.po) | | 42%
<img src="https://joplinapp.org/images/flags/country-4x3/vi.png" width="16px"/> | Tiếng Việt | [vi](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/vi.po) | | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/tr.png" width="16px"/> | Türkçe (Türkiye) | [tr_TR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/tr_TR.po) | [Arda Kılıçdağı](mailto:arda@kilicdagi.com) | 99%
<img src="https://joplinapp.org/images/flags/country-4x3/ua.png" width="16px"/> | Ukrainian (Україна) | [uk_UA](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/uk_UA.po) | [Vyacheslav Andreykiv](mailto:vandreykiv@gmail.com) | 89%
<img src="https://joplinapp.org/images/flags/country-4x3/gr.png" width="16px"/> | Ελληνικά (Ελλάδα) | [el_GR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/el_GR.po) | [Harris Arvanitis](mailto:xaris@tuta.io) | 92%
<img src="https://joplinapp.org/images/flags/country-4x3/ru.png" width="16px"/> | Русский (Россия) | [ru_RU](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ru_RU.po) | [Sergey Segeda](mailto:thesermanarm@gmail.com) | 99%
<img src="https://joplinapp.org/images/flags/country-4x3/rs.png" width="16px"/> | српски језик (Србија) | [sr_RS](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/sr_RS.po) | | 80%
<img src="https://joplinapp.org/images/flags/country-4x3/cn.png" width="16px"/> | 中文 (简体) | [zh_CN](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/zh_CN.po) | [南宫小骏](mailto:jackytsu@vip.qq.com) | 100%
<img src="https://joplinapp.org/images/flags/country-4x3/tw.png" width="16px"/> | 中文 (繁體) | [zh_TW](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/zh_TW.po) | [SiderealArt](mailto:nelson22768384@gmail.com) | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/jp.png" width="16px"/> | 日本語 (日本) | [ja_JP](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ja_JP.po) | [genneko](mailto:genneko217@gmail.com) | 100%
<img src="https://joplinapp.org/images/flags/country-4x3/kr.png" width="16px"/> | 한국어 | [ko](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ko.po) | [Ji-Hyeon Gim](mailto:potatogim@potatogim.net) | 94%
<img src="https://joplinapp.org/images/flags/country-4x3/es.png" width="16px"/> | Español (España) | [es_ES](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/es_ES.po) | [Francisco Mora](mailto:francisco.m.collao@gmail.com) | 96%
<img src="https://joplinapp.org/images/flags/esperanto.png" width="16px"/> | Esperanto | [eo](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/eo.po) | Marton Paulo | 30%
<img src="https://joplinapp.org/images/flags/country-4x3/fi.png" width="16px"/> | Finnish (Suomi) | [fi_FI](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/fi_FI.po) | mrkaato | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/fr.png" width="16px"/> | Français (France) | [fr_FR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/fr_FR.po) | Nicolas Viviani | 96%
<img src="https://joplinapp.org/images/flags/es/galicia.png" width="16px"/> | Galician (España) | [gl_ES](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/gl_ES.po) | [Marcos Lans](mailto:marcoslansgarza@gmail.com) | 35%
<img src="https://joplinapp.org/images/flags/country-4x3/id.png" width="16px"/> | Indonesian (Indonesia) | [id_ID](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/id_ID.po) | [eresytter](mailto:42007357+eresytter@users.noreply.github.com) | 94%
<img src="https://joplinapp.org/images/flags/country-4x3/it.png" width="16px"/> | Italiano (Italia) | [it_IT](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/it_IT.po) | [Albano Battistella](mailto:albano_battistella@hotmail.com) | 92%
<img src="https://joplinapp.org/images/flags/country-4x3/hu.png" width="16px"/> | Magyar (Magyarország) | [hu_HU](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/hu_HU.po) | [Magyari Balázs](mailto:balmag@gmail.com) | 80%
<img src="https://joplinapp.org/images/flags/country-4x3/be.png" width="16px"/> | Nederlands (België, Belgique, Belgien) | [nl_BE](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/nl_BE.po) | | 83%
<img src="https://joplinapp.org/images/flags/country-4x3/nl.png" width="16px"/> | Nederlands (Nederland) | [nl_NL](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/nl_NL.po) | [MetBril](mailto:metbril@users.noreply.github.com) | 87%
<img src="https://joplinapp.org/images/flags/country-4x3/no.png" width="16px"/> | Norwegian (Norge, Noreg) | [nb_NO](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/nb_NO.po) | Alexander Dawson | 93%
<img src="https://joplinapp.org/images/flags/country-4x3/ir.png" width="16px"/> | Persian | [fa](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/fa.po) | [Kourosh Firoozbakht](mailto:kourox@protonmail.com) | 65%
<img src="https://joplinapp.org/images/flags/country-4x3/pl.png" width="16px"/> | Polski (Polska) | [pl_PL](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/pl_PL.po) | [konhi](mailto:hello.konhi@gmail.com) | 86%
<img src="https://joplinapp.org/images/flags/country-4x3/br.png" width="16px"/> | Português (Brasil) | [pt_BR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/pt_BR.po) | [Felipe Viggiano](mailto:felipeviggiano@gmail.com) | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/pt.png" width="16px"/> | Português (Portugal) | [pt_PT](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/pt_PT.po) | [Diogo Caveiro](mailto:dcaveiro@yahoo.com) | 86%
<img src="https://joplinapp.org/images/flags/country-4x3/ro.png" width="16px"/> | Română | [ro](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ro.po) | [Cristi Duluta](mailto:cristi.duluta@gmail.com) | 60%
<img src="https://joplinapp.org/images/flags/country-4x3/si.png" width="16px"/> | Slovenian (Slovenija) | [sl_SI](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/sl_SI.po) | [Martin Korelič](mailto:martin.korelic@protonmail.com) | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/se.png" width="16px"/> | Svenska | [sv](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/sv.po) | [Jonatan Nyberg](mailto:jonatan@autistici.org) | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/th.png" width="16px"/> | Thai (ประเทศไทย) | [th_TH](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/th_TH.po) | | 43%
<img src="https://joplinapp.org/images/flags/country-4x3/vi.png" width="16px"/> | Tiếng Việt | [vi](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/vi.po) | | 93%
<img src="https://joplinapp.org/images/flags/country-4x3/tr.png" width="16px"/> | Türkçe (Türkiye) | [tr_TR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/tr_TR.po) | [Arda Kılıçdağı](mailto:arda@kilicdagi.com) | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/ua.png" width="16px"/> | Ukrainian (Україна) | [uk_UA](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/uk_UA.po) | [Vyacheslav Andreykiv](mailto:vandreykiv@gmail.com) | 85%
<img src="https://joplinapp.org/images/flags/country-4x3/gr.png" width="16px"/> | Ελληνικά (Ελλάδα) | [el_GR](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/el_GR.po) | [Harris Arvanitis](mailto:xaris@tuta.io) | 89%
<img src="https://joplinapp.org/images/flags/country-4x3/ru.png" width="16px"/> | Русский (Россия) | [ru_RU](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ru_RU.po) | [Sergey Segeda](mailto:thesermanarm@gmail.com) | 95%
<img src="https://joplinapp.org/images/flags/country-4x3/rs.png" width="16px"/> | српски језик (Србија) | [sr_RS](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/sr_RS.po) | | 78%
<img src="https://joplinapp.org/images/flags/country-4x3/cn.png" width="16px"/> | 中文 (简体) | [zh_CN](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/zh_CN.po) | [南宫小骏](mailto:jackytsu@vip.qq.com) | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/tw.png" width="16px"/> | 中文 (繁體) | [zh_TW](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/zh_TW.po) | [SiderealArt](mailto:nelson22768384@gmail.com) | 92%
<img src="https://joplinapp.org/images/flags/country-4x3/jp.png" width="16px"/> | 日本語 (日本) | [ja_JP](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ja_JP.po) | [genneko](mailto:genneko217@gmail.com) | 96%
<img src="https://joplinapp.org/images/flags/country-4x3/kr.png" width="16px"/> | 한국어 | [ko](https://github.com/laurent22/joplin/blob/dev/packages/tools/locales/ko.po) | [Ji-Hyeon Gim](mailto:potatogim@potatogim.net) | 91%
<!-- LOCALE-TABLE-AUTO-GENERATED -->
# Contributors

20 file diffs suppressed because one or more lines are too long.

View File

@@ -41,45 +41,45 @@ locales['uk_UA'] = require('./uk_UA.json');
locales['vi'] = require('./vi.json');
locales['zh_CN'] = require('./zh_CN.json');
locales['zh_TW'] = require('./zh_TW.json');
stats['ar'] = {"percentDone":99};
stats['eu'] = {"percentDone":28};
stats['bs_BA'] = {"percentDone":71};
stats['bg_BG'] = {"percentDone":55};
stats['ca'] = {"percentDone":99};
stats['ar'] = {"percentDone":95};
stats['eu'] = {"percentDone":27};
stats['bs_BA'] = {"percentDone":68};
stats['bg_BG'] = {"percentDone":54};
stats['ca'] = {"percentDone":95};
stats['hr_HR'] = {"percentDone":95};
stats['cs_CZ'] = {"percentDone":94};
stats['da_DK'] = {"percentDone":99};
stats['de_DE'] = {"percentDone":99};
stats['et_EE'] = {"percentDone":54};
stats['cs_CZ'] = {"percentDone":91};
stats['da_DK'] = {"percentDone":96};
stats['de_DE'] = {"percentDone":96};
stats['et_EE'] = {"percentDone":52};
stats['en_GB'] = {"percentDone":100};
stats['en_US'] = {"percentDone":100};
stats['es_ES'] = {"percentDone":95};
stats['eo'] = {"percentDone":31};
stats['fi_FI'] = {"percentDone":99};
stats['fr_FR'] = {"percentDone":100};
stats['gl_ES'] = {"percentDone":36};
stats['id_ID'] = {"percentDone":95};
stats['it_IT'] = {"percentDone":95};
stats['hu_HU'] = {"percentDone":83};
stats['nl_BE'] = {"percentDone":86};
stats['nl_NL'] = {"percentDone":90};
stats['nb_NO'] = {"percentDone":96};
stats['fa'] = {"percentDone":67};
stats['pl_PL'] = {"percentDone":89};
stats['es_ES'] = {"percentDone":96};
stats['eo'] = {"percentDone":30};
stats['fi_FI'] = {"percentDone":95};
stats['fr_FR'] = {"percentDone":96};
stats['gl_ES'] = {"percentDone":35};
stats['id_ID'] = {"percentDone":94};
stats['it_IT'] = {"percentDone":92};
stats['hu_HU'] = {"percentDone":80};
stats['nl_BE'] = {"percentDone":83};
stats['nl_NL'] = {"percentDone":87};
stats['nb_NO'] = {"percentDone":93};
stats['fa'] = {"percentDone":65};
stats['pl_PL'] = {"percentDone":86};
stats['pt_BR'] = {"percentDone":96};
stats['pt_PT'] = {"percentDone":89};
stats['ro'] = {"percentDone":62};
stats['sl_SI'] = {"percentDone":90};
stats['sv'] = {"percentDone":99};
stats['th_TH'] = {"percentDone":42};
stats['vi'] = {"percentDone":96};
stats['tr_TR'] = {"percentDone":99};
stats['uk_UA'] = {"percentDone":89};
stats['el_GR'] = {"percentDone":92};
stats['ru_RU'] = {"percentDone":99};
stats['sr_RS'] = {"percentDone":80};
stats['zh_CN'] = {"percentDone":100};
stats['zh_TW'] = {"percentDone":95};
stats['ja_JP'] = {"percentDone":100};
stats['ko'] = {"percentDone":94};
stats['pt_PT'] = {"percentDone":86};
stats['ro'] = {"percentDone":60};
stats['sl_SI'] = {"percentDone":96};
stats['sv'] = {"percentDone":95};
stats['th_TH'] = {"percentDone":43};
stats['vi'] = {"percentDone":93};
stats['tr_TR'] = {"percentDone":95};
stats['uk_UA'] = {"percentDone":85};
stats['el_GR'] = {"percentDone":89};
stats['ru_RU'] = {"percentDone":95};
stats['sr_RS'] = {"percentDone":78};
stats['zh_CN'] = {"percentDone":96};
stats['zh_TW'] = {"percentDone":92};
stats['ja_JP'] = {"percentDone":96};
stats['ko'] = {"percentDone":91};
module.exports = { locales: locales, stats: stats };

20 file diffs suppressed because one or more lines are too long.

View File

@@ -12,6 +12,13 @@ describe('linkReplacement', () => {
expect(r).toBe('<a data-from-md href=\'https://example.com/test\'>');
});
test('should handle non-resource links with single quotes in it', () => {
// Handles a link such as:
// [Google](https://www.goo'onclick=javascript:alert(/1/);f=')
const r = linkReplacement('https://www.goo\'onclick=javascript:alert(/1/);f=\'', { linkRenderingType: 1 }).html;
expect(r).toBe('<a data-from-md href=\'https://www.goo&apos;onclick=javascript:alert(/1/);f=&apos;\' onclick=\'postMessage("https://www.goo%27onclick=javascript:alert(/1/);f=%27", { resourceId: "" }); return false;\'>');
});
test('should handle resource links - downloaded status', () => {
const resourceId = 'f6afba55bdf74568ac94f8d1e3578d2c';

View File

@@ -122,7 +122,7 @@ export default function(href: string, options: Options = null): LinkReplacementR
icon = '';
attrHtml.push(`href='${htmlentities(href)}'`);
} else {
attrHtml.push(`href='${hrefAttr}'`);
attrHtml.push(`href='${htmlentities(hrefAttr)}'`);
if (js) attrHtml.push(js);
}
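
The fix above matters because the href value is written inside a single-quoted HTML attribute: an unescaped `'` in the URL terminates the attribute value, and the remainder of the URL can then inject a new attribute such as `onclick`. A minimal standalone sketch of the idea (the real code uses the renderer's `htmlentities` helper; `escapeAttr` below is only an illustrative stand-in):

```ts
// Illustrative only: escapeAttr is a hypothetical stand-in for the
// htmlentities helper used by the renderer.
const escapeAttr = (s: string) =>
	s.replace(/&/g, '&amp;').replace(/'/g, '&apos;').replace(/"/g, '&quot;').replace(/</g, '&lt;');

const href = 'https://www.goo\'onclick=javascript:alert(/1/);f=\'';

// Unescaped: the first quote closes the href value, so onclick becomes a live attribute.
const unsafe = `<a href='${href}'>`;
// <a href='https://www.goo'onclick=javascript:alert(/1/);f=''>

// Escaped: the payload stays inside the attribute value.
const safe = `<a href='${escapeAttr(href)}'>`;
// <a href='https://www.goo&apos;onclick=javascript:alert(/1/);f=&apos;'>

console.info(unsafe);
console.info(safe);
```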

View File

@@ -4,7 +4,9 @@ const nodeSqlite = require('sqlite3');
shimInit({ nodeSqlite });
// We don't want the tests to fail due to timeout, especially on CI, and certain
// tests can take more time since we do integration testing too.
jest.setTimeout(30 * 1000);
// tests can take more time since we do integration testing too. The share tests
// in particular can take a while.
jest.setTimeout(60 * 1000);
process.env.JOPLIN_IS_TESTING = '1';

File diff suppressed because it is too large.

View File

@@ -21,6 +21,7 @@
"watch": "tsc --watch --project tsconfig.json"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.40.0",
"@fortawesome/fontawesome-free": "^5.15.1",
"@joplin/lib": "~2.6",
"@joplin/renderer": "~2.6",

Binary file not shown.

View File

@@ -5,7 +5,7 @@ import * as Koa from 'koa';
import * as fs from 'fs-extra';
import Logger, { LoggerWrapper, TargetType } from '@joplin/lib/Logger';
import config, { initConfig, runningInDocker } from './config';
import { migrateLatest, waitForConnection, sqliteDefaultDir, latestMigration } from './db';
import { migrateLatest, waitForConnection, sqliteDefaultDir, latestMigration, DbConnection } from './db';
import { AppContext, Env, KoaNext } from './utils/types';
import FsDriverNode from '@joplin/lib/fs-driver-node';
import routeHandler from './middleware/routeHandler';
@@ -17,10 +17,11 @@ import startServices from './utils/startServices';
import { credentialFile } from './utils/testing/testUtils';
import apiVersionHandler from './middleware/apiVersionHandler';
import clickJackingHandler from './middleware/clickJackingHandler';
import newModelFactory from './models/factory';
import newModelFactory, { Options } from './models/factory';
import setupCommands from './utils/setupCommands';
import { RouteResponseFormat, routeResponseFormat } from './utils/routeUtils';
import { parseEnv } from './env';
import storageDriverFromConfig from './models/items/storage/storageDriverFromConfig';
interface Argv {
env?: Env;
@@ -61,6 +62,8 @@ function appLogger(): LoggerWrapper {
}
function markPasswords(o: Record<string, any>): Record<string, any> {
if (!o) return o;
const output: Record<string, any> = {};
for (const k of Object.keys(o)) {
@@ -219,6 +222,13 @@ async function main() {
fs.writeFileSync(pidFile, `${process.pid}`);
}
const newModelFactoryOptions = async (db: DbConnection): Promise<Options> => {
return {
storageDriver: await storageDriverFromConfig(config().storageDriver, db, { assignDriverId: env !== 'buildTypes' }),
storageDriverFallback: await storageDriverFromConfig(config().storageDriverFallback, db, { assignDriverId: env !== 'buildTypes' }),
};
};
let runCommandAndExitApp = true;
if (selectedCommand) {
@@ -235,7 +245,7 @@ async function main() {
});
} else {
const connectionCheck = await waitForConnection(config().database);
const models = newModelFactory(connectionCheck.connection, config());
const models = newModelFactory(connectionCheck.connection, config(), await newModelFactoryOptions(connectionCheck.connection));
await selectedCommand.run(commandArgv, {
db: connectionCheck.connection,
@@ -253,6 +263,8 @@ async function main() {
appLogger().info('Log dir:', config().logDir);
appLogger().info('DB Config:', markPasswords(config().database));
appLogger().info('Mailer Config:', markPasswords(config().mailer));
appLogger().info('Content driver:', markPasswords(config().storageDriver));
appLogger().info('Content driver (fallback):', markPasswords(config().storageDriverFallback));
appLogger().info('Trying to connect to database...');
const connectionCheck = await waitForConnection(config().database);
@@ -263,7 +275,8 @@ async function main() {
appLogger().info('Connection check:', connectionCheckLogInfo);
const ctx = app.context as AppContext;
await setupAppContext(ctx, env, connectionCheck.connection, appLogger);
await setupAppContext(ctx, env, connectionCheck.connection, appLogger, await newModelFactoryOptions(connectionCheck.connection));
await initializeJoplinUtils(config(), ctx.joplinBase.models, ctx.joplinBase.services.mustache);
if (config().database.autoMigration) {

View File

@@ -3,6 +3,7 @@ import { Config, DatabaseConfig, DatabaseConfigClient, Env, MailerConfig, RouteT
import * as pathUtils from 'path';
import { loadStripeConfig, StripePublicConfig } from '@joplin/lib/utils/joplinCloud';
import { EnvVariables } from './env';
import parseStorageDriverConnectionString from './models/items/storage/parseStorageDriverConnectionString';
interface PackageJson {
version: string;
@@ -130,6 +131,8 @@ export async function initConfig(envType: Env, env: EnvVariables, overrides: any
supportName: env.SUPPORT_NAME || appName,
businessEmail: env.BUSINESS_EMAIL || supportEmail,
cookieSecure: env.COOKIES_SECURE,
storageDriver: parseStorageDriverConnectionString(env.STORAGE_DRIVER),
storageDriverFallback: parseStorageDriverConnectionString(env.STORAGE_DRIVER_FALLBACK),
...overrides,
};
}

View File

@@ -1,70 +1,13 @@
export interface EnvVariables {
// The possible env variables and their defaults are listed below.
//
// The env variables can be of type string, integer or boolean. When the type is
// boolean, set the variable to "0" or "1" in your env file.
const defaultEnvValues: EnvVariables = {
// ==================================================
// General config
// ==================================================
APP_NAME: string;
APP_PORT: number;
SIGNUP_ENABLED: boolean;
TERMS_ENABLED: boolean;
ACCOUNT_TYPES_ENABLED: boolean;
ERROR_STACK_TRACES: boolean;
COOKIES_SECURE: boolean;
RUNNING_IN_DOCKER: boolean;
// ==================================================
// URL config
// ==================================================
APP_BASE_URL: string;
USER_CONTENT_BASE_URL: string;
API_BASE_URL: string;
JOPLINAPP_BASE_URL: string;
// ==================================================
// Database config
// ==================================================
DB_CLIENT: string;
DB_SLOW_QUERY_LOG_ENABLED: boolean;
DB_SLOW_QUERY_LOG_MIN_DURATION: number;
DB_AUTO_MIGRATION: boolean;
POSTGRES_PASSWORD: string;
POSTGRES_DATABASE: string;
POSTGRES_USER: string;
POSTGRES_HOST: string;
POSTGRES_PORT: number;
// This must be the full path to the database file
SQLITE_DATABASE: string;
// ==================================================
// Mailer config
// ==================================================
MAILER_ENABLED: boolean;
MAILER_HOST: string;
MAILER_PORT: number;
MAILER_SECURE: boolean;
MAILER_AUTH_USER: string;
MAILER_AUTH_PASSWORD: string;
MAILER_NOREPLY_NAME: string;
MAILER_NOREPLY_EMAIL: string;
SUPPORT_EMAIL: string;
SUPPORT_NAME: string;
BUSINESS_EMAIL: string;
// ==================================================
// Stripe config
// ==================================================
STRIPE_SECRET_KEY: string;
STRIPE_WEBHOOK_SECRET: string;
}
const defaultEnvValues: EnvVariables = {
APP_NAME: 'Joplin Server',
APP_PORT: 22300,
SIGNUP_ENABLED: false,
@@ -74,11 +17,19 @@ const defaultEnvValues: EnvVariables = {
COOKIES_SECURE: false,
RUNNING_IN_DOCKER: false,
// ==================================================
// URL config
// ==================================================
APP_BASE_URL: '',
USER_CONTENT_BASE_URL: '',
API_BASE_URL: '',
JOPLINAPP_BASE_URL: 'https://joplinapp.org',
// ==================================================
// Database config
// ==================================================
DB_CLIENT: 'sqlite3',
DB_SLOW_QUERY_LOG_ENABLED: false,
DB_SLOW_QUERY_LOG_MIN_DURATION: 1000,
@@ -90,8 +41,20 @@ const defaultEnvValues: EnvVariables = {
POSTGRES_HOST: '',
POSTGRES_PORT: 5432,
// This must be the full path to the database file
SQLITE_DATABASE: '',
// ==================================================
// Content driver config
// ==================================================
STORAGE_DRIVER: 'Type=Database',
STORAGE_DRIVER_FALLBACK: '',
// ==================================================
// Mailer config
// ==================================================
MAILER_ENABLED: false,
MAILER_HOST: '',
MAILER_PORT: 587,
@@ -105,10 +68,62 @@ const defaultEnvValues: EnvVariables = {
SUPPORT_NAME: '',
BUSINESS_EMAIL: '',
// ==================================================
// Stripe config
// ==================================================
STRIPE_SECRET_KEY: '',
STRIPE_WEBHOOK_SECRET: '',
};
export interface EnvVariables {
APP_NAME: string;
APP_PORT: number;
SIGNUP_ENABLED: boolean;
TERMS_ENABLED: boolean;
ACCOUNT_TYPES_ENABLED: boolean;
ERROR_STACK_TRACES: boolean;
COOKIES_SECURE: boolean;
RUNNING_IN_DOCKER: boolean;
APP_BASE_URL: string;
USER_CONTENT_BASE_URL: string;
API_BASE_URL: string;
JOPLINAPP_BASE_URL: string;
DB_CLIENT: string;
DB_SLOW_QUERY_LOG_ENABLED: boolean;
DB_SLOW_QUERY_LOG_MIN_DURATION: number;
DB_AUTO_MIGRATION: boolean;
POSTGRES_PASSWORD: string;
POSTGRES_DATABASE: string;
POSTGRES_USER: string;
POSTGRES_HOST: string;
POSTGRES_PORT: number;
SQLITE_DATABASE: string;
STORAGE_DRIVER: string;
STORAGE_DRIVER_FALLBACK: string;
MAILER_ENABLED: boolean;
MAILER_HOST: string;
MAILER_PORT: number;
MAILER_SECURE: boolean;
MAILER_AUTH_USER: string;
MAILER_AUTH_PASSWORD: string;
MAILER_NOREPLY_NAME: string;
MAILER_NOREPLY_EMAIL: string;
SUPPORT_EMAIL: string;
SUPPORT_NAME: string;
BUSINESS_EMAIL: string;
STRIPE_SECRET_KEY: string;
STRIPE_WEBHOOK_SECRET: string;
}
export function parseEnv(rawEnv: any, defaultOverrides: any = null): EnvVariables {
const output: EnvVariables = {
...defaultEnvValues,
@@ -125,7 +140,7 @@ export function parseEnv(rawEnv: any, defaultOverrides: any = null): EnvVariable
if (isNaN(v)) throw new Error(`Invalid number value for env variable ${key} = ${rawEnvValue}`);
(output as any)[key] = v;
} else if (typeof value === 'boolean') {
if (rawEnvValue !== '0' && rawEnvValue !== '1') throw new Error(`Invalid boolean for for env variable ${key}: ${rawEnvValue}`);
if (rawEnvValue !== '0' && rawEnvValue !== '1') throw new Error(`Invalid boolean value for env variable ${key}: ${rawEnvValue} (Should be either "0" or "1")`);
(output as any)[key] = rawEnvValue === '1';
} else if (typeof value === 'string') {
(output as any)[key] = `${rawEnvValue}`;
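
As an aside on the parsing rules above: `parseEnv()` coerces each raw value based on the type of the corresponding default, so numbers and booleans are passed in as strings, and booleans only as "0" or "1". A hedged usage sketch (the `Type=Database` value is the default defined in this changeset; the other values are illustrative):

```ts
import { parseEnv } from './env';

// Hypothetical raw environment, as it might come from process.env or an env file.
const env = parseEnv({
	APP_PORT: '22300',       // coerced to the number 22300
	MAILER_ENABLED: '1',     // coerced to true (only "0" or "1" are accepted)
	STORAGE_DRIVER: 'Type=Database',
});

console.info(env.APP_PORT);        // 22300
console.info(env.MAILER_ENABLED);  // true
// parseEnv({ MAILER_ENABLED: 'yes' }) would throw, since booleans must be "0" or "1".
```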

View File

@@ -0,0 +1,32 @@
import { Knex } from 'knex';
import { DbConnection } from '../db';
export async function up(db: DbConnection): Promise<any> {
await db.schema.createTable('storages', (table: Knex.CreateTableBuilder) => {
table.increments('id').unique().primary().notNullable();
table.text('connection_string').notNullable();
});
await db('storages').insert({
connection_string: 'Type=Database',
});
// First we create the column and set a default so as to populate the
// content_storage_id field.
await db.schema.alterTable('items', (table: Knex.CreateTableBuilder) => {
table.integer('content_storage_id').defaultTo(1).notNullable();
});
// Once it's set, we remove the default as that should be explicitly set.
await db.schema.alterTable('items', (table: Knex.CreateTableBuilder) => {
table.integer('content_storage_id').notNullable().alter();
});
}
export async function down(db: DbConnection): Promise<any> {
await db.schema.dropTable('storages');
await db.schema.alterTable('items', (table: Knex.CreateTableBuilder) => {
table.dropColumn('content_storage_id');
});
}
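
One detail worth spelling out (a hedged note, not part of the diff): the `storages` row seeded with `Type=Database` is the first row of a fresh `increments()` column, so it receives id 1, which is exactly the value used by `defaultTo(1)` to backfill `content_storage_id` on existing items. The sketch below shows how that row can later be looked up through the `StorageModel` added in this changeset, assuming the model factory exposes it via an accessor such as `storage()` (not shown in the hunks here):

```ts
// Hypothetical usage (inside an async context); assumes the model factory
// exposes StorageModel via an accessor such as storage().
const storage = await models().storage().byConnectionString('Type=Database');
console.info(storage.id); // 1 - the value backfilled into items.content_storage_id above
```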

View File

@@ -3,7 +3,7 @@ import { DbConnection } from '../db';
import TransactionHandler from '../utils/TransactionHandler';
import uuidgen from '../utils/uuidgen';
import { ErrorUnprocessableEntity, ErrorBadRequest } from '../utils/errors';
import { Models } from './factory';
import { Models, NewModelFactoryHandler } from './factory';
import * as EventEmitter from 'events';
import { Config } from '../utils/types';
import personalizedUserContentBaseUrl from '@joplin/lib/services/joplinServer/personalizedUserContentBaseUrl';
@@ -54,12 +54,12 @@ export default abstract class BaseModel<T> {
private defaultFields_: string[] = [];
private db_: DbConnection;
private transactionHandler_: TransactionHandler;
private modelFactory_: Function;
private modelFactory_: NewModelFactoryHandler;
private static eventEmitter_: EventEmitter = null;
private config_: Config;
private savePoints_: SavePoint[] = [];
public constructor(db: DbConnection, modelFactory: Function, config: Config) {
public constructor(db: DbConnection, modelFactory: NewModelFactoryHandler, config: Config) {
this.db_ = db;
this.modelFactory_ = modelFactory;
this.config_ = config;
@@ -71,7 +71,7 @@ export default abstract class BaseModel<T> {
// connection is passed to it. That connection can be the regular db
// connection, or the active transaction.
protected models(db: DbConnection = null): Models {
return this.modelFactory_(db || this.db, this.config_);
return this.modelFactory_(db || this.db);
}
protected get baseUrl(): string {
@@ -90,7 +90,7 @@ export default abstract class BaseModel<T> {
return this.config_.appName;
}
protected get db(): DbConnection {
public get db(): DbConnection {
if (this.transactionHandler_.activeTransaction) return this.transactionHandler_.activeTransaction;
return this.db_;
}

View File

@@ -38,12 +38,12 @@ describe('ChangeModel', function() {
const changeModel = models().change();
await msleep(1); const item1 = await models().item().makeTestItem(user.id, 1); // [1] CREATE 1
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: '0000000000000000000000000000001A.md' }); // [2] UPDATE 1a
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: '0000000000000000000000000000001B.md' }); // [3] UPDATE 1b
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: '0000000000000000000000000000001A.md', content: Buffer.from('') }); // [2] UPDATE 1a
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: '0000000000000000000000000000001B.md', content: Buffer.from('') }); // [3] UPDATE 1b
await msleep(1); const item2 = await models().item().makeTestItem(user.id, 2); // [4] CREATE 2
await msleep(1); await itemModel.saveForUser(user.id, { id: item2.id, name: '0000000000000000000000000000002A.md' }); // [5] UPDATE 2a
await msleep(1); await itemModel.saveForUser(user.id, { id: item2.id, name: '0000000000000000000000000000002A.md', content: Buffer.from('') }); // [5] UPDATE 2a
await msleep(1); await itemModel.delete(item1.id); // [6] DELETE 1
await msleep(1); await itemModel.saveForUser(user.id, { id: item2.id, name: '0000000000000000000000000000002B.md' }); // [7] UPDATE 2b
await msleep(1); await itemModel.saveForUser(user.id, { id: item2.id, name: '0000000000000000000000000000002B.md', content: Buffer.from('') }); // [7] UPDATE 2b
await msleep(1); const item3 = await models().item().makeTestItem(user.id, 3); // [8] CREATE 3
// Check that the 8 changes were created
@@ -120,7 +120,7 @@ describe('ChangeModel', function() {
let i = 1;
await msleep(1); const item1 = await models().item().makeTestItem(user.id, 1); // CREATE 1
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: `test_mod${i++}` }); // UPDATE 1
await msleep(1); await itemModel.saveForUser(user.id, { id: item1.id, name: `test_mod${i++}`, content: Buffer.from('') }); // UPDATE 1
await expectThrow(async () => changeModel.delta(user.id, { limit: 1, cursor: 'invalid' }), 'resyncRequired');
});

View File

@@ -7,6 +7,10 @@ import { ApiError, ErrorForbidden, ErrorUnprocessableEntity } from '../utils/err
import { Knex } from 'knex';
import { ChangePreviousItem } from './ChangeModel';
import { unique } from '../utils/array';
import StorageDriverBase, { Context } from './items/storage/StorageDriverBase';
import { DbConnection } from '../db';
import { Config, StorageDriverMode } from '../utils/types';
import { NewModelFactoryHandler, Options } from './factory';
const mimeUtils = require('@joplin/lib/mime-utils.js').mime;
@@ -38,9 +42,22 @@ export interface ItemSaveOption extends SaveOptions {
shareId?: Uuid;
}
export interface ItemLoadOptions extends LoadOptions {
withContent?: boolean;
}
export default class ItemModel extends BaseModel<Item> {
private updatingTotalSizes_: boolean = false;
private storageDriver_: StorageDriverBase = null;
private storageDriverFallback_: StorageDriverBase = null;
public constructor(db: DbConnection, modelFactory: NewModelFactoryHandler, config: Config, options: Options) {
super(db, modelFactory, config);
this.storageDriver_ = options.storageDriver;
this.storageDriverFallback_ = options.storageDriverFallback;
}
protected get tableName(): string {
return 'items';
@@ -106,62 +123,106 @@ export default class ItemModel extends BaseModel<Item> {
return path.replace(extractNameRegex, '$1');
}
public byShareIdQuery(shareId: Uuid, options: LoadOptions = {}): Knex.QueryBuilder {
public byShareIdQuery(shareId: Uuid, options: ItemLoadOptions = {}): Knex.QueryBuilder {
return this
.db('items')
.select(this.selectFields(options, null, 'items'))
.where('jop_share_id', '=', shareId);
}
public async byShareId(shareId: Uuid, options: LoadOptions = {}): Promise<Item[]> {
public async byShareId(shareId: Uuid, options: ItemLoadOptions = {}): Promise<Item[]> {
const query = this.byShareIdQuery(shareId, options);
return await query;
}
public async loadByJopIds(userId: Uuid | Uuid[], jopIds: string[], options: LoadOptions = {}): Promise<Item[]> {
private async storageDriverWrite(itemId: Uuid, content: Buffer, context: Context) {
await this.storageDriver_.write(itemId, content, context);
if (this.storageDriverFallback_) {
if (this.storageDriverFallback_.mode === StorageDriverMode.ReadWrite) {
await this.storageDriverFallback_.write(itemId, content, context);
} else if (this.storageDriverFallback_.mode === StorageDriverMode.ReadOnly) {
await this.storageDriverFallback_.write(itemId, Buffer.from(''), context);
} else {
throw new Error(`Unsupported fallback mode: ${this.storageDriverFallback_.mode}`);
}
}
}
private async storageDriverRead(itemId: Uuid, context: Context) {
if (await this.storageDriver_.exists(itemId, context)) {
return this.storageDriver_.read(itemId, context);
} else {
if (!this.storageDriverFallback_) throw new Error(`Content does not exist but fallback content driver is not defined: ${itemId}`);
return this.storageDriverFallback_.read(itemId, context);
}
}
public async loadByJopIds(userId: Uuid | Uuid[], jopIds: string[], options: ItemLoadOptions = {}): Promise<Item[]> {
if (!jopIds.length) return [];
const userIds = Array.isArray(userId) ? userId : [userId];
if (!userIds.length) return [];
return this
const rows: Item[] = await this
.db('user_items')
.leftJoin('items', 'items.id', 'user_items.item_id')
.distinct(this.selectFields(options, null, 'items'))
.whereIn('user_items.user_id', userIds)
.whereIn('jop_id', jopIds);
if (options.withContent) {
for (const row of rows) {
row.content = await this.storageDriverRead(row.id, { models: this.models() });
}
}
return rows;
}
public async loadByJopId(userId: Uuid, jopId: string, options: LoadOptions = {}): Promise<Item> {
public async loadByJopId(userId: Uuid, jopId: string, options: ItemLoadOptions = {}): Promise<Item> {
const items = await this.loadByJopIds(userId, [jopId], options);
return items.length ? items[0] : null;
}
public async loadByNames(userId: Uuid | Uuid[], names: string[], options: LoadOptions = {}): Promise<Item[]> {
public async loadByNames(userId: Uuid | Uuid[], names: string[], options: ItemLoadOptions = {}): Promise<Item[]> {
if (!names.length) return [];
const userIds = Array.isArray(userId) ? userId : [userId];
return this
const rows: Item[] = await this
.db('user_items')
.leftJoin('items', 'items.id', 'user_items.item_id')
.distinct(this.selectFields(options, null, 'items'))
.whereIn('user_items.user_id', userIds)
.whereIn('name', names);
if (options.withContent) {
for (const row of rows) {
row.content = await this.storageDriverRead(row.id, { models: this.models() });
}
}
return rows;
}
public async loadByName(userId: Uuid, name: string, options: LoadOptions = {}): Promise<Item> {
public async loadByName(userId: Uuid, name: string, options: ItemLoadOptions = {}): Promise<Item> {
const items = await this.loadByNames(userId, [name], options);
return items.length ? items[0] : null;
}
public async loadWithContent(id: Uuid, options: LoadOptions = {}): Promise<Item> {
return this
.db('user_items')
.leftJoin('items', 'items.id', 'user_items.item_id')
.select(this.selectFields(options, ['*'], 'items'))
.where('items.id', '=', id)
.first();
public async loadWithContent(id: Uuid, options: ItemLoadOptions = {}): Promise<Item> {
const content = await this.storageDriverRead(id, { models: this.models() });
return {
...await this
.db('user_items')
.leftJoin('items', 'items.id', 'user_items.item_id')
.select(this.selectFields(options, ['*'], 'items'))
.where('items.id', '=', id)
.first(),
content,
};
}
public async loadAsSerializedJoplinItem(id: Uuid): Promise<string> {
@@ -255,9 +316,11 @@ export default class ItemModel extends BaseModel<Item> {
return this.itemToJoplinItem(raw);
}
public async saveFromRawContent(user: User, rawContentItems: SaveFromRawContentItem[], options: ItemSaveOption = null): Promise<SaveFromRawContentResult> {
public async saveFromRawContent(user: User, rawContentItems: SaveFromRawContentItem[] | SaveFromRawContentItem, options: ItemSaveOption = null): Promise<SaveFromRawContentResult> {
options = options || {};
if (!Array.isArray(rawContentItems)) rawContentItems = [rawContentItems];
// In this function, first we process the input items, which may be
// serialized Joplin items or actual buffers (for resources) and convert
// them to database items. Once it's done those db items are saved in
@@ -349,11 +412,46 @@ export default class ItemModel extends BaseModel<Item> {
continue;
}
const itemToSave = o.item;
const itemToSave = { ...o.item };
try {
const content = itemToSave.content;
delete itemToSave.content;
itemToSave.content_storage_id = this.storageDriver_.storageId;
itemToSave.content_size = content ? content.byteLength : 0;
// Here we save the item row and content, and we want to
// make sure that either both are saved or none of them.
// This is done by setting up a save point before saving the
// row, and rolling back if the content cannot be saved.
//
// Normally, since we are in a transaction, throwing an error
// would be enough, but that doesn't work here because we catch
// all errors within this block.
// TODO: When an item is uploaded multiple times
// simultaneously, there could be a race condition where the
// content would not match the db row (for example, the
// content_size would differ).
//
// Possible solutions:
//
// - A row-level lock on items.id, released once the content
// is saved.
// - An external lock - e.g. Redis.
const savePoint = await this.setSavePoint();
const savedItem = await this.saveForUser(user.id, itemToSave);
try {
await this.storageDriverWrite(savedItem.id, content, { models: this.models() });
await this.releaseSavePoint(savePoint);
} catch (error) {
await this.rollbackSavePoint(savePoint);
throw error;
}
if (o.isNote) {
await this.models().itemResource().deleteByItemId(savedItem.id);
await this.models().itemResource().addResourceIds(savedItem.id, o.resourceIds);
@@ -390,7 +488,7 @@ export default class ItemModel extends BaseModel<Item> {
}
private childrenQuery(userId: Uuid, pathQuery: string = '', count: boolean = false, options: LoadOptions = {}): Knex.QueryBuilder {
private childrenQuery(userId: Uuid, pathQuery: string = '', count: boolean = false, options: ItemLoadOptions = {}): Knex.QueryBuilder {
const query = this
.db('user_items')
.innerJoin('items', 'user_items.item_id', 'items.id')
@@ -420,7 +518,7 @@ export default class ItemModel extends BaseModel<Item> {
return `${this.baseUrl}/items/${itemId}/content`;
}
public async children(userId: Uuid, pathQuery: string = '', pagination: Pagination = null, options: LoadOptions = {}): Promise<PaginatedItems> {
public async children(userId: Uuid, pathQuery: string = '', pagination: Pagination = null, options: ItemLoadOptions = {}): Promise<PaginatedItems> {
pagination = pagination || defaultPagination();
const query = this.childrenQuery(userId, pathQuery, false, options);
return paginateDbQuery(query, pagination, 'items');
@@ -532,6 +630,8 @@ export default class ItemModel extends BaseModel<Item> {
await this.models().share().delete(shares.map(s => s.id));
await this.models().userItem().deleteByItemIds(ids);
await this.models().itemResource().deleteByItemIds(ids);
await this.storageDriver_.delete(ids, { models: this.models() });
if (this.storageDriverFallback_) await this.storageDriverFallback_.delete(ids, { models: this.models() });
await super.delete(ids, options);
}, 'ItemModel::delete');
@@ -552,6 +652,7 @@ export default class ItemModel extends BaseModel<Item> {
public async makeTestItem(userId: Uuid, num: number) {
return this.saveForUser(userId, {
name: `${num.toString().padStart(32, '0')}.md`,
content: Buffer.from(''),
});
}
@@ -560,23 +661,27 @@ export default class ItemModel extends BaseModel<Item> {
for (let i = 1; i <= count; i++) {
await this.saveForUser(userId, {
name: `${i.toString().padStart(32, '0')}.md`,
content: Buffer.from(''),
});
}
}, 'ItemModel::makeTestItems');
}
// This method should be private because items should only be saved using
// saveFromRawContent, which deals with the content driver. But since it's
// used in various unit tests, it's kept public for now.
public async saveForUser(userId: Uuid, item: Item, options: SaveOptions = {}): Promise<Item> {
if (!userId) throw new Error('userId is required');
item = { ... item };
const isNew = await this.isNew(item, options);
if (item.content) {
item.content_size = item.content.byteLength;
}
let previousItem: ChangePreviousItem = null;
if (item.content && !item.content_storage_id) {
item.content_storage_id = this.storageDriver_.storageId;
}
if (isNew) {
if (!item.mime_type) item.mime_type = mimeUtils.fromFilename(item.name) || '';
if (!item.owner_id) item.owner_id = userId;
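
A short usage sketch of the new `withContent` load option introduced in this file (hedged: it assumes a `models()` factory and `user` wired up as in the server tests elsewhere in this changeset, and a hypothetical item name):

```ts
// Hypothetical usage (inside an async context); models() and user are assumed
// to be set up as in the server tests.
const itemModel = models().item();

// Without withContent, only the database row is returned.
const row = await itemModel.loadByName(user.id, 'info.json');
console.info(row.name);

// With withContent, the content Buffer is read back through the storage
// driver, falling back to the fallback driver when the primary has no copy.
const full = await itemModel.loadByName(user.id, 'info.json', { withContent: true });
console.info(full.content.toString());
```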

View File

@@ -0,0 +1,18 @@
import { Storage } from '../services/database/types';
import BaseModel from './BaseModel';
export default class StorageModel extends BaseModel<Storage> {
public get tableName(): string {
return 'storages';
}
protected hasUuid(): boolean {
return false;
}
public async byConnectionString(connectionString: string): Promise<Storage> {
return this.db(this.tableName).where('connection_string', connectionString).first();
}
}

View File

@@ -360,18 +360,18 @@ describe('UserModel', function() {
const syncInfo3: any = JSON.parse(JSON.stringify(syncInfo1));
delete syncInfo3.ppk;
await models().item().saveForUser(user1.id, {
content: Buffer.from(JSON.stringify(syncInfo1)),
await models().item().saveFromRawContent(user1, {
body: Buffer.from(JSON.stringify(syncInfo1)),
name: 'info.json',
});
await models().item().saveForUser(user2.id, {
content: Buffer.from(JSON.stringify(syncInfo2)),
await models().item().saveFromRawContent(user2, {
body: Buffer.from(JSON.stringify(syncInfo2)),
name: 'info.json',
});
await models().item().saveForUser(user3.id, {
content: Buffer.from(JSON.stringify(syncInfo3)),
await models().item().saveFromRawContent(user3, {
body: Buffer.from(JSON.stringify(syncInfo3)),
name: 'info.json',
});


@@ -593,7 +593,7 @@ export default class UserModel extends BaseModel<User> {
public async publicPrivateKey(userId: string): Promise<PublicPrivateKeyPair> {
const syncInfo = await this.syncInfo(userId);
return syncInfo.ppk?.value || null;// syncInfo.ppk?.value.publicKey || '';
return syncInfo.ppk?.value || null;
}
// Note that when the "password" property is provided, it is going to be


@@ -72,88 +72,111 @@ import SubscriptionModel from './SubscriptionModel';
import UserFlagModel from './UserFlagModel';
import EventModel from './EventModel';
import { Config } from '../utils/types';
import StorageDriverBase from './items/storage/StorageDriverBase';
import LockModel from './LockModel';
import StorageModel from './StorageModel';
export interface Options {
storageDriver: StorageDriverBase;
storageDriverFallback?: StorageDriverBase;
}
export type NewModelFactoryHandler = (db: DbConnection)=> Models;
export class Models {
private db_: DbConnection;
private config_: Config;
private options_: Options;
public constructor(db: DbConnection, config: Config) {
public constructor(db: DbConnection, config: Config, options: Options) {
this.db_ = db;
this.config_ = config;
this.options_ = options;
// if (!options.storageDriver) throw new Error('StorageDriver is required');
this.newModelFactory = this.newModelFactory.bind(this);
}
private newModelFactory(db: DbConnection) {
return new Models(db, this.config_, this.options_);
}
public item() {
return new ItemModel(this.db_, newModelFactory, this.config_);
return new ItemModel(this.db_, this.newModelFactory, this.config_, this.options_);
}
public user() {
return new UserModel(this.db_, newModelFactory, this.config_);
return new UserModel(this.db_, this.newModelFactory, this.config_);
}
public email() {
return new EmailModel(this.db_, newModelFactory, this.config_);
return new EmailModel(this.db_, this.newModelFactory, this.config_);
}
public userItem() {
return new UserItemModel(this.db_, newModelFactory, this.config_);
return new UserItemModel(this.db_, this.newModelFactory, this.config_);
}
public token() {
return new TokenModel(this.db_, newModelFactory, this.config_);
return new TokenModel(this.db_, this.newModelFactory, this.config_);
}
public itemResource() {
return new ItemResourceModel(this.db_, newModelFactory, this.config_);
return new ItemResourceModel(this.db_, this.newModelFactory, this.config_);
}
public apiClient() {
return new ApiClientModel(this.db_, newModelFactory, this.config_);
return new ApiClientModel(this.db_, this.newModelFactory, this.config_);
}
public session() {
return new SessionModel(this.db_, newModelFactory, this.config_);
return new SessionModel(this.db_, this.newModelFactory, this.config_);
}
public change() {
return new ChangeModel(this.db_, newModelFactory, this.config_);
return new ChangeModel(this.db_, this.newModelFactory, this.config_);
}
public notification() {
return new NotificationModel(this.db_, newModelFactory, this.config_);
return new NotificationModel(this.db_, this.newModelFactory, this.config_);
}
public share() {
return new ShareModel(this.db_, newModelFactory, this.config_);
return new ShareModel(this.db_, this.newModelFactory, this.config_);
}
public shareUser() {
return new ShareUserModel(this.db_, newModelFactory, this.config_);
return new ShareUserModel(this.db_, this.newModelFactory, this.config_);
}
public keyValue() {
return new KeyValueModel(this.db_, newModelFactory, this.config_);
return new KeyValueModel(this.db_, this.newModelFactory, this.config_);
}
public subscription() {
return new SubscriptionModel(this.db_, newModelFactory, this.config_);
return new SubscriptionModel(this.db_, this.newModelFactory, this.config_);
}
public userFlag() {
return new UserFlagModel(this.db_, newModelFactory, this.config_);
return new UserFlagModel(this.db_, this.newModelFactory, this.config_);
}
public event() {
return new EventModel(this.db_, newModelFactory, this.config_);
return new EventModel(this.db_, this.newModelFactory, this.config_);
}
public lock() {
return new LockModel(this.db_, newModelFactory, this.config_);
return new LockModel(this.db_, this.newModelFactory, this.config_);
}
public storage() {
return new StorageModel(this.db_, this.newModelFactory, this.config_);
}
}
export default function newModelFactory(db: DbConnection, config: Config): Models {
return new Models(db, config);
export default function newModelFactory(db: DbConnection, config: Config, options: Options): Models {
return new Models(db, config, options);
}


@@ -0,0 +1,42 @@
import { StorageDriverConfig, StorageDriverMode } from '../../../utils/types';
import { Models } from '../../factory';
// ItemModel passes the models object when calling any of the driver handlers.
// This is so that if there's an active transaction, the driver can use it (as
// required, for example, by StorageDriverDatabase).
export interface Context {
models: Models;
}
export default class StorageDriverBase {
private storageId_: number;
private config_: StorageDriverConfig;
public constructor(storageId: number, config: StorageDriverConfig) {
this.storageId_ = storageId;
this.config_ = config;
}
public get storageId(): number {
return this.storageId_;
}
public get config(): StorageDriverConfig {
return this.config_;
}
public get mode(): StorageDriverMode {
return this.config.mode || StorageDriverMode.ReadOnly;
}
public async write(_itemId: string, _content: Buffer, _context: Context): Promise<void> { throw new Error('Not implemented'); }
public async read(_itemId: string, _context: Context): Promise<Buffer> { throw new Error('Not implemented'); }
public async delete(_itemId: string | string[], _context: Context): Promise<void> { throw new Error('Not implemented'); }
public async exists(_itemId: string, _context: Context): Promise<boolean> { throw new Error('Not implemented'); }
}
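
The Context convention described above is what lets a driver take part in the caller's transaction. From ItemModel's side, every driver call passes the current models object, roughly like this (a sketch based on the calls that appear elsewhere in this diff; `item` and `storageDriver_` are ItemModel internals):

await this.storageDriver_.write(item.id, item.content, { models: this.models() });
const content = await this.storageDriver_.read(item.id, { models: this.models() });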


@@ -0,0 +1,70 @@
import { clientType } from '../../../db';
import { afterAllTests, beforeAllDb, beforeEachDb, db, expectNotThrow, expectThrow, models } from '../../../utils/testing/testUtils';
import { StorageDriverMode } from '../../../utils/types';
import StorageDriverDatabase from './StorageDriverDatabase';
import StorageDriverMemory from './StorageDriverMemory';
import { shouldDeleteContent, shouldNotCreateItemIfContentNotSaved, shouldNotUpdateItemIfContentNotSaved, shouldSupportFallbackDriver, shouldSupportFallbackDriverInReadWriteMode, shouldUpdateContentStorageIdAfterSwitchingDriver, shouldWriteToContentAndReadItBack } from './testUtils';
const newDriver = () => {
return new StorageDriverDatabase(1, {
dbClientType: clientType(db()),
});
};
describe('StorageDriverDatabase', function() {
beforeAll(async () => {
await beforeAllDb('StorageDriverDatabase');
});
afterAll(async () => {
await afterAllTests();
});
beforeEach(async () => {
await beforeEachDb();
});
test('should write to content and read it back', async function() {
const driver = newDriver();
await shouldWriteToContentAndReadItBack(driver);
});
test('should delete the content', async function() {
const driver = newDriver();
await shouldDeleteContent(driver);
});
test('should not create the item if the content cannot be saved', async function() {
const driver = newDriver();
await shouldNotCreateItemIfContentNotSaved(driver);
});
test('should not update the item if the content cannot be saved', async function() {
const driver = newDriver();
await shouldNotUpdateItemIfContentNotSaved(driver);
});
test('should fail if the item row does not exist', async function() {
const driver = newDriver();
await expectThrow(async () => driver.read('oops', { models: models() }));
});
test('should do nothing if deleting non-existing row', async function() {
const driver = newDriver();
await expectNotThrow(async () => driver.delete('oops', { models: models() }));
});
test('should support fallback content drivers', async function() {
await shouldSupportFallbackDriver(newDriver(), new StorageDriverMemory(2));
});
test('should support fallback content drivers in rw mode', async function() {
await shouldSupportFallbackDriverInReadWriteMode(newDriver(), new StorageDriverMemory(2, { mode: StorageDriverMode.ReadWrite }));
});
test('should update content storage ID after switching driver', async function() {
await shouldUpdateContentStorageIdAfterSwitchingDriver(newDriver(), new StorageDriverMemory(2));
});
});


@@ -0,0 +1,58 @@
// This driver allows storing the content directly with the item row in the
// database (as a binary blob). For now the driver expects that the content is
// stored in the same table as the items, as it originally was.
import { DatabaseConfigClient, StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import StorageDriverBase, { Context } from './StorageDriverBase';
interface StorageDriverDatabaseConfig extends StorageDriverConfig {
dbClientType: DatabaseConfigClient;
}
export default class StorageDriverDatabase extends StorageDriverBase {
private handleReturnedRows_: boolean = null;
public constructor(id: number, config: StorageDriverDatabaseConfig) {
super(id, { type: StorageDriverType.Database, ...config });
this.handleReturnedRows_ = config.dbClientType === DatabaseConfigClient.PostgreSQL;
}
public async write(itemId: string, content: Buffer, context: Context): Promise<void> {
const returningOption = this.handleReturnedRows_ ? ['id'] : undefined;
const updatedRows = await context.models.item().db('items').update({ content }, returningOption).where('id', '=', itemId);
if (!this.handleReturnedRows_) return;
// Not possible because the ID is unique
if (updatedRows.length > 1) throw new Error('Updated more than one row');
// Not possible either, because the row is created before this handler is called, but it could still happen
if (!updatedRows.length) throw new Error(`No such item: ${itemId}`);
// That would be weird
if (updatedRows[0].id !== itemId) throw new Error(`Did not update the right row. Expected: ${itemId}. Got: ${updatedRows[0].id}`);
}
public async read(itemId: string, context: Context): Promise<Buffer> {
const row = await context.models.item().db('items').select('content').where('id', '=', itemId).first();
// Calling code should only call this handler if the row exists, so if
// we find it doesn't, it's an error.
if (!row) throw new Error(`No such row: ${itemId}`);
return row.content;
}
public async delete(_itemId: string | string[], _context: Context): Promise<void> {
// noop because the calling code deletes the whole row, including the
// content.
}
public async exists(itemId: string, context: Context): Promise<boolean> {
const row = await context.models.item().db('items').select('content').where('id', '=', itemId).first();
return !!row && !!row.content;
}
}


@@ -0,0 +1,84 @@
import { pathExists, remove } from 'fs-extra';
import { afterAllTests, beforeAllDb, beforeEachDb, expectNotThrow, expectThrow, tempDirPath } from '../../../utils/testing/testUtils';
import StorageDriverFs from './StorageDriverFs';
import { shouldDeleteContent, shouldNotCreateItemIfContentNotSaved, shouldNotUpdateItemIfContentNotSaved, shouldWriteToContentAndReadItBack } from './testUtils';
let basePath_: string = '';
const newDriver = () => {
return new StorageDriverFs(1, { path: basePath_ });
};
describe('StorageDriverFs', function() {
beforeAll(async () => {
await beforeAllDb('StorageDriverFs');
});
afterAll(async () => {
await afterAllTests();
});
beforeEach(async () => {
basePath_ = tempDirPath();
await beforeEachDb();
});
afterEach(async () => {
await remove(basePath_);
basePath_ = '';
});
test('should write to content and read it back', async function() {
const driver = newDriver();
await shouldWriteToContentAndReadItBack(driver);
});
test('should delete the content', async function() {
const driver = newDriver();
await shouldDeleteContent(driver);
});
test('should not create the item if the content cannot be saved', async function() {
const driver = newDriver();
await shouldNotCreateItemIfContentNotSaved(driver);
});
test('should not update the item if the content cannot be saved', async function() {
const driver = newDriver();
await shouldNotUpdateItemIfContentNotSaved(driver);
});
test('should write to a file and read it back', async function() {
const driver = newDriver();
await driver.write('testing', Buffer.from('testing'));
const content = await driver.read('testing');
expect(content.toString()).toBe('testing');
});
test('should automatically create the base path', async function() {
expect(await pathExists(basePath_)).toBe(false);
const driver = newDriver();
await driver.write('testing', Buffer.from('testing'));
expect(await pathExists(basePath_)).toBe(true);
});
test('should delete a file', async function() {
const driver = newDriver();
await driver.write('testing', Buffer.from('testing'));
expect((await driver.read('testing')).toString()).toBe('testing');
await driver.delete('testing');
await expectThrow(async () => driver.read('testing'), 'ENOENT');
});
test('should throw if the file does not exist when reading it', async function() {
const driver = newDriver();
await expectThrow(async () => driver.read('testread'), 'ENOENT');
});
test('should not throw if deleting a file that does not exist', async function() {
const driver = newDriver();
await expectNotThrow(async () => driver.delete('notthere'));
});
});


@@ -0,0 +1,48 @@
import { mkdirp, pathExists, readFile, remove, writeFile } from 'fs-extra';
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import StorageDriverBase from './StorageDriverBase';
export default class StorageDriverFs extends StorageDriverBase {
private pathCreated_: Record<string, boolean> = {};
public constructor(id: number, config: StorageDriverConfig) {
super(id, { type: StorageDriverType.Filesystem, ...config });
}
private async createParentDirectories(path: string) {
const p = path.split('/');
p.pop();
const basename = p.join('/');
if (this.pathCreated_[basename]) return;
await mkdirp(basename);
this.pathCreated_[basename] = true;
}
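// Content is sharded two directory levels deep based on the item ID: for example, an item
// "abcd1234..." under a configured path of "/data" is stored at "/data/ab/cd/abcd1234...".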
private itemPath(itemId: string): string {
return `${this.config.path}/${itemId.substr(0, 2).toLowerCase()}/${itemId.substr(2, 2).toLowerCase()}/${itemId}`;
}
public async write(itemId: string, content: Buffer): Promise<void> {
const itemPath = this.itemPath(itemId);
await this.createParentDirectories(itemPath);
await writeFile(itemPath, content);
}
public async read(itemId: string): Promise<Buffer> {
return readFile(this.itemPath(itemId));
}
public async delete(itemId: string | string[]): Promise<void> {
const itemIds = Array.isArray(itemId) ? itemId : [itemId];
for (const id of itemIds) {
await remove(this.itemPath(id));
}
}
public async exists(itemId: string): Promise<boolean> {
return pathExists(this.itemPath(itemId));
}
}


@@ -0,0 +1,40 @@
import { afterAllTests, beforeAllDb, beforeEachDb } from '../../../utils/testing/testUtils';
import StorageDriverMemory from './StorageDriverMemory';
import { shouldDeleteContent, shouldNotCreateItemIfContentNotSaved, shouldNotUpdateItemIfContentNotSaved, shouldWriteToContentAndReadItBack } from './testUtils';
describe('StorageDriverMemory', function() {
beforeAll(async () => {
await beforeAllDb('StorageDriverMemory');
});
afterAll(async () => {
await afterAllTests();
});
beforeEach(async () => {
await beforeEachDb();
});
test('should write to content and read it back', async function() {
const driver = new StorageDriverMemory(1);
await shouldWriteToContentAndReadItBack(driver);
});
test('should delete the content', async function() {
const driver = new StorageDriverMemory(1);
await shouldDeleteContent(driver);
});
test('should not create the item if the content cannot be saved', async function() {
const driver = new StorageDriverMemory(1);
await shouldNotCreateItemIfContentNotSaved(driver);
});
test('should not update the item if the content cannot be saved', async function() {
const driver = new StorageDriverMemory(1);
await shouldNotUpdateItemIfContentNotSaved(driver);
});
});


@@ -0,0 +1,32 @@
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import StorageDriverBase from './StorageDriverBase';
export default class StorageDriverMemory extends StorageDriverBase {
private data_: Record<string, Buffer> = {};
public constructor(id: number, config: StorageDriverConfig = null) {
super(id, { type: StorageDriverType.Memory, ...config });
}
public async write(itemId: string, content: Buffer): Promise<void> {
this.data_[itemId] = content;
}
public async read(itemId: string): Promise<Buffer> {
if (!(itemId in this.data_)) throw new Error(`No such item: ${itemId}`);
return this.data_[itemId];
}
public async delete(itemId: string | string[]): Promise<void> {
const itemIds = Array.isArray(itemId) ? itemId : [itemId];
for (const id of itemIds) {
delete this.data_[id];
}
}
public async exists(itemId: string): Promise<boolean> {
return itemId in this.data_;
}
}


@@ -0,0 +1,85 @@
// Note that these tests require an S3 bucket to be set up, with the credentials
// defined in the config file below. If the credentials are missing, all the
// tests are skipped.
import { afterAllTests, beforeAllDb, beforeEachDb, expectNotThrow, expectThrow, readCredentialFile } from '../../../utils/testing/testUtils';
import { StorageDriverType } from '../../../utils/types';
import StorageDriverS3 from './StorageDriverS3';
import { shouldDeleteContent, shouldNotCreateItemIfContentNotSaved, shouldNotUpdateItemIfContentNotSaved, shouldWriteToContentAndReadItBack } from './testUtils';
const s3Config = async () => {
const s = await readCredentialFile('server-s3-test-units.json', '');
if (!s) return null;
return JSON.parse(s);
};
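The shape of server-s3-test-units.json is not part of this diff; judging from the fields StorageDriverS3 reads further down, it presumably looks like this (all values are placeholders):
// {
//   "region": "...",
//   "accessKeyId": "...",
//   "secretAccessKeyId": "...",
//   "bucket": "..."
// }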
const newDriver = async () => {
return new StorageDriverS3(1, {
type: StorageDriverType.S3,
...await s3Config(),
});
};
const configIsSet = async () => {
const c = await s3Config();
return !!c;
};
describe('StorageDriverS3', function() {
beforeAll(async () => {
if (!(await configIsSet())) {
return;
} else {
console.warn('Running S3 unit tests on live environment!');
await beforeAllDb('StorageDriverS3');
}
});
afterAll(async () => {
if (!(await configIsSet())) return;
await afterAllTests();
});
beforeEach(async () => {
if (!(await configIsSet())) return;
await beforeEachDb();
});
test('should write to content and read it back', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await shouldWriteToContentAndReadItBack(driver);
});
test('should delete the content', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await shouldDeleteContent(driver);
});
test('should not create the item if the content cannot be saved', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await shouldNotCreateItemIfContentNotSaved(driver);
});
test('should not update the item if the content cannot be saved', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await shouldNotUpdateItemIfContentNotSaved(driver);
});
test('should fail if the item row does not exist', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await expectThrow(async () => driver.read('oops'));
});
test('should do nothing if deleting non-existing row', async function() {
if (!(await configIsSet())) return;
const driver = await newDriver();
await expectNotThrow(async () => driver.delete('oops'));
});
});


@@ -0,0 +1,97 @@
import { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectsCommand, ObjectIdentifier, HeadObjectCommand } from '@aws-sdk/client-s3';
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import StorageDriverBase from './StorageDriverBase';
function stream2buffer(stream: any): Promise<Buffer> {
return new Promise((resolve, reject) => {
const buffer: Uint8Array[] = [];
let hasError = false;
stream.on('data', (chunk: Uint8Array) => {
if (hasError) return;
buffer.push(chunk);
});
stream.on('end', () => {
if (hasError) return;
resolve(Buffer.concat(buffer));
});
stream.on('error', (error: any) => {
if (hasError) return;
hasError = true;
reject(error);
});
});
}
export default class StorageDriverS3 extends StorageDriverBase {
private client_: S3Client;
public constructor(id: number, config: StorageDriverConfig) {
super(id, { type: StorageDriverType.S3, ...config });
this.client_ = new S3Client({
// We need to set a region. See https://github.com/aws/aws-sdk-js-v3/issues/1845#issuecomment-754832210
region: this.config.region,
credentials: {
accessKeyId: this.config.accessKeyId,
secretAccessKey: this.config.secretAccessKeyId,
},
});
}
public async write(itemId: string, content: Buffer): Promise<void> {
await this.client_.send(new PutObjectCommand({
Bucket: this.config.bucket,
Key: itemId,
Body: content,
}));
}
public async read(itemId: string): Promise<Buffer | null> {
try {
const response = await this.client_.send(new GetObjectCommand({
Bucket: this.config.bucket,
Key: itemId,
}));
return stream2buffer(response.Body);
} catch (error) {
error.message = `Could not get item "${itemId}": ${error.message}`;
throw error;
}
}
public async delete(itemId: string | string[]): Promise<void> {
const itemIds = Array.isArray(itemId) ? itemId : [itemId];
const objects: ObjectIdentifier[] = itemIds.map(id => {
return { Key: id };
});
await this.client_.send(new DeleteObjectsCommand({
Bucket: this.config.bucket,
Delete: {
Objects: objects,
},
}));
}
public async exists(itemId: string): Promise<boolean> {
try {
await this.client_.send(new HeadObjectCommand({
Bucket: this.config.bucket,
Key: itemId,
}));
return true;
} catch (error) {
if (error?.$metadata?.httpStatusCode === 404) return false;
error.message = `Could not check if object exists: "${itemId}": ${error.message}`;
throw error;
}
}
}


@@ -0,0 +1,42 @@
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import parseStorageDriverConnectionString from './parseStorageDriverConnectionString';
describe('parseStorageDriverConnectionString', function() {
test('should parse a connection string', async function() {
const testCases: Record<string, StorageDriverConfig> = {
'Type=Database': {
type: StorageDriverType.Database,
},
' Type = Database ': {
type: StorageDriverType.Database,
},
'Type=Filesystem; Path=/path/to/dir': {
type: StorageDriverType.Filesystem,
path: '/path/to/dir',
},
' Type = Filesystem ; Path = /path/to/dir ': {
type: StorageDriverType.Filesystem,
path: '/path/to/dir',
},
'Type=Memory;': {
type: StorageDriverType.Memory,
},
'': null,
};
for (const [connectionString, config] of Object.entries(testCases)) {
const actual = parseStorageDriverConnectionString(connectionString);
expect(actual).toEqual(config);
}
});
test('should detect errors', async function() {
expect(() => parseStorageDriverConnectionString('Path=/path/to/dir')).toThrow(); // Type is missing
expect(() => parseStorageDriverConnectionString('Type=')).toThrow();
expect(() => parseStorageDriverConnectionString('Type;')).toThrow();
expect(() => parseStorageDriverConnectionString('Type=DoesntExist')).toThrow();
expect(() => parseStorageDriverConnectionString('Type=Filesystem')).toThrow();
});
});


@@ -0,0 +1,63 @@
// Type={Database,Filesystem,Memory,S3}; Path={/path/to/dir,https://s3bucket}
import { StorageDriverConfig, StorageDriverMode, StorageDriverType } from '../../../utils/types';
const parseType = (type: string): StorageDriverType => {
if (type === 'Database') return StorageDriverType.Database;
if (type === 'Filesystem') return StorageDriverType.Filesystem;
if (type === 'Memory') return StorageDriverType.Memory;
throw new Error(`Invalid type: "${type}"`);
};
const parseMode = (mode: string): StorageDriverMode => {
if (mode === 'rw') return StorageDriverMode.ReadWrite;
if (mode === 'r') return StorageDriverMode.ReadOnly;
throw new Error(`Invalid mode: "${mode}"`);
};
const validate = (config: StorageDriverConfig) => {
if (!config.type) throw new Error('Type must be specified');
if (config.type === StorageDriverType.Filesystem && !config.path) throw new Error('Path must be set for filesystem driver');
return config;
};
export default function(connectionString: string): StorageDriverConfig | null {
if (!connectionString) return null;
const output: StorageDriverConfig = {
type: null,
};
const items = connectionString.split(';').map(i => i.trim());
try {
for (const item of items) {
if (!item) continue;
const [key, value] = item.split('=').map(s => s.trim());
if (key === 'Type') {
output.type = parseType(value);
} else if (key === 'Path') {
output.path = value;
} else if (key === 'Mode') {
output.mode = parseMode(value);
} else if (key === 'Region') {
output.region = value;
} else if (key === 'AccessKeyId') {
output.accessKeyId = value;
} else if (key === 'SecretAccessKeyId') {
output.secretAccessKeyId = value;
} else if (key === 'Bucket') {
output.bucket = value;
} else {
throw new Error(`Invalid key: "${key}"`);
}
}
} catch (error) {
error.message = `In connection string "${connectionString}": ${error.message}`;
throw error;
}
return validate(output);
}
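
For example (the path value is hypothetical), a filesystem connection string with a mode parses as:

parseStorageDriverConnectionString('Type=Filesystem; Path=/var/joplin; Mode=rw');
// => { type: StorageDriverType.Filesystem, path: '/var/joplin', mode: StorageDriverMode.ReadWrite }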


@@ -0,0 +1,30 @@
import { StorageDriverConfig, StorageDriverMode, StorageDriverType } from '../../../utils/types';
const serializeType = (type: StorageDriverType): string => {
if (type === StorageDriverType.Database) return 'Database';
if (type === StorageDriverType.Filesystem) return 'Filesystem';
if (type === StorageDriverType.Memory) return 'Memory';
throw new Error(`Invalid type: "${type}"`);
};
const serializeMode = (mode: StorageDriverMode): string => {
if (mode === StorageDriverMode.ReadWrite) return 'rw';
if (mode === StorageDriverMode.ReadOnly) return 'r';
throw new Error(`Invalid mode: "${mode}"`);
};
export default function(config: StorageDriverConfig, locationOnly: boolean = true): string {
if (!config) return '';
const items: string[] = [];
items.push(`Type=${serializeType(config.type)}`);
if (config.path) items.push(`Path=${config.path}`);
if (!locationOnly && config.mode) items.push(`Mode=${serializeMode(config.mode)}`);
items.sort();
return items.join('; ');
}
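
A worked example of the serialization above (the path value is hypothetical): with the default locationOnly = true, the Mode key is dropped and the remaining keys are sorted alphabetically:

serializeStorageConfig({ type: StorageDriverType.Filesystem, path: '/var/joplin', mode: StorageDriverMode.ReadWrite });
// => 'Path=/var/joplin; Type=Filesystem'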


@@ -0,0 +1,54 @@
import globalConfig from '../../../config';
import { clientType, DbConnection } from '../../../db';
import { StorageDriverConfig, StorageDriverType } from '../../../utils/types';
import newModelFactory from '../../factory';
import serializeStorageConfig from './serializeStorageConfig';
import StorageDriverBase from './StorageDriverBase';
import StorageDriverDatabase from './StorageDriverDatabase';
import StorageDriverFs from './StorageDriverFs';
import StorageDriverMemory from './StorageDriverMemory';
export interface Options {
assignDriverId?: boolean;
}
export default async function(config: StorageDriverConfig, db: DbConnection, options: Options = null): Promise<StorageDriverBase | null> {
if (!config) return null;
options = {
assignDriverId: true,
...options,
};
let storageId: number = 0;
if (options.assignDriverId) {
const models = newModelFactory(db, globalConfig(), { storageDriver: null });
const connectionString = serializeStorageConfig(config);
const existingStorage = await models.storage().byConnectionString(connectionString);
if (existingStorage) {
storageId = existingStorage.id;
} else {
const storage = await models.storage().save({
connection_string: connectionString,
});
storageId = storage.id;
}
}
if (config.type === StorageDriverType.Database) {
return new StorageDriverDatabase(storageId, { ...config, dbClientType: clientType(db) });
}
if (config.type === StorageDriverType.Filesystem) {
return new StorageDriverFs(storageId, config);
}
if (config.type === StorageDriverType.Memory) {
return new StorageDriverMemory(storageId, config);
}
throw new Error(`Invalid config: ${JSON.stringify(config)}`);
}
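
Putting the two helpers together, a driver can be built from a connection string roughly like this (a sketch; the connection string value is made up):

const storageConfig = parseStorageDriverConnectionString('Type=Filesystem; Path=/var/joplin');
const storageDriver = await storageDriverFromConfig(storageConfig, db);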


@@ -0,0 +1,245 @@
import { Item } from '../../../services/database/types';
import { createUserAndSession, makeNoteSerializedBody, models } from '../../../utils/testing/testUtils';
import { StorageDriverMode } from '../../../utils/types';
import StorageDriverBase, { Context } from './StorageDriverBase';
const testModels = (driver: StorageDriverBase) => {
return models({ storageDriver: driver });
};
export async function shouldWriteToContentAndReadItBack(driver: StorageDriverBase) {
const { user } = await createUserAndSession(1);
const noteBody = makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing driver',
});
const output = await testModels(driver).item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(noteBody),
}]);
const result = output['00000000000000000000000000000001.md'];
expect(result.error).toBeFalsy();
const item = await testModels(driver).item().loadWithContent(result.item.id);
expect(item.content.byteLength).toBe(item.content_size);
expect(item.content_storage_id).toBe(driver.storageId);
const rawContent = await driver.read(item.id, { models: models() });
expect(rawContent.byteLength).toBe(item.content_size);
const jopItem = testModels(driver).item().itemToJoplinItem(item);
expect(jopItem.id).toBe('00000000000000000000000000000001');
expect(jopItem.title).toBe('testing driver');
}
export async function shouldDeleteContent(driver: StorageDriverBase) {
const { user } = await createUserAndSession(1);
const noteBody = makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing driver',
});
const output = await testModels(driver).item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(noteBody),
}]);
const item: Item = output['00000000000000000000000000000001.md'].item;
expect((await testModels(driver).item().all()).length).toBe(1);
await testModels(driver).item().delete(item.id);
expect((await testModels(driver).item().all()).length).toBe(0);
}
export async function shouldNotCreateItemIfContentNotSaved(driver: StorageDriverBase) {
const previousWrite = driver.write;
driver.write = () => { throw new Error('not working!'); };
try {
const { user } = await createUserAndSession(1);
const noteBody = makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing driver',
});
const output = await testModels(driver).item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(noteBody),
}]);
expect(output['00000000000000000000000000000001.md'].error.message).toBe('not working!');
expect((await testModels(driver).item().all()).length).toBe(0);
} finally {
driver.write = previousWrite;
}
}
export async function shouldNotUpdateItemIfContentNotSaved(driver: StorageDriverBase) {
const { user } = await createUserAndSession(1);
const noteBody = makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing driver',
});
await testModels(driver).item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(noteBody),
}]);
const noteBodyMod1 = makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'updated 1',
});
await testModels(driver).item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(noteBodyMod1),
}]);
const itemMod1 = testModels(driver).item().itemToJoplinItem(await testModels(driver).item().loadByJopId(user.id, '00000000000000000000000000000001', { withContent: true }));
expect(itemMod1.title).toBe('updated 1');
const noteBodyMod2 = makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'updated 2',
});
const previousWrite = driver.write;
driver.write = () => { throw new Error('not working!'); };
try {
const output = await testModels(driver).item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(noteBodyMod2),
}]);
expect(output['00000000000000000000000000000001.md'].error.message).toBe('not working!');
const itemMod2 = testModels(driver).item().itemToJoplinItem(await testModels(driver).item().loadByJopId(user.id, '00000000000000000000000000000001', { withContent: true }));
expect(itemMod2.title).toBe('updated 1'); // Check it has not been updated
} finally {
driver.write = previousWrite;
}
}
export async function shouldSupportFallbackDriver(driver: StorageDriverBase, fallbackDriver: StorageDriverBase) {
const { user } = await createUserAndSession(1);
const output = await testModels(driver).item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing',
})),
}]);
const itemId = output['00000000000000000000000000000001.md'].item.id;
let previousByteLength = 0;
{
const content = await driver.read(itemId, { models: models() });
expect(content.byteLength).toBeGreaterThan(10);
previousByteLength = content.byteLength;
}
const testModelWithFallback = models({
storageDriver: driver,
storageDriverFallback: fallbackDriver,
});
// If the item content is not on the main content driver, it should get
// it from the fallback one.
const itemFromDb = await testModelWithFallback.item().loadWithContent(itemId);
expect(itemFromDb.content.byteLength).toBe(previousByteLength);
// When writing content, it should use the main content driver, and set
// the content for the fallback one to "".
await testModelWithFallback.item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing1234',
})),
}]);
{
// Check that it has cleared the fallback driver content
const context: Context = { models: models() };
const fallbackContent = await fallbackDriver.read(itemId, context);
expect(fallbackContent.byteLength).toBe(0);
// Check that it has written to the main driver content
const mainContent = await driver.read(itemId, context);
expect(mainContent.byteLength).toBe(previousByteLength + 4);
}
}
export async function shouldSupportFallbackDriverInReadWriteMode(driver: StorageDriverBase, fallbackDriver: StorageDriverBase) {
if (fallbackDriver.mode !== StorageDriverMode.ReadWrite) throw new Error('Content driver must be configured in RW mode for this test');
const { user } = await createUserAndSession(1);
const testModelWithFallback = models({
storageDriver: driver,
storageDriverFallback: fallbackDriver,
});
const output = await testModelWithFallback.item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing',
})),
}]);
const itemId = output['00000000000000000000000000000001.md'].item.id;
{
// Check that it has written the content to both drivers
const context: Context = { models: models() };
const fallbackContent = await fallbackDriver.read(itemId, context);
expect(fallbackContent.byteLength).toBeGreaterThan(10);
const mainContent = await driver.read(itemId, context);
expect(mainContent.toString()).toBe(fallbackContent.toString());
}
}
export async function shouldUpdateContentStorageIdAfterSwitchingDriver(oldDriver: StorageDriverBase, newDriver: StorageDriverBase) {
if (oldDriver.storageId === newDriver.storageId) throw new Error('Drivers must be different for this test');
const { user } = await createUserAndSession(1);
const oldDriverModel = models({
storageDriver: oldDriver,
});
const newDriverModel = models({
storageDriver: newDriver,
});
const output = await oldDriverModel.item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing',
})),
}]);
const itemId = output['00000000000000000000000000000001.md'].item.id;
expect((await oldDriverModel.item().load(itemId)).content_storage_id).toBe(oldDriver.storageId);
await newDriverModel.item().saveFromRawContent(user, [{
name: '00000000000000000000000000000001.md',
body: Buffer.from(makeNoteSerializedBody({
id: '00000000000000000000000000000001',
title: 'testing',
})),
}]);
expect(await newDriverModel.item().count()).toBe(1);
expect((await oldDriverModel.item().load(itemId)).content_storage_id).toBe(newDriver.storageId);
}


@@ -1,4 +1,4 @@
export type Uuid = any;
export type Uuid = string;
export enum ItemAddressingType {
Id = 1,
@@ -246,6 +246,11 @@ export interface Event extends WithUuid {
created_time?: number;
}
export interface Storage {
id?: number;
connection_string?: string;
}
export interface Item extends WithDates, WithUuid {
name?: string;
mime_type?: string;
@@ -258,6 +263,7 @@ export interface Item extends WithDates, WithUuid {
jop_encryption_applied?: number;
jop_updated_time?: number;
owner_id?: Uuid;
content_storage_id?: number;
}
export const databaseSchema: DatabaseTables = {
@@ -418,6 +424,10 @@ export const databaseSchema: DatabaseTables = {
name: { type: 'string' },
created_time: { type: 'string' },
},
storages: {
id: { type: 'number' },
connection_string: { type: 'string' },
},
items: {
id: { type: 'string' },
name: { type: 'string' },
@@ -433,6 +443,7 @@ export const databaseSchema: DatabaseTables = {
jop_encryption_applied: { type: 'number' },
jop_updated_time: { type: 'string' },
owner_id: { type: 'string' },
content_storage_id: { type: 'number' },
},
};
// AUTO-GENERATED-TYPES


@@ -1,6 +1,7 @@
import time from '@joplin/lib/time';
import { DbConnection, dropTables, migrateLatest } from '../db';
import newModelFactory from '../models/factory';
import storageDriverFromConfig from '../models/items/storage/storageDriverFromConfig';
import { AccountType } from '../models/UserModel';
import { User, UserFlagType } from '../services/database/types';
import { Config } from '../utils/types';
@@ -34,9 +35,12 @@ export async function createTestUsers(db: DbConnection, config: Config, options:
const password = 'hunter1hunter2hunter3';
if (options.count) {
const models = newModelFactory(db, config);
const models = newModelFactory(db, config, {
// storageDriver: new StorageDriverDatabase(1, { dbClientType: clientType(db) }),
storageDriver: await storageDriverFromConfig(config.storageDriver, db), // new StorageDriverDatabase(1, { dbClientType: clientType(db) }),
});
if (options.count) {
const users: User[] = [];
for (let i = 0; i < options.count; i++) {
@@ -52,7 +56,6 @@ export async function createTestUsers(db: DbConnection, config: Config, options:
} else {
await dropTables(db);
await migrateLatest(db);
const models = newModelFactory(db, config);
for (let userNum = 1; userNum <= 2; userNum++) {
await models.user().save({

View File

@@ -141,7 +141,7 @@ async function noteLinkedItemInfos(userId: Uuid, itemModel: ItemModel, note: Not
const output: LinkedItemInfos = {};
for (const jopId of jopIds) {
const item = await itemModel.loadByJopId(userId, jopId, { fields: ['*'] });
const item = await itemModel.loadByJopId(userId, jopId, { fields: ['*'], withContent: true });
if (!item) continue;
output[jopId] = {
@@ -265,7 +265,7 @@ export async function renderItem(userId: Uuid, item: Item, share: Share, query:
};
if (query.resource_id) {
const resourceItem = await models_.item().loadByName(userId, resourceBlobPath(query.resource_id), { fields: ['*'] });
const resourceItem = await models_.item().loadByName(userId, resourceBlobPath(query.resource_id), { fields: ['*'], withContent: true });
fileToRender.item = resourceItem;
fileToRender.content = resourceItem.content;
fileToRender.jopItemId = query.resource_id;


@@ -1,7 +1,7 @@
import { LoggerWrapper } from '@joplin/lib/Logger';
import config from '../config';
import { DbConnection } from '../db';
import newModelFactory, { Models } from '../models/factory';
import newModelFactory, { Models, Options as ModelFactoryOptions } from '../models/factory';
import { AppContext, Config, Env } from './types';
import routes from '../routes/routes';
import ShareService from '../services/ShareService';
@@ -23,8 +23,8 @@ async function setupServices(env: Env, models: Models, config: Config): Promise<
return output;
}
export default async function(appContext: AppContext, env: Env, dbConnection: DbConnection, appLogger: ()=> LoggerWrapper): Promise<AppContext> {
const models = newModelFactory(dbConnection, config());
export default async function(appContext: AppContext, env: Env, dbConnection: DbConnection, appLogger: ()=> LoggerWrapper, options: ModelFactoryOptions): Promise<AppContext> {
const models = newModelFactory(dbConnection, config(), options);
// The joplinBase object is immutable because it is shared by all requests.
// Then a "joplin" context property is created from it per request, which


@@ -1,7 +1,7 @@
import { DbConnection, connectDb, disconnectDb, truncateTables } from '../../db';
import { User, Session, Item, Uuid } from '../../services/database/types';
import { createDb, CreateDbOptions } from '../../tools/dbTools';
import modelFactory from '../../models/factory';
import modelFactory, { Options as ModelFactoryOptions } from '../../models/factory';
import { AppContext, Env } from '../types';
import config, { initConfig } from '../../config';
import Logger from '@joplin/lib/Logger';
@@ -23,6 +23,7 @@ import MustacheService from '../../services/MustacheService';
import uuidgen from '../uuidgen';
import { createCsrfToken } from '../csrf';
import { cookieSet } from '../cookies';
import StorageDriverMemory from '../../models/items/storage/StorageDriverMemory';
import { parseEnv } from '../../env';
// Takes into account the fact that this file will be inside the /dist directory
@@ -37,10 +38,14 @@ export function randomHash(): string {
return crypto.createHash('md5').update(`${Date.now()}-${Math.random()}`).digest('hex');
}
export function tempDirPath(): string {
return `${packageRootDir}/temp/${randomHash()}`;
}
let tempDir_: string = null;
export async function tempDir(): Promise<string> {
if (tempDir_) return tempDir_;
tempDir_ = `${packageRootDir}/temp/${randomHash()}`;
tempDir_ = tempDirPath();
await fs.mkdirp(tempDir_);
return tempDir_;
}
@@ -190,7 +195,7 @@ export async function koaAppContext(options: AppContextTestOptions = null): Prom
const appLogger = Logger.create('AppTest');
const baseAppContext = await setupAppContext({} as any, Env.Dev, db_, () => appLogger);
const baseAppContext = await setupAppContext({} as any, Env.Dev, db_, () => appLogger, { storageDriver: new StorageDriverMemory(1) });
// Set type to "any" because the Koa context has many properties and we
// don't need to mock all of them.
@@ -238,12 +243,16 @@ export function db() {
return db_;
}
// function baseUrl() {
// return 'http://localhost:22300';
// }
const storageDriverMemory = new StorageDriverMemory(1);
export function models() {
return modelFactory(db(), config());
export function models(options: ModelFactoryOptions = null) {
options = {
storageDriver: storageDriverMemory,
storageDriverFallback: null,
...options,
};
return modelFactory(db(), config(), options);
}
export function parseHtml(html: string): Document {


@@ -87,6 +87,48 @@ export interface StripeConfig extends StripePublicConfig {
webhookSecret: string;
}
export enum StorageDriverType {
Database = 1,
Filesystem = 2,
Memory = 3,
S3 = 4,
}
// The driver mode is only used by fallback drivers. Regardless of the mode, the
// fallback always works like this:
//
// When reading, the app first checks if the content exists on the main driver.
// If it does, it returns that. Otherwise it reads the content from the fallback
// driver.
//
// When writing, the app writes to the main driver. Then the mode determines how
// it writes to the fallback driver:
//
// - In read-only mode, it clears the fallback driver content. This is
// used to migrate from one driver to another. It means that over time the old
// storage will be cleared and all content will be on the new storage.
//
// - In read/write mode, it writes the content to the fallback driver as well.
// This is purely for safety - it allows deploying the new storage (such as the
// filesystem or S3) while still keeping the old content up to date. So if
// something goes wrong, it is possible to go back to the old storage until the
// new one is proven to work.
export enum StorageDriverMode {
ReadWrite = 1,
ReadOnly = 2,
}
export interface StorageDriverConfig {
type?: StorageDriverType;
path?: string;
mode?: StorageDriverMode;
region?: string;
accessKeyId?: string;
secretAccessKeyId?: string;
bucket?: string;
}
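A minimal sketch of the fallback behaviour described in the comment above, assuming the StorageDriverBase, Context and StorageDriverMode definitions shown earlier in this diff (this is not the actual ItemModel code):

async function readContent(main: StorageDriverBase, fallback: StorageDriverBase, itemId: string, context: Context): Promise<Buffer> {
if (await main.exists(itemId, context)) return main.read(itemId, context);
return fallback.read(itemId, context);
}

async function writeContent(main: StorageDriverBase, fallback: StorageDriverBase, itemId: string, content: Buffer, context: Context): Promise<void> {
await main.write(itemId, content, context);
if (fallback.mode === StorageDriverMode.ReadOnly) {
// Read-only fallback: clear the old copy so the old storage empties over time
await fallback.write(itemId, Buffer.from(''), context);
} else {
// Read/write fallback: keep the old storage up to date for safety
await fallback.write(itemId, content, context);
}
}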
export interface Config {
appVersion: string;
appName: string;
@@ -115,6 +157,8 @@ export interface Config {
businessEmail: string;
isJoplinCloud: boolean;
cookieSecure: boolean;
storageDriver: StorageDriverConfig;
storageDriverFallback: StorageDriverConfig;
}
export enum HttpMethod {


@@ -19,6 +19,9 @@ const libDir = `${rootDir}/packages/lib`;
const { execCommand, isMac, insertContentIntoFile, filename, dirname, fileExtension } = require('./tool-utils.js');
const { countryDisplayName, countryCodeOnly } = require('@joplin/lib/locale');
const { GettextExtractor, JsExtractors } = require('gettext-extractor');
function parsePoFile(filePath) {
const content = fs.readFileSync(filePath);
return gettextParser.po.parse(content);
@@ -26,22 +29,29 @@ function parsePoFile(filePath) {
function serializeTranslation(translation) {
const output = {};
const translations = translation.translations[''];
for (const n in translations) {
if (!translations.hasOwnProperty(n)) continue;
if (n == '') continue;
const t = translations[n];
let translated = '';
if (t.comments && t.comments.flag && t.comments.flag.indexOf('fuzzy') >= 0) {
// Don't include fuzzy translations
} else {
translated = t['msgstr'][0];
}
if (translated) output[n] = translated;
// Translations are grouped by "msgctxt"
for (const msgctxt of Object.keys(translation.translations)) {
const translations = translation.translations[msgctxt];
for (const n in translations) {
if (!translations.hasOwnProperty(n)) continue;
if (n == '') continue;
const t = translations[n];
let translated = '';
if (t.comments && t.comments.flag && t.comments.flag.indexOf('fuzzy') >= 0) {
// Don't include fuzzy translations
} else {
translated = t['msgstr'][0];
}
if (translated) output[n] = translated;
}
}
return JSON.stringify(output);
// Sort the translations to make the diff easier to read.
return JSON.stringify(output, Object.keys(output).sort((a, b) => a.toLowerCase() < b.toLowerCase() ? -1 : +1), ' ');
}
function saveToFile(filePath, data) {
@@ -113,14 +123,7 @@ async function createPotFile(potFilePath) {
'./readme/*',
];
// We get all the .ts and .js files, preferring the .ts file when it's
// available (because the .js file is a minified version and gettext might
// fail on it).
//
// As of 2021-11, gettext doesn't process .tsx files so we still need to use
// the .js for this.
const findCommand = `find . -type f \\( -iname \\*.js -o -iname \\*.ts \\) -not -path '${excludedDirs.join('\' -not -path \'')}'`;
const findCommand = `find . -type f \\( -iname \\*.js -o -iname \\*.ts -o -iname \\*.tsx \\) -not -path '${excludedDirs.join('\' -not -path \'')}'`;
process.chdir(rootDir);
let files = (await execCommand(findCommand)).split('\n');
@@ -152,26 +155,59 @@ async function createPotFile(potFilePath) {
files.sort();
// Use this to get the list of files that are going to be processed. Useful
// to debug issues with files that shouldn't be in the list.
// console.info(files.join('\n'));
// Note: we previously used the xgettext utility, but it only partially
// supports TypeScript and doesn't support .tsx files at all. Besides, the
// TypeScript compiler now converts some `_('some string')` calls to
// `(0,locale1._)('some string')`, which cannot be detected by xgettext.
//
// So now we use this gettext-extractor utility, which seems to do the job.
// It supports .ts and .tsx files and appears to find the same strings as
// xgettext.
const baseArgs = [];
baseArgs.push('--from-code=utf-8');
baseArgs.push(`--output="${potFilePath}"`);
baseArgs.push('--language=JavaScript');
baseArgs.push('--copyright-holder="Laurent Cozic"');
baseArgs.push('--package-name=Joplin');
baseArgs.push('--package-version=1.0.0');
baseArgs.push('--keyword=_n:1,2');
const extractor = new GettextExtractor();
// In the following string:
//
// _('Hello %s', 'Scott')
//
// "Hello %s" is the `text` (or "msgstr" in gettext parlance) , and "Scott"
// is the `context` ("msgctxt").
//
// gettext-extractor allows adding both the text and context to the pot
// file, however we should avoid this because a change in the context string
// would mark the associated string as fuzzy. We want to avoid this because
// the point of splitting into text and context is that even if the context
// changes we don't need to retranslate the text. We use this for URLs for
// instance.
//
// Because of this, below we don't set the "context" property.
const parser = extractor
.createJsParser([
JsExtractors.callExpression('_', {
arguments: {
text: 0,
// context: 1,
},
}),
JsExtractors.callExpression('_n', {
arguments: {
text: 0,
textPlural: 1,
// context: 2,
},
}),
]);
for (const file of files) {
parser.parseFile(file);
}
extractor.savePotFile(potFilePath, {
'Project-Id-Version': 'Joplin',
'Content-Type': 'text/plain; charset=UTF-8',
});
let args = baseArgs.slice();
args = args.concat(files);
let xgettextPath = 'xgettext';
if (isMac()) xgettextPath = executablePath('xgettext'); // Needs to have been installed with `brew install gettext`
const cmd = `${xgettextPath} ${args.join(' ')}`;
const result = await execCommand(cmd);
if (result && result.trim()) console.error(result.trim());
await removePoHeaderDate(potFilePath);
}
@@ -346,7 +382,18 @@ function deletedStrings(oldStrings, newStrings) {
async function main() {
const argv = require('yargs').argv;
const potFilePath = `${localesDir}/joplin.pot`;
const missingStringsCheckOnly = !!argv['missing-strings-check-only'];
let potFilePath = `${localesDir}/joplin.pot`;
let tempPotFilePath = '';
if (missingStringsCheckOnly) {
tempPotFilePath = `${localesDir}/joplin-temp-${Math.floor(Math.random() * 10000000)}.pot`;
await fs.copy(potFilePath, tempPotFilePath);
potFilePath = tempPotFilePath;
}
const jsonLocalesDir = `${libDir}/locales`;
const defaultLocale = 'en_GB';
@@ -360,6 +407,8 @@ async function main() {
console.info(`Updated pot file. Total strings: ${oldPotStatus.untranslatedCount} => ${newPotStatus.untranslatedCount}`);
if (tempPotFilePath) await fs.remove(tempPotFilePath);
const deletedCount = oldPotStatus.untranslatedCount - newPotStatus.untranslatedCount;
if (deletedCount >= 5) {
if (argv['skip-missing-strings-check']) {
@@ -374,6 +423,8 @@ async function main() {
}
}
if (missingStringsCheckOnly) return;
await execCommand(`cp "${potFilePath}" ` + `"${localesDir}/${defaultLocale}.po"`);
fs.mkdirpSync(jsonLocalesDir, 0o755);
