1
0
mirror of https://github.com/laurent22/joplin.git synced 2026-03-09 09:47:34 +02:00

Compare commits

...

19 Commits

Author SHA1 Message Date
Laurent Cozic
dac3685a56 fix docker-compose 2021-01-18 08:15:14 +00:00
Laurent Cozic
bdf0e50b56 tags 2021-01-18 01:39:33 +00:00
Laurent Cozic
0b14d927ce Server release v1.7.1 2021-01-18 01:38:09 +00:00
Laurent Cozic
767041dddb release script 2021-01-18 01:33:35 +00:00
Laurent Cozic
df65b8e6de Clean up config 2021-01-17 19:39:27 +00:00
Laurent Cozic
58c2bb5916 Detect env 2021-01-17 18:41:17 +00:00
Laurent Cozic
dce2f2955f Merge branch 'docker_server_update' of github.com:laurent22/joplin into docker_server_update 2021-01-17 16:57:17 +00:00
Florian Jensen
fdbd790ce1 Server: fix Docker build (#4381) 2021-01-17 16:51:41 +00:00
Laurent Cozic
d979866f2f Update doc 2021-01-17 12:45:35 +00:00
Laurent Cozic
d3a53abe32 Various tweaks 2021-01-17 12:04:55 +00:00
Laurent Cozic
31ef1d675c update 2021-01-15 23:34:40 +00:00
Laurent Cozic
58b5f4830c Merge branch 'dev' into docker_server_update 2021-01-15 22:30:50 +00:00
Laurent Cozic
eb3493f648 Server: Fixed tests and clean up 2021-01-15 22:02:36 +00:00
Laurent Cozic
7fd4c28a5b Plugin Generator release v1.7.3 2021-01-15 17:04:18 +00:00
Laurent Cozic
d1b55aeceb Generator: Fixes #4360: Scripts were no longer being compiled 2021-01-15 17:03:38 +00:00
Laurent Cozic
92095c5f34 update 2021-01-15 16:51:02 +00:00
Laurent Cozic
413ec1a933 Server: Refactored to use Router class 2021-01-14 22:36:46 +00:00
Laurent Cozic
7ad29577f9 Server: Improved how routes can be defined 2021-01-14 18:27:59 +00:00
Laurent Cozic
7652a5a0a0 Server: Added tests for logout and fixed transaction deadlock 2021-01-14 17:18:27 +00:00
58 changed files with 1083 additions and 878 deletions

View File

@@ -1,9 +1,26 @@
# Example of local config, for development:
# =============================================================================
# PRODUCTION CONFIG EXAMPLE
# -----------------------------------------------------------------------------
# By default it will use SQLite, but that's mostly to test and evaluate the
# server. So you'll want to specify db connection settings to use Postgres.
# =============================================================================
#
# JOPLIN_BASE_URL=http://localhost:22300
# JOPLIN_PORT=22300
# APP_BASE_URL=https://example.com/joplin
# APP_PORT=22300
#
# DB_CLIENT=pg
# POSTGRES_PASSWORD=joplin
# POSTGRES_DATABASE=joplin
# POSTGRES_USER=joplin
# POSTGRES_PORT=5432
# POSTGRES_HOST=localhost
# Example of config for production:
# =============================================================================
# DEV CONFIG EXAMPLE
# -----------------------------------------------------------------------------
# Example of local config, for development. In dev mode, you would usually use
# SQLite so database settings are not needed.
# =============================================================================
#
# JOPLIN_BASE_URL=https://example.com/joplin
# JOPLIN_PORT=22300
# APP_BASE_URL=http://localhost:22300
# APP_PORT=22300

View File

@@ -6,6 +6,7 @@ _releases/
**/node_modules/
Assets/
docs/
packages/plugins/**/dist
packages/server/dist/
highlight.pack.js
Modules/TinyMCE/IconPack/postinstall.js
@@ -1478,30 +1479,6 @@ packages/server/src/config-tests.js.map
packages/server/src/config.d.ts
packages/server/src/config.js
packages/server/src/config.js.map
packages/server/src/controllers/BaseController.d.ts
packages/server/src/controllers/BaseController.js
packages/server/src/controllers/BaseController.js.map
packages/server/src/controllers/api/OAuthController.d.ts
packages/server/src/controllers/api/OAuthController.js
packages/server/src/controllers/api/OAuthController.js.map
packages/server/src/controllers/factory.d.ts
packages/server/src/controllers/factory.js
packages/server/src/controllers/factory.js.map
packages/server/src/controllers/index/FileController.d.ts
packages/server/src/controllers/index/FileController.js
packages/server/src/controllers/index/FileController.js.map
packages/server/src/controllers/index/HomeController.d.ts
packages/server/src/controllers/index/HomeController.js
packages/server/src/controllers/index/HomeController.js.map
packages/server/src/controllers/index/LoginController.d.ts
packages/server/src/controllers/index/LoginController.js
packages/server/src/controllers/index/LoginController.js.map
packages/server/src/controllers/index/NotificationController.d.ts
packages/server/src/controllers/index/NotificationController.js
packages/server/src/controllers/index/NotificationController.js.map
packages/server/src/controllers/index/UserController.d.ts
packages/server/src/controllers/index/UserController.js
packages/server/src/controllers/index/UserController.js.map
packages/server/src/db.d.ts
packages/server/src/db.js
packages/server/src/db.js.map
@@ -1577,9 +1554,6 @@ packages/server/src/routes/api/files.js.map
packages/server/src/routes/api/files.test.d.ts
packages/server/src/routes/api/files.test.js
packages/server/src/routes/api/files.test.js.map
packages/server/src/routes/api/index.d.ts
packages/server/src/routes/api/index.js
packages/server/src/routes/api/index.js.map
packages/server/src/routes/api/ping.d.ts
packages/server/src/routes/api/ping.js
packages/server/src/routes/api/ping.js.map
@@ -1601,6 +1575,9 @@ packages/server/src/routes/index/files.js.map
packages/server/src/routes/index/home.d.ts
packages/server/src/routes/index/home.js
packages/server/src/routes/index/home.js.map
packages/server/src/routes/index/home.test.d.ts
packages/server/src/routes/index/home.test.js
packages/server/src/routes/index/home.test.js.map
packages/server/src/routes/index/login.d.ts
packages/server/src/routes/index/login.js
packages/server/src/routes/index/login.js.map
@@ -1610,9 +1587,15 @@ packages/server/src/routes/index/login.test.js.map
packages/server/src/routes/index/logout.d.ts
packages/server/src/routes/index/logout.js
packages/server/src/routes/index/logout.js.map
packages/server/src/routes/index/logout.test.d.ts
packages/server/src/routes/index/logout.test.js
packages/server/src/routes/index/logout.test.js.map
packages/server/src/routes/index/notifications.d.ts
packages/server/src/routes/index/notifications.js
packages/server/src/routes/index/notifications.js.map
packages/server/src/routes/index/notifications.test.d.ts
packages/server/src/routes/index/notifications.test.js
packages/server/src/routes/index/notifications.test.js.map
packages/server/src/routes/index/users.d.ts
packages/server/src/routes/index/users.js
packages/server/src/routes/index/users.js.map
@@ -1637,6 +1620,9 @@ packages/server/src/tools/dbTools.js.map
packages/server/src/tools/generate-types.d.ts
packages/server/src/tools/generate-types.js
packages/server/src/tools/generate-types.js.map
packages/server/src/utils/Router.d.ts
packages/server/src/utils/Router.js
packages/server/src/utils/Router.js.map
packages/server/src/utils/TransactionHandler.d.ts
packages/server/src/utils/TransactionHandler.js
packages/server/src/utils/TransactionHandler.js.map

39
.gitignore vendored
View File

@@ -1467,30 +1467,6 @@ packages/server/src/config-tests.js.map
packages/server/src/config.d.ts
packages/server/src/config.js
packages/server/src/config.js.map
packages/server/src/controllers/BaseController.d.ts
packages/server/src/controllers/BaseController.js
packages/server/src/controllers/BaseController.js.map
packages/server/src/controllers/api/OAuthController.d.ts
packages/server/src/controllers/api/OAuthController.js
packages/server/src/controllers/api/OAuthController.js.map
packages/server/src/controllers/factory.d.ts
packages/server/src/controllers/factory.js
packages/server/src/controllers/factory.js.map
packages/server/src/controllers/index/FileController.d.ts
packages/server/src/controllers/index/FileController.js
packages/server/src/controllers/index/FileController.js.map
packages/server/src/controllers/index/HomeController.d.ts
packages/server/src/controllers/index/HomeController.js
packages/server/src/controllers/index/HomeController.js.map
packages/server/src/controllers/index/LoginController.d.ts
packages/server/src/controllers/index/LoginController.js
packages/server/src/controllers/index/LoginController.js.map
packages/server/src/controllers/index/NotificationController.d.ts
packages/server/src/controllers/index/NotificationController.js
packages/server/src/controllers/index/NotificationController.js.map
packages/server/src/controllers/index/UserController.d.ts
packages/server/src/controllers/index/UserController.js
packages/server/src/controllers/index/UserController.js.map
packages/server/src/db.d.ts
packages/server/src/db.js
packages/server/src/db.js.map
@@ -1566,9 +1542,6 @@ packages/server/src/routes/api/files.js.map
packages/server/src/routes/api/files.test.d.ts
packages/server/src/routes/api/files.test.js
packages/server/src/routes/api/files.test.js.map
packages/server/src/routes/api/index.d.ts
packages/server/src/routes/api/index.js
packages/server/src/routes/api/index.js.map
packages/server/src/routes/api/ping.d.ts
packages/server/src/routes/api/ping.js
packages/server/src/routes/api/ping.js.map
@@ -1590,6 +1563,9 @@ packages/server/src/routes/index/files.js.map
packages/server/src/routes/index/home.d.ts
packages/server/src/routes/index/home.js
packages/server/src/routes/index/home.js.map
packages/server/src/routes/index/home.test.d.ts
packages/server/src/routes/index/home.test.js
packages/server/src/routes/index/home.test.js.map
packages/server/src/routes/index/login.d.ts
packages/server/src/routes/index/login.js
packages/server/src/routes/index/login.js.map
@@ -1599,9 +1575,15 @@ packages/server/src/routes/index/login.test.js.map
packages/server/src/routes/index/logout.d.ts
packages/server/src/routes/index/logout.js
packages/server/src/routes/index/logout.js.map
packages/server/src/routes/index/logout.test.d.ts
packages/server/src/routes/index/logout.test.js
packages/server/src/routes/index/logout.test.js.map
packages/server/src/routes/index/notifications.d.ts
packages/server/src/routes/index/notifications.js
packages/server/src/routes/index/notifications.js.map
packages/server/src/routes/index/notifications.test.d.ts
packages/server/src/routes/index/notifications.test.js
packages/server/src/routes/index/notifications.test.js.map
packages/server/src/routes/index/users.d.ts
packages/server/src/routes/index/users.js
packages/server/src/routes/index/users.js.map
@@ -1626,6 +1608,9 @@ packages/server/src/tools/dbTools.js.map
packages/server/src/tools/generate-types.d.ts
packages/server/src/tools/generate-types.js
packages/server/src/tools/generate-types.js.map
packages/server/src/utils/Router.d.ts
packages/server/src/utils/Router.js
packages/server/src/utils/Router.js.map
packages/server/src/utils/TransactionHandler.d.ts
packages/server/src/utils/TransactionHandler.js
packages/server/src/utils/TransactionHandler.js.map

View File

@@ -1,3 +0,0 @@
FROM postgres:13.1
EXPOSE 5432

View File

@@ -16,8 +16,15 @@ WORKDIR /home/$user
RUN mkdir /home/$user/logs
# Install the root scripts but don't run postinstall (which would bootstrap
# and build TypeScript files, but we don't have the TypeScript files at
# this point)
COPY --chown=$user:$user package*.json ./
RUN npm install --ignore-scripts
# To take advantage of the Docker cache, we first copy all the package.json
# and package-lock.json files, as they rarely change? and then bootstrap
# and package-lock.json files, as they rarely change, and then bootstrap
# all the packages.
#
# Note that bootstrapping the packages will run all the postinstall
@@ -27,19 +34,10 @@ RUN mkdir /home/$user/logs
# We can't run bootstrap with "--ignore-scripts" because that would
# prevent certain sub-packages, such as sqlite3, from being built
COPY --chown=$user:$user package*.json ./
# Install the root scripts but don't run postinstall (which would bootstrap
# and build TypeScript files, but we don't have the TypeScript files at
# this point)
RUN npm install --ignore-scripts
COPY --chown=$user:$user packages/fork-sax/package*.json ./packages/fork-sax/
COPY --chown=$user:$user packages/lib/package*.json ./packages/lib/
COPY --chown=$user:$user packages/renderer/package*.json ./packages/renderer/
COPY --chown=$user:$user packages/tools/package*.json ./packages/tools/
COPY --chown=$user:$user packages/server/package*.json ./packages/server/
COPY --chown=$user:$user packages/lib/package*.json ./packages/lib/
COPY --chown=$user:$user lerna.json .
COPY --chown=$user:$user tsconfig.json .
@@ -50,22 +48,29 @@ COPY --chown=$user:$user packages/turndown ./packages/turndown
COPY --chown=$user:$user packages/turndown-plugin-gfm ./packages/turndown-plugin-gfm
COPY --chown=$user:$user packages/fork-htmlparser2 ./packages/fork-htmlparser2
RUN ls -la /home/$user
# Then bootstrap only, without compiling the TypeScript files
RUN npm run bootstrap
# We have a separate step for the server files because they are more likely to
# change.
COPY --chown=$user:$user packages/server/package*.json ./packages/server/
RUN npm run bootstrapServerOnly
# Now copy the source files. Put lib and server last as they are more likely to change.
COPY --chown=$user:$user packages/fork-sax ./packages/fork-sax
COPY --chown=$user:$user packages/lib ./packages/lib
COPY --chown=$user:$user packages/renderer ./packages/renderer
COPY --chown=$user:$user packages/tools ./packages/tools
COPY --chown=$user:$user packages/lib ./packages/lib
COPY --chown=$user:$user packages/server ./packages/server
# Finally build everything, in particular the TypeScript files.
RUN npm run build
EXPOSE ${JOPLIN_PORT}
ENV RUNNING_IN_DOCKER=1
EXPOSE ${APP_PORT}
CMD [ "npm", "--prefix", "packages/server", "start" ]

15
docker-compose.db-dev.yml Normal file
View File

@@ -0,0 +1,15 @@
# For development this compose file starts the database only. The app can then
# be started using `npm run start-dev`, which is useful for development, because
# it means the app Docker file doesn't have to be rebuilt on each change.
version: '3'
services:
db:
image: postgres:13.1
ports:
- "5432:5432"
environment:
- POSTGRES_PASSWORD=joplin
- POSTGRES_USER=joplin
- POSTGRES_DB=joplin

View File

@@ -1,28 +1,27 @@
# For development, the easiest might be to only start the Postgres container and
# run the app directly with `npm start`. Or use sqlite3.
# This compose file can be used in development to run both the database and app
# within Docker.
version: '3'
services:
# app:
# build:
# context: .
# dockerfile: Dockerfile.server-dev
# ports:
# - "22300:22300"
# # volumes:
# # - ./packages/server/:/var/www/joplin/packages/server/
# # - /var/www/joplin/packages/server/node_modules/
db:
app:
build:
context: .
dockerfile: Dockerfile.db
dockerfile: Dockerfile.server
ports:
- "22300:22300"
environment:
- DB_CLIENT=pg
- POSTGRES_PASSWORD=joplin
- POSTGRES_DATABASE=joplin
- POSTGRES_USER=joplin
- POSTGRES_PORT=5432
- POSTGRES_HOST=localhost
db:
image: postgres:13.1
ports:
- "5432:5432"
environment:
# TODO: Considering the database is only exposed to the
# application, and not to the outside world, is there a need to
# pick a secure password?
- POSTGRES_PASSWORD=joplin
- POSTGRES_USER=joplin
- POSTGRES_DB=joplin
- POSTGRES_DB=joplin

View File

@@ -1,40 +1,34 @@
# This is a sample docker-compose file that can be used to run Joplin Server
# along with a PostgreSQL server.
#
# All environment variables are optional. If you don't set them, you will get a
# warning from docker-compose, however the app should use working defaults.
version: '3'
services:
app:
environment:
- JOPLIN_BASE_URL=${JOPLIN_BASE_URL}
- JOPLIN_PORT=${JOPLIN_PORT}
restart: unless-stopped
build:
context: .
dockerfile: Dockerfile.server
ports:
- "${JOPLIN_PORT}:${JOPLIN_PORT}"
# volumes:
# # Mount the server directory so that it's possible to edit file
# # while the container is running. However don't mount the
# # node_modules directory which will be specific to the Docker
# # image (eg native modules will be built for Ubuntu, while the
# # container might be running in Windows)
# # https://stackoverflow.com/a/37898591/561309
# - ./packages/server:/home/joplin/packages/server
# - /home/joplin/packages/server/node_modules/
db:
restart: unless-stopped
# By default, the Postgres image saves the data to a Docker volume,
# so it persists whenever the server is restarted using
# `docker-compose up`. Note that it would however be deleted when
# running `docker-compose down`.
build:
context: .
dockerfile: Dockerfile.db
image: postgres:13.1
ports:
- "5432:5432"
restart: unless-stopped
environment:
# TODO: Considering the database is only exposed to the
# application, and not to the outside world, is there a need to
# pick a secure password?
- POSTGRES_PASSWORD=joplin
- POSTGRES_USER=joplin
- POSTGRES_DB=joplin
- APP_PORT=22300
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_DB=${POSTGRES_DATABASE}
app:
image: joplin/server:latest
depends_on:
- db
ports:
- "22300:22300"
restart: unless-stopped
environment:
- APP_BASE_URL=${APP_BASE_URL}
- DB_CLIENT=pg
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_DATABASE=${POSTGRES_DATABASE}
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PORT=${POSTGRES_PORT}
- POSTGRES_HOST=db

View File

@@ -8,6 +8,7 @@
"license": "MIT",
"scripts": {
"bootstrap": "lerna bootstrap --no-ci",
"bootstrapServerOnly": "lerna bootstrap --no-ci --include-dependents --include-dependencies --scope @joplin/server",
"bootstrapIgnoreScripts": "lerna bootstrap --ignore-scripts --no-ci",
"build": "lerna run build && npm run tsc",
"buildApiDoc": "npm start --prefix=packages/app-cli -- apidoc ../../readme/api/references/rest_api.md",

View File

@@ -586,4 +586,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: a0d1ca4e385ef46f9103f02206ebf612107dd508
COCOAPODS: 1.9.3
COCOAPODS: 1.10.1

View File

@@ -233,7 +233,7 @@ function main(processArgv) {
const configs = {
// Builds the main src/index.ts and copy the extra content from /src to
// /dist including scripts, CSS and any other asset.
buildMain: pluginConfig,
buildMain: [pluginConfig],
// Builds the extra scripts as defined in plugin.config.json. When doing
// so, some JavaScript files that were copied in the previous might be
@@ -247,7 +247,7 @@ function main(processArgv) {
// run without this. So we give it an entry that we know is going to
// exist and output in the publish dir. Then the plugin will delete this
// temporary file before packaging the plugin.
createArchive: createArchiveConfig,
createArchive: [createArchiveConfig],
};
// If we are running the first config step, we clean up and create the build

View File

@@ -1,6 +1,6 @@
{
"name": "generator-joplin",
"version": "1.7.2",
"version": "1.7.3",
"lockfileVersion": 1,
"requires": true,
"dependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "generator-joplin",
"version": "1.7.2",
"version": "1.7.3",
"description": "Scaffolds out a new Joplin plugin",
"homepage": "https://github.com/laurent22/joplin/tree/dev/packages/generator-joplin",
"author": {

View File

@@ -4,64 +4,86 @@
First copy `.env-sample` to `.env` and edit the values in there:
- `JOPLIN_BASE_URL`: This is the base public URL where the service will be running. For example, if you want it to run from `https://example.com/joplin`, this is what you should set the URL to. The base URL can include the port.
- `JOPLIN_PORT`: The local port on which the Docker container will listen. You would typically map this port to 443 (TLS) with a reverse proxy.
- `APP_BASE_URL`: This is the base public URL where the service will be running. For example, if you want it to run from `https://example.com/joplin`, this is what you should set the URL to. The base URL can include the port.
- `APP_PORT`: The local port on which the Docker container will listen. You would typically map this port to 443 (TLS) with a reverse proxy.
## Install application
## Running the server
To start the server with default configuration, run:
```shell
wget https://github.com/laurent22/joplin/archive/server-v1.6.4.tar.gz
tar xzvf server-v1.6.4.tar.gz
mv joplin-server-v1.6.4 joplin-server
cd joplin-server
docker-compose --file docker-compose.server.yml up --detach
docker run --env-file .env -p 22300:22300 joplin/server:latest
```
This will start the server, which will listen on port **22300** on **localhost**.
This will start the server, which will listen on port **22300** on **localhost**. By default it will use SQLite, which allows you to test the app without setting up a database. To run it for production though, you'll want to connect the container to a database, as described below.
Due to the restart policy defined in the docker-compose file, the server will be restarted automatically whenever the host reboots.
## Setup the database
You can setup the container to either use an existing PostgreSQL server, or connect it to a new one using docker-compose
### Using an existing PostgreSQL server
To use an existing PostgresSQL server, set the following environment variables in the .env file:
```conf
DB_CLIENT=pg
POSTGRES_PASSWORD=joplin
POSTGRES_DATABASE=joplin
POSTGRES_USER=joplin
POSTGRES_PORT=5432
POSTGRES_HOST=localhost
```
Make sure that the provided database and user exist as the server will not create them.
### Using docker-compose
A [sample docker-compose file](https://github.com/laurent22/joplin/blob/dev/docker-compose.server.yml
) is available to show how to use Docker to install both the database and server and connect them:
## Setup reverse proxy
You will then need to expose this server to the internet by setting up a reverse proxy, and that will depend on how your server is currently configured, and whether you already have Nginx or Apache running:
Once Joplin Server is running, you will then need to expose it to the internet by setting up a reverse proxy, and that will depend on how your server is currently configured, and whether you already have Nginx or Apache running:
- [Apache Reverse Proxy](https://httpd.apache.org/docs/current/mod/mod_proxy.html)
- [Nginx Reverse Proxy](https://docs.nginx.com/nginx/admin-guide/web-server/reverse-proxy/)
## Setup admin user
## Setup the website
For the following instructions, we'll assume that the Joplin server is running on `https://example.com/joplin`.
Once the server is exposed to the internet, you can open the admin UI and get it ready for synchronisation. For the following instructions, we'll assume that the Joplin server is running on `https://example.com/joplin`.
By default, the instance will be setup with an admin user with email **admin@localhost** and password **admin** and you should change this by opening the admin UI. To do so, open `https://example.com/joplin/login`. From there, go to Profile and change the admin password.
### Secure the admin user
## Setup a user for sync
By default, the instance will be setup with an admin user with email **admin@localhost** and password **admin** and you should change this. To do so, open `https://example.com/joplin/login` and login as admin. Then go to the Profile section and change the admin password.
While the admin user can be used for synchronisation, it is recommended to create a separate non-admin user for it. To do so, open the admin UI and navigate to the Users page - from there you can create a new user.
### Create a user for sync
Once this is done, you can use the email and password you specified to sync this user account with your Joplin clients.
While the admin user can be used for synchronisation, it is recommended to create a separate non-admin user for it. To do so, navigate to the Users page - from there you can create a new user. Once this is done, you can use the email and password you specified to sync this user account with your Joplin clients.
## Checking the logs
Checking the log can be done the standard Docker way:
```shell
```bash
# With Docker:
docker logs --follow CONTAINER
# With docker-compose:
docker-compose --file docker-compose.server.yml logs
```
# Set up for development
# Setup for development
## Setting up the database
## Setup up the database
### SQLite
The server supports SQLite for development and test units. To use it, open `src/config-dev.ts` and uncomment the sqlite3 config.
By default the server supports SQLite for development, so nothing needs to be setup.
### PostgreSQL
It's best to use PostgreSQL as this is what is used in production, however it requires Docker.
To use it, from the monorepo root, run `docker-compose --file docker-compose.server-dev.yml up`, which will start the PostgreSQL database.
To use Postgres, from the monorepo root, run `docker-compose --file docker-compose.server-dev.yml up`, which will start the PostgreSQL database.
## Starting the server
From `packages/server`, run `npm run start-dev`
From `packages/server`, run `npm run start-dev`

View File

@@ -1,6 +1,6 @@
{
"name": "@joplin/server",
"version": "1.7.0",
"version": "1.7.1",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -5938,6 +5938,11 @@
"integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==",
"dev": true
},
"node-env-file": {
"version": "0.1.8",
"resolved": "https://registry.npmjs.org/node-env-file/-/node-env-file-0.1.8.tgz",
"integrity": "sha1-/Mt7BQ9zW1oz2p65N89vGrRX+2k="
},
"node-int64": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "@joplin/server",
"version": "1.7.0",
"version": "1.7.1",
"private": true,
"scripts": {
"start-dev": "nodemon --config nodemon.json dist/app.js --env dev",
@@ -26,6 +26,7 @@
"markdown-it": "^12.0.4",
"mustache": "^3.1.0",
"nanoid": "^2.1.1",
"node-env-file": "^0.1.8",
"nodemon": "^2.0.6",
"pg": "^8.5.1",
"query-string": "^6.8.3",

View File

@@ -5,32 +5,30 @@ import * as Koa from 'koa';
import * as fs from 'fs-extra';
import { argv } from 'yargs';
import Logger, { LoggerWrapper, TargetType } from '@joplin/lib/Logger';
import config, { initConfig, baseUrl } from './config';
import configDev from './config-dev';
import configProd from './config-prod';
import configBuildTypes from './config-buildTypes';
import config, { initConfig, runningInDocker, EnvVariables } from './config';
import { createDb, dropDb } from './tools/dbTools';
import { dropTables, connectDb, disconnectDb, migrateDb, waitForConnection } from './db';
import { dropTables, connectDb, disconnectDb, migrateDb, waitForConnection, sqliteFilePath } from './db';
import modelFactory from './models/factory';
import { AppContext, Config, Env } from './utils/types';
import { AppContext, Env } from './utils/types';
import FsDriverNode from '@joplin/lib/fs-driver-node';
import routeHandler from './middleware/routeHandler';
import notificationHandler from './middleware/notificationHandler';
import ownerHandler from './middleware/ownerHandler';
const nodeEnvFile = require('node-env-file');
const { shimInit } = require('@joplin/lib/shim-init-node.js');
shimInit();
const env: Env = argv.env as Env || Env.Prod;
interface Configs {
[name: string]: Config;
}
const configs: Configs = {
dev: configDev,
prod: configProd,
buildTypes: configBuildTypes,
const envVariables: Record<Env, EnvVariables> = {
dev: {
SQLITE_DATABASE: 'dev',
},
buildTypes: {
SQLITE_DATABASE: 'buildTypes',
},
prod: {}, // Actually get the env variables from the environment
};
let appLogger_: LoggerWrapper = null;
@@ -52,11 +50,31 @@ app.use(ownerHandler);
app.use(notificationHandler);
app.use(routeHandler);
async function main() {
const configObject: Config = configs[env];
if (!configObject) throw new Error(`Invalid env: ${env}`);
function markPasswords(o: Record<string, any>): Record<string, any> {
const output: Record<string, any> = {};
initConfig(configObject);
for (const k of Object.keys(o)) {
if (k.toLowerCase().includes('password')) {
output[k] = '********';
} else {
output[k] = o[k];
}
}
return output;
}
async function main() {
if (argv.envFile) {
nodeEnvFile(argv.envFile);
}
if (!envVariables[env]) throw new Error(`Invalid env: ${env}`);
initConfig({
...envVariables[env],
...process.env,
});
await fs.mkdirp(config().logDir);
Logger.fsDriver_ = new FsDriverNode();
@@ -90,8 +108,11 @@ async function main() {
await createDb(config().database);
} else {
appLogger().info(`Starting server (${env}) on port ${config().port} and PID ${process.pid}...`);
appLogger().info('Public base URL:', baseUrl());
appLogger().info('DB Config:', config().database);
appLogger().info('Running in Docker:', runningInDocker());
appLogger().info('Public base URL:', config().baseUrl);
appLogger().info('Log dir:', config().logDir);
appLogger().info('DB Config:', markPasswords(config().database));
if (config().database.client === 'sqlite3') appLogger().info('DB file:', sqliteFilePath(config().database.name));
const appContext = app.context as AppContext;
@@ -104,13 +125,13 @@ async function main() {
appLogger().info('Connection check:', connectionCheckLogInfo);
appContext.env = env;
appContext.db = connectionCheck.connection;
appContext.models = modelFactory(appContext.db, baseUrl());
appContext.models = modelFactory(appContext.db, config().baseUrl);
appContext.appLogger = appLogger;
appLogger().info('Migrating database...');
await migrateDb(appContext.db);
appLogger().info(`Call this for testing: \`curl ${baseUrl()}/api/ping\``);
appLogger().info(`Call this for testing: \`curl ${config().baseUrl}/api/ping\``);
app.listen(config().port);
}

View File

@@ -1,21 +0,0 @@
import { Config } from './utils/types';
import * as pathUtils from 'path';
const rootDir = pathUtils.dirname(__dirname);
const viewDir = `${pathUtils.dirname(__dirname)}/src/views`;
const envPort = Number(process.env.JOPLIN_PORT);
const config: Config = {
port: (envPort && !isNaN(envPort)) ? envPort : 22300,
viewDir: viewDir,
rootDir: rootDir,
layoutDir: `${viewDir}/layouts`,
logDir: `${rootDir}/logs`,
database: {
client: 'pg',
name: 'joplin',
},
};
export default config;

View File

@@ -1,13 +0,0 @@
import { Config } from './utils/types';
import configBase from './config-base';
const config: Config = {
...configBase,
database: {
name: 'buildTypes',
client: 'sqlite3',
asyncStackTraces: true,
},
};
export default config;

View File

@@ -1,22 +0,0 @@
import { Config } from './utils/types';
import configBase from './config-base';
const config: Config = {
...configBase,
database: {
name: 'dev',
client: 'sqlite3',
asyncStackTraces: true,
},
// database: {
// client: 'pg',
// name: 'joplin',
// user: 'joplin',
// host: 'localhost',
// port: 5432,
// password: 'joplin',
// asyncStackTraces: true,
// },
};
export default config;

View File

@@ -1,20 +0,0 @@
import { Config } from './utils/types';
import configBase from './config-base';
const rootDir = '/home/joplin/';
const config: Config = {
...configBase,
rootDir: rootDir,
logDir: `${rootDir}/logs`,
database: {
client: 'pg',
name: 'joplin',
user: 'joplin',
host: 'db',
port: 5432,
password: 'joplin',
},
};
export default config;

View File

@@ -1,13 +0,0 @@
import { Config } from './utils/types';
import configBase from './config-base';
const config: Config = {
...configBase,
database: {
name: 'DYNAMIC',
client: 'sqlite3',
asyncStackTraces: true,
},
};
export default config;

View File

@@ -1,28 +1,93 @@
import { rtrimSlashes } from '@joplin/lib/path-utils';
import { Config } from './utils/types';
import { Config, DatabaseConfig, DatabaseConfigClient } from './utils/types';
import * as pathUtils from 'path';
let baseConfig_: Config = null;
let baseUrl_: string = null;
export interface EnvVariables {
APP_BASE_URL?: string;
APP_PORT?: string;
DB_CLIENT?: string;
RUNNING_IN_DOCKER?: string;
export function initConfig(baseConfig: Config) {
baseConfig_ = baseConfig;
POSTGRES_PASSWORD?: string;
POSTGRES_DATABASE?: string;
POSTGRES_USER?: string;
POSTGRES_HOST?: string;
POSTGRES_PORT?: string;
SQLITE_DATABASE?: string;
}
let runningInDocker_: boolean = false;
export function runningInDocker(): boolean {
return runningInDocker_;
}
function databaseHostFromEnv(runningInDocker: boolean, env: EnvVariables): string {
if (env.POSTGRES_HOST) {
// When running within Docker, the app localhost is different from the
// host's localhost. To access the latter, Docker defines a special host
// called "host.docker.internal", so here we swap the values if necessary.
if (runningInDocker && ['localhost', '127.0.0.1'].includes(env.POSTGRES_HOST)) {
return 'host.docker.internal';
} else {
return env.POSTGRES_HOST;
}
}
return null;
}
function databaseConfigFromEnv(runningInDocker: boolean, env: EnvVariables): DatabaseConfig {
if (env.DB_CLIENT === 'pg') {
return {
client: DatabaseConfigClient.PostgreSQL,
name: env.POSTGRES_DATABASE || 'joplin',
user: env.POSTGRES_USER || 'joplin',
password: env.POSTGRES_PASSWORD || 'joplin',
port: env.POSTGRES_PORT ? Number(env.POSTGRES_PORT) : 5432,
host: databaseHostFromEnv(runningInDocker, env) || 'localhost',
};
}
return {
client: DatabaseConfigClient.SQLite,
name: env.SQLITE_DATABASE || 'prod',
asyncStackTraces: true,
};
}
function baseUrlFromEnv(env: any, appPort: number): string {
if (env.APP_BASE_URL) {
return rtrimSlashes(env.APP_BASE_URL);
} else {
return `http://localhost:${appPort}`;
}
}
let config_: Config = null;
export function initConfig(env: EnvVariables) {
runningInDocker_ = !!env.RUNNING_IN_DOCKER;
const rootDir = pathUtils.dirname(__dirname);
const viewDir = `${pathUtils.dirname(__dirname)}/src/views`;
const appPort = env.APP_PORT ? Number(env.APP_PORT) : 22300;
config_ = {
rootDir: rootDir,
viewDir: viewDir,
layoutDir: `${viewDir}/layouts`,
logDir: `${rootDir}/logs`,
database: databaseConfigFromEnv(runningInDocker_, env),
port: appPort,
baseUrl: baseUrlFromEnv(env, appPort),
};
}
function config(): Config {
if (!baseConfig_) throw new Error('Config has not been initialized!');
return baseConfig_;
}
export function baseUrl() {
if (baseUrl_) return baseUrl_;
if (process.env.JOPLIN_BASE_URL) {
baseUrl_ = rtrimSlashes(process.env.JOPLIN_BASE_URL);
} else {
baseUrl_ = `http://localhost:${config().port}`;
}
return baseUrl_;
if (!config_) throw new Error('Config has not been initialized!');
return config_;
}
export default config;

View File

@@ -47,15 +47,15 @@ export interface ConnectionCheckResult {
connection: DbConnection;
}
export function sqliteFilePath(dbConfig: DatabaseConfig): string {
return `${sqliteDbDir}/db-${dbConfig.name}.sqlite`;
export function sqliteFilePath(name: string): string {
return `${sqliteDbDir}/db-${name}.sqlite`;
}
export function makeKnexConfig(dbConfig: DatabaseConfig): KnexDatabaseConfig {
const connection: DbConfigConnection = {};
if (dbConfig.client === 'sqlite3') {
connection.filename = sqliteFilePath(dbConfig);
connection.filename = sqliteFilePath(dbConfig.name);
} else {
connection.database = dbConfig.name;
connection.host = dbConfig.host;

View File

@@ -4,47 +4,80 @@ import { defaultAdminEmail, defaultAdminPassword, NotificationLevel } from '../d
import { _ } from '@joplin/lib/locale';
import Logger from '@joplin/lib/Logger';
import * as MarkdownIt from 'markdown-it';
import config from '../config';
const logger = Logger.create('notificationHandler');
async function handleChangeAdminPasswordNotification(ctx: AppContext) {
if (!ctx.owner.is_admin) return;
const defaultAdmin = await ctx.models.user().login(defaultAdminEmail, defaultAdminPassword);
const notificationModel = ctx.models.notification({ userId: ctx.owner.id });
if (defaultAdmin) {
await notificationModel.add(
'change_admin_password',
NotificationLevel.Important,
_('The default admin password is insecure and has not been changed! [Change it now](%s)', await ctx.models.user().profileUrl())
);
} else {
await notificationModel.markAsRead('change_admin_password');
}
if (config().database.client === 'sqlite3' && ctx.env === 'prod') {
await notificationModel.add(
'using_sqlite_in_prod',
NotificationLevel.Important,
'The server is currently using SQLite3 as a database. It is not recommended in production as it is slow and can cause locking issues. Please see the README for information on how to change it.'
);
}
}
async function handleSqliteInProdNotification(ctx: AppContext) {
if (!ctx.owner.is_admin) return;
const notificationModel = ctx.models.notification({ userId: ctx.owner.id });
if (config().database.client === 'sqlite3' && ctx.env === 'prod') {
await notificationModel.add(
'using_sqlite_in_prod',
NotificationLevel.Important,
'The server is currently using SQLite3 as a database. It is not recommended in production as it is slow and can cause locking issues. Please see the README for information on how to change it.'
);
}
}
async function makeNotificationViews(ctx: AppContext): Promise<NotificationView[]> {
const markdownIt = new MarkdownIt();
const notificationModel = ctx.models.notification({ userId: ctx.owner.id });
const notifications = await notificationModel.allUnreadByUserId(ctx.owner.id);
const views: NotificationView[] = [];
for (const n of notifications) {
views.push({
id: n.id,
messageHtml: markdownIt.render(n.message),
level: n.level === NotificationLevel.Important ? 'warning' : 'info',
closeUrl: notificationModel.closeUrl(n.id),
});
}
return views;
}
// The role of this middleware is to inspect the system and to generate
// notifications for any issue it finds. It is only active for logged in users
// on the website. It is inactive for API calls.
export default async function(ctx: AppContext, next: KoaNext): Promise<void> {
ctx.notifications = [];
try {
if (isApiRequest(ctx)) return next();
if (!ctx.owner) return next();
const user = ctx.owner;
if (!user) return next();
const notificationModel = ctx.models.notification({ userId: user.id });
if (user.is_admin) {
const defaultAdmin = await ctx.models.user().login(defaultAdminEmail, defaultAdminPassword);
if (defaultAdmin) {
await notificationModel.add(
'change_admin_password',
NotificationLevel.Important,
_('The default admin password is insecure and has not been changed! [Change it now](%s)', await ctx.models.user().profileUrl())
);
} else {
await notificationModel.markAsRead('change_admin_password');
}
}
const markdownIt = new MarkdownIt();
const notifications = await notificationModel.allUnreadByUserId(user.id);
const views: NotificationView[] = [];
for (const n of notifications) {
views.push({
id: n.id,
messageHtml: markdownIt.render(n.message),
level: n.level === NotificationLevel.Important ? 'warning' : 'info',
closeUrl: notificationModel.closeUrl(n.id),
});
}
ctx.notifications = views;
await handleChangeAdminPasswordNotification(ctx);
await handleSqliteInProdNotification(ctx);
ctx.notifications = await makeNotificationViews(ctx);
} catch (error) {
logger.error(error);
}

View File

@@ -1,7 +1,7 @@
import routes from '../routes/routes';
import { ErrorNotFound } from '../utils/errors';
import { ErrorForbidden, ErrorNotFound } from '../utils/errors';
import { routeResponseFormat, findMatchingRoute, Response, RouteResponseFormat, MatchedRoute } from '../utils/routeUtils';
import { AppContext, Env } from '../utils/types';
import { AppContext, Env, HttpMethod } from '../utils/types';
import mustacheService, { isView, View } from '../services/MustacheService';
export default async function(ctx: AppContext) {
@@ -13,7 +13,12 @@ export default async function(ctx: AppContext) {
const match = findMatchingRoute(ctx.path, routes);
if (match) {
const responseObject = await match.route.exec(match.subPath, ctx);
let responseObject = null;
const routeHandler = match.route.findEndPoint(ctx.request.method as HttpMethod, match.subPath.schema);
responseObject = await routeHandler(match.subPath, ctx);
if (!match.route.public && !ctx.owner) throw new ErrorForbidden();
if (responseObject instanceof Response) {
ctx.response = responseObject.response;
@@ -21,6 +26,7 @@ export default async function(ctx: AppContext) {
ctx.response.status = 200;
ctx.response.body = await mustacheService.renderView(responseObject, {
notifications: ctx.notifications || [],
hasNotifications: !!ctx.notifications && !!ctx.notifications.length,
owner: ctx.owner,
});
} else {

View File

@@ -102,16 +102,57 @@ export default abstract class BaseModel {
return false;
}
protected async withTransaction(fn: Function): Promise<void> {
// When using withTransaction, make sure any database call uses an instance
// of `this.db()` that was accessed within the `fn` callback, otherwise the
// transaction will be stuck!
//
// This for example, would result in a stuck transaction:
//
// const query = this.db(this.tableName).where('id', '=', id);
//
// this.withTransaction(async () => {
// await query.delete();
// });
//
// This is because withTransaction is going to swap the value of "this.db()"
// for as long as the transaction is active. So if the query is started
// outside the transaction, it will use the regular db connection and wait
// for the newly created transaction to finish, which will never happen.
//
// This is a bit of a leaky abstraction, which ideally should be improved
// but for now one just has to be aware of the caveat.
//
// The `name` argument is only for debugging, so that any stuck transaction
// can be more easily identified.
protected async withTransaction(fn: Function, name: string = null): Promise<void> {
const debugTransaction = false;
const debugTimerId = debugTransaction ? setTimeout(() => {
console.info('Transaction did not complete:', name, txIndex);
}, 5000) : null;
const txIndex = await this.transactionHandler_.start();
if (debugTransaction) console.info('START', name, txIndex);
try {
await fn();
} catch (error) {
await this.transactionHandler_.rollback(txIndex);
if (debugTransaction) {
console.info('ROLLBACK', name, txIndex);
clearTimeout(debugTimerId);
}
throw error;
}
if (debugTransaction) {
console.info('COMMIT', name, txIndex);
clearTimeout(debugTimerId);
}
await this.transactionHandler_.commit(txIndex);
}
@@ -197,7 +238,7 @@ export default abstract class BaseModel {
// Sanity check:
if (updatedCount !== 1) throw new ErrorBadRequest(`one row should have been updated, but ${updatedCount} row(s) were updated`);
}
});
}, 'BaseModel::save');
return toSave;
}
@@ -220,11 +261,6 @@ export default abstract class BaseModel {
if (!ids.length) throw new Error('no id provided');
const query = this.db(this.tableName).where({ id: ids[0] });
for (let i = 1; i < ids.length; i++) {
await query.orWhere({ id: ids[i] });
}
const trackChanges = this.trackChanges;
let itemsWithParentIds: AnyItemType[] = null;
@@ -233,13 +269,18 @@ export default abstract class BaseModel {
}
await this.withTransaction(async () => {
const query = this.db(this.tableName).where({ id: ids[0] });
for (let i = 1; i < ids.length; i++) {
await query.orWhere({ id: ids[i] });
}
const deletedCount = await query.del();
if (deletedCount !== ids.length) throw new Error(`${ids.length} row(s) should have been deleted by ${deletedCount} row(s) were deleted`);
if (trackChanges) {
for (const item of itemsWithParentIds) await this.handleChangeTracking({}, item, ChangeType.Delete);
}
});
}, 'BaseModel::delete');
}
}

View File

@@ -87,7 +87,7 @@ export default class FileModel extends BaseModel {
output[item.id] = segments.length ? (`root:/${segments.join('/')}:`) : 'root';
}
});
}, 'FileModel::itemFullPaths');
return output;
}
@@ -404,7 +404,7 @@ export default class FileModel extends BaseModel {
for (const childId of childrenIds) {
await this.delete(childId);
}
});
}, 'FileModel::deleteChildren');
}
public async delete(id: string, options: DeleteOptions = {}): Promise<void> {
@@ -427,7 +427,7 @@ export default class FileModel extends BaseModel {
}
await super.delete(id);
});
}, 'FileModel::delete');
}
}

View File

@@ -29,4 +29,9 @@ export default class SessionModel extends BaseModel {
return this.createUserSession(user.id);
}
public async logout(sessionId: string) {
if (!sessionId) return;
await this.delete(sessionId);
}
}

View File

@@ -2,10 +2,10 @@ import { createUserAndSession, beforeAllDb, afterAllTests, beforeEachDb, models,
import { File } from '../db';
import { ErrorForbidden, ErrorUnprocessableEntity } from '../utils/errors';
describe('NotificationModel', function() {
describe('UserModel', function() {
beforeAll(async () => {
await beforeAllDb('NotificationModel');
await beforeAllDb('UserModel');
});
afterAll(async () => {

View File

@@ -97,7 +97,7 @@ export default class UserModel extends BaseModel {
const rootFile = await fileModel.userRootFile();
await fileModel.delete(rootFile.id, { validationRules: { canDeleteRoot: true } });
await super.delete(id);
});
}, 'UserModel::delete');
}
public async save(object: User, options: SaveOptions = {}): Promise<User> {
@@ -114,7 +114,7 @@ export default class UserModel extends BaseModel {
const fileModel = this.models().file({ userId: newUser.id });
await fileModel.createRootFile();
}
});
}, 'UserModel::save');
return newUser;
}

View File

@@ -1,133 +1,98 @@
import { ErrorNotFound, ErrorMethodNotAllowed, ErrorBadRequest } from '../../utils/errors';
import { ErrorNotFound, ErrorBadRequest } from '../../utils/errors';
import { File } from '../../db';
import { bodyFields, formParse } from '../../utils/requestUtils';
import { SubPath, Route, respondWithFileContent } from '../../utils/routeUtils';
import { SubPath, respondWithFileContent } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { AppContext } from '../../utils/types';
import * as fs from 'fs-extra';
import { requestChangePagination, requestPagination } from '../../models/utils/pagination';
const route: Route = {
const router = new Router();
exec: async function(path: SubPath, ctx: AppContext) {
// console.info(`${ctx.method} ${path.id}${path.link ? `/${path.link}` : ''}`);
router.get('api/files/:id', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const fileId = path.id;
const file: File = await fileModel.entityFromItemId(fileId);
const loadedFile = await fileModel.load(file.id);
if (!loadedFile) throw new ErrorNotFound();
return fileModel.toApiOutput(loadedFile);
});
// -------------------------------------------
// ROUTE api/files/:id
// -------------------------------------------
router.patch('api/files/:id', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const fileId = path.id;
const inputFile: File = await bodyFields(ctx.req);
const existingFile: File = await fileModel.entityFromItemId(fileId);
const newFile = fileModel.fromApiInput(inputFile);
newFile.id = existingFile.id;
return fileModel.toApiOutput(await fileModel.save(newFile));
});
if (!path.link) {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const fileId = path.id;
if (ctx.method === 'GET') {
const file: File = await fileModel.entityFromItemId(fileId);
const loadedFile = await fileModel.load(file.id);
if (!loadedFile) throw new ErrorNotFound();
return fileModel.toApiOutput(loadedFile);
}
if (ctx.method === 'PATCH') {
const inputFile: File = await bodyFields(ctx.req);
const existingFile: File = await fileModel.entityFromItemId(fileId);
const newFile = fileModel.fromApiInput(inputFile);
newFile.id = existingFile.id;
return fileModel.toApiOutput(await fileModel.save(newFile));
}
if (ctx.method === 'DELETE') {
try {
const file: File = await fileModel.entityFromItemId(fileId, { mustExist: false });
if (!file.id) return;
await fileModel.delete(file.id);
} catch (error) {
if (error instanceof ErrorNotFound) {
// That's ok - a no-op
} else {
throw error;
}
}
return;
}
throw new ErrorMethodNotAllowed();
router.del('api/files/:id', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const fileId = path.id;
try {
const file: File = await fileModel.entityFromItemId(fileId, { mustExist: false });
if (!file.id) return;
await fileModel.delete(file.id);
} catch (error) {
if (error instanceof ErrorNotFound) {
// That's ok - a no-op
} else {
throw error;
}
}
});
// -------------------------------------------
// ROUTE api/files/:id/content
// -------------------------------------------
router.get('api/files/:id/content', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const fileId = path.id;
let file: File = await fileModel.entityFromItemId(fileId);
file = await fileModel.loadWithContent(file.id);
if (!file) throw new ErrorNotFound();
return respondWithFileContent(ctx.response, file);
});
if (path.link === 'content') {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const fileId = path.id;
router.put('api/files/:id/content', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const fileId = path.id;
const result = await formParse(ctx.req);
if (!result?.files?.file) throw new ErrorBadRequest('File data is missing');
const buffer = await fs.readFile(result.files.file.path);
if (ctx.method === 'GET') {
let file: File = await fileModel.entityFromItemId(fileId);
file = await fileModel.loadWithContent(file.id);
if (!file) throw new ErrorNotFound();
return respondWithFileContent(ctx.response, file);
}
const file: File = await fileModel.entityFromItemId(fileId, { mustExist: false });
file.content = buffer;
return fileModel.toApiOutput(await fileModel.save(file, { validationRules: { mustBeFile: true } }));
});
if (ctx.method === 'PUT') {
const result = await formParse(ctx.req);
if (!result?.files?.file) throw new ErrorBadRequest('File data is missing');
const buffer = await fs.readFile(result.files.file.path);
router.del('api/files/:id/content', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const fileId = path.id;
const file: File = await fileModel.entityFromItemId(fileId, { mustExist: false });
if (!file) return;
file.content = Buffer.alloc(0);
await fileModel.save(file, { validationRules: { mustBeFile: true } });
});
const file: File = await fileModel.entityFromItemId(fileId, { mustExist: false });
file.content = buffer;
return fileModel.toApiOutput(await fileModel.save(file, { validationRules: { mustBeFile: true } }));
}
router.get('api/files/:id/delta', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const dir: File = await fileModel.entityFromItemId(path.id, { mustExist: true });
const changeModel = ctx.models.change({ userId: ctx.owner.id });
return changeModel.byDirectoryId(dir.id, requestChangePagination(ctx.query));
});
if (ctx.method === 'DELETE') {
const file: File = await fileModel.entityFromItemId(fileId, { mustExist: false });
if (!file) return;
file.content = Buffer.alloc(0);
await fileModel.save(file, { validationRules: { mustBeFile: true } });
return;
}
router.get('api/files/:id/children', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const parent: File = await fileModel.entityFromItemId(path.id);
return fileModel.toApiOutput(await fileModel.childrens(parent.id, requestPagination(ctx.query)));
});
throw new ErrorMethodNotAllowed();
}
router.post('api/files/:id/children', async (path: SubPath, ctx: AppContext) => {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const child: File = fileModel.fromApiInput(await bodyFields(ctx.req));
const parent: File = await fileModel.entityFromItemId(path.id);
child.parent_id = parent.id;
return fileModel.toApiOutput(await fileModel.save(child));
});
// -------------------------------------------
// ROUTE api/files/:id/delta
// -------------------------------------------
if (path.link === 'delta') {
if (ctx.method === 'GET') {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const dir: File = await fileModel.entityFromItemId(path.id, { mustExist: true });
const changeModel = ctx.models.change({ userId: ctx.owner.id });
return changeModel.byDirectoryId(dir.id, requestChangePagination(ctx.query));
}
throw new ErrorMethodNotAllowed();
}
// -------------------------------------------
// ROUTE api/files/:id/children
// -------------------------------------------
if (path.link === 'children') {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
if (ctx.method === 'GET') {
const parent: File = await fileModel.entityFromItemId(path.id);
return fileModel.toApiOutput(await fileModel.childrens(parent.id, requestPagination(ctx.query)));
}
if (ctx.method === 'POST') {
const child: File = fileModel.fromApiInput(await bodyFields(ctx.req));
const parent: File = await fileModel.entityFromItemId(path.id);
child.parent_id = parent.id;
return fileModel.toApiOutput(await fileModel.save(child));
}
throw new ErrorMethodNotAllowed();
}
throw new ErrorNotFound(`Invalid link: ${path.link}`);
},
};
export default route;
export default router;

View File

@@ -1,11 +0,0 @@
import { Route } from '../../utils/routeUtils';
const route: Route = {
exec: async function() {
return { status: 'ok', message: 'Joplin Server is running' };
},
};
export default route;

View File

@@ -1,11 +1,11 @@
import { Route } from '../../utils/routeUtils';
import Router from '../../utils/Router';
const route: Route = {
const router = new Router();
exec: async function() {
return { status: 'ok', message: 'Joplin Server is running' };
},
router.public = true;
};
router.get('api/ping', async () => {
return { status: 'ok', message: 'Joplin Server is running' };
});
export default route;
export default router;

View File

@@ -1,33 +1,21 @@
import { SubPath, Route } from '../../utils/routeUtils';
import { ErrorForbidden, ErrorMethodNotAllowed, ErrorNotFound } from '../../utils/errors';
import { SubPath } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { ErrorForbidden } from '../../utils/errors';
import { AppContext } from '../../utils/types';
import { bodyFields } from '../../utils/requestUtils';
import { User } from '../../db';
const route: Route = {
const router = new Router();
exec: async function(path: SubPath, ctx: AppContext) {
router.public = true;
// -------------------------------------------
// ROUTE api/sessions
// -------------------------------------------
router.post('api/sessions', async (_path: SubPath, ctx: AppContext) => {
const fields: User = await bodyFields(ctx.req);
const user = await ctx.models.user().login(fields.email, fields.password);
if (!user) throw new ErrorForbidden('Invalid username or password');
if (!path.link) {
if (ctx.method === 'POST') {
const fields: User = await bodyFields(ctx.req);
const user = await ctx.models.user().login(fields.email, fields.password);
if (!user) throw new ErrorForbidden('Invalid username or password');
const session = await ctx.models.session().createUserSession(user.id);
return { id: session.id };
});
const session = await ctx.models.session().createUserSession(user.id);
return { id: session.id };
}
throw new ErrorMethodNotAllowed();
}
throw new ErrorNotFound(`Invalid link: ${path.link}`);
},
};
export default route;
export default router;

View File

@@ -1,5 +1,6 @@
import * as Koa from 'koa';
import { SubPath, Route, Response, ResponseType } from '../utils/routeUtils';
import { SubPath, Response, ResponseType } from '../utils/routeUtils';
import Router from '../utils/Router';
import { ErrorNotFound, ErrorForbidden } from '../utils/errors';
import { dirname, normalize } from 'path';
import { pathExists } from 'fs-extra';
@@ -36,28 +37,25 @@ async function findLocalFile(path: string): Promise<string> {
return localPath;
}
const route: Route = {
const router = new Router();
exec: async function(path: SubPath, ctx: Koa.Context) {
router.public = true;
if (ctx.method === 'GET') {
const localPath = await findLocalFile(path.raw);
// Used to serve static files, so it needs to be public because for example the
// login page, which is public, needs access to the CSS files.
router.get('', async (path: SubPath, ctx: Koa.Context) => {
const localPath = await findLocalFile(path.raw);
let mimeType: string = mime.fromFilename(localPath);
if (!mimeType) mimeType = 'application/octet-stream';
let mimeType: string = mime.fromFilename(localPath);
if (!mimeType) mimeType = 'application/octet-stream';
const fileContent: Buffer = await fs.readFile(localPath);
const fileContent: Buffer = await fs.readFile(localPath);
const koaResponse = ctx.response;
koaResponse.body = fileContent;
koaResponse.set('Content-Type', mimeType);
koaResponse.set('Content-Length', fileContent.length.toString());
return new Response(ResponseType.KoaResponse, koaResponse);
}
const koaResponse = ctx.response;
koaResponse.body = fileContent;
koaResponse.set('Content-Type', mimeType);
koaResponse.set('Content-Length', fileContent.length.toString());
return new Response(ResponseType.KoaResponse, koaResponse);
});
throw new ErrorNotFound();
},
};
export default route;
export default router;

View File

@@ -1,11 +1,12 @@
import { SubPath, Route, respondWithFileContent, redirect } from '../../utils/routeUtils';
import { AppContext } from '../../utils/types';
import { SubPath, respondWithFileContent, redirect } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { AppContext, HttpMethod } from '../../utils/types';
import { contextSessionId, formParse } from '../../utils/requestUtils';
import { ErrorMethodNotAllowed, ErrorNotFound } from '../../utils/errors';
import { ErrorNotFound } from '../../utils/errors';
import { File } from '../../db';
import { createPaginationLinks, pageMaxSize, Pagination, PaginationOrder, PaginationOrderDir, requestPaginationOrder, validatePagination } from '../../models/utils/pagination';
import { setQueryParameters } from '../../utils/urlUtils';
import { baseUrl } from '../../config';
import config from '../../config';
import { formatDateTime } from '../../utils/time';
import defaultView from '../../utils/defaultView';
import { View } from '../../services/MustacheService';
@@ -21,129 +22,104 @@ function makeFilePagination(query: any): Pagination {
return output;
}
const endPoints = {
const router = new Router();
'GET': {
'files/:id': async function(path: SubPath, ctx: AppContext) {
const dirId = path.id;
const query = ctx.query;
router.alias(HttpMethod.GET, 'files', 'files/:id');
// Query parameters that should be appended to pagination-related URLs
const baseUrlQuery: any = {};
if (query.limit) baseUrlQuery.limit = query.limit;
if (query.order_by) baseUrlQuery.order_by = query.order_by;
if (query.order_dir) baseUrlQuery.order_dir = query.order_dir;
router.get('files/:id', async (path: SubPath, ctx: AppContext) => {
const dirId = path.id;
const query = ctx.query;
const pagination = makeFilePagination(query);
const owner = ctx.owner;
const fileModel = ctx.models.file({ userId: owner.id });
const root = await fileModel.userRootFile();
const parentTemp: File = dirId ? await fileModel.entityFromItemId(dirId) : root;
const parent: File = await fileModel.load(parentTemp.id);
const paginatedFiles = await fileModel.childrens(parent.id, pagination);
const pageCount = Math.ceil((await fileModel.childrenCount(parent.id)) / pagination.limit);
// Query parameters that should be appended to pagination-related URLs
const baseUrlQuery: any = {};
if (query.limit) baseUrlQuery.limit = query.limit;
if (query.order_by) baseUrlQuery.order_by = query.order_by;
if (query.order_dir) baseUrlQuery.order_dir = query.order_dir;
const parentBaseUrl = await fileModel.fileUrl(parent.id);
const paginationLinks = createPaginationLinks(pagination.page, pageCount, setQueryParameters(parentBaseUrl, { ...baseUrlQuery, 'page': 'PAGE_NUMBER' }));
const pagination = makeFilePagination(query);
const owner = ctx.owner;
const fileModel = ctx.models.file({ userId: owner.id });
const root = await fileModel.userRootFile();
const parentTemp: File = dirId ? await fileModel.entityFromItemId(dirId) : root;
const parent: File = await fileModel.load(parentTemp.id);
const paginatedFiles = await fileModel.childrens(parent.id, pagination);
const pageCount = Math.ceil((await fileModel.childrenCount(parent.id)) / pagination.limit);
async function fileToViewItem(file: File, fileFullPaths: Record<string, string>): Promise<any> {
const filePath = fileFullPaths[file.id];
const parentBaseUrl = await fileModel.fileUrl(parent.id);
const paginationLinks = createPaginationLinks(pagination.page, pageCount, setQueryParameters(parentBaseUrl, { ...baseUrlQuery, 'page': 'PAGE_NUMBER' }));
let url = `${baseUrl()}/files/${filePath}`;
if (!file.is_directory) {
url += '/content';
} else {
url = setQueryParameters(url, baseUrlQuery);
}
async function fileToViewItem(file: File, fileFullPaths: Record<string, string>): Promise<any> {
const filePath = fileFullPaths[file.id];
return {
name: file.name,
url,
type: file.is_directory ? 'directory' : 'file',
icon: file.is_directory ? 'far fa-folder' : 'far fa-file',
timestamp: formatDateTime(file.updated_time),
mime: !file.is_directory ? (file.mime_type || 'binary') : '',
};
}
const files: any[] = [];
const fileFullPaths = await fileModel.itemFullPaths(paginatedFiles.items);
if (parent.id !== root.id) {
const p = await fileModel.load(parent.parent_id);
files.push({
...await fileToViewItem(p, await fileModel.itemFullPaths([p])),
icon: 'fas fa-arrow-left',
name: '..',
});
}
for (const file of paginatedFiles.items) {
files.push(await fileToViewItem(file, fileFullPaths));
}
const view: View = defaultView('files');
view.content.paginatedFiles = { ...paginatedFiles, items: files };
view.content.paginationLinks = paginationLinks;
view.content.postUrl = `${baseUrl()}/files`;
view.content.parentId = parent.id;
view.cssFiles = ['index/files'];
view.partials.push('pagination');
return view;
},
'files/:id/content': async function(path: SubPath, ctx: AppContext) {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
let file: File = await fileModel.entityFromItemId(path.id);
file = await fileModel.loadWithContent(file.id);
if (!file) throw new ErrorNotFound();
return respondWithFileContent(ctx.response, file);
},
},
'POST': {
'files': async function(_path: SubPath, ctx: AppContext) {
const sessionId = contextSessionId(ctx);
const body = await formParse(ctx.req);
const fields = body.fields;
const parentId = fields.parent_id;
const user = await ctx.models.session().sessionUser(sessionId);
if (fields.delete_all_button) {
const fileModel = ctx.models.file({ userId: ctx.owner.id });
const parent: File = await fileModel.entityFromItemId(parentId, { returnFullEntity: true });
await fileModel.deleteChildren(parent.id);
} else {
throw new Error('Invalid form button');
}
return redirect(ctx, await ctx.models.file({ userId: user.id }).fileUrl(parentId, ctx.query));
},
},
};
const route: Route = {
exec: async function(path: SubPath, ctx: AppContext) {
if (ctx.method === 'GET') {
if (!path.link) {
return endPoints.GET['files/:id'](path, ctx);
} else if (path.link === 'content') {
return endPoints.GET['files/:id/content'](path, ctx);
}
throw new ErrorNotFound();
let url = `${config().baseUrl}/files/${filePath}`;
if (!file.is_directory) {
url += '/content';
} else {
url = setQueryParameters(url, baseUrlQuery);
}
if (ctx.method === 'POST') {
return endPoints.POST['files'](path, ctx);
}
return {
name: file.name,
url,
type: file.is_directory ? 'directory' : 'file',
icon: file.is_directory ? 'far fa-folder' : 'far fa-file',
timestamp: formatDateTime(file.updated_time),
mime: !file.is_directory ? (file.mime_type || 'binary') : '',
};
}
throw new ErrorMethodNotAllowed();
},
const files: any[] = [];
};
const fileFullPaths = await fileModel.itemFullPaths(paginatedFiles.items);
export default route;
if (parent.id !== root.id) {
const p = await fileModel.load(parent.parent_id);
files.push({
...await fileToViewItem(p, await fileModel.itemFullPaths([p])),
icon: 'fas fa-arrow-left',
name: '..',
});
}
for (const file of paginatedFiles.items) {
files.push(await fileToViewItem(file, fileFullPaths));
}
const view: View = defaultView('files');
view.content.paginatedFiles = { ...paginatedFiles, items: files };
view.content.paginationLinks = paginationLinks;
view.content.postUrl = `${config().baseUrl}/files`;
view.content.parentId = parent.id;
view.cssFiles = ['index/files'];
view.partials.push('pagination');
return view;
});
// GET files/:id/content: responds with the raw content of the file.
// `:id` may be a plain file ID or an item address (see entityFromItemId),
// which also supports path-based addressing such as "root:/Doc.md:".
router.get('files/:id/content', async (path: SubPath, ctx: AppContext) => {
	const fileModel = ctx.models.file({ userId: ctx.owner.id });
	let file: File = await fileModel.entityFromItemId(path.id);
	// Reload with content included - entityFromItemId does not fetch it.
	file = await fileModel.loadWithContent(file.id);
	if (!file) throw new ErrorNotFound();
	return respondWithFileContent(ctx.response, file);
});

// POST files: handles form submissions from the files page. The only
// supported action is the "delete all" button, which deletes every child
// of the directory identified by the `parent_id` form field.
router.post('files', async (_path: SubPath, ctx: AppContext) => {
	const sessionId = contextSessionId(ctx);
	const body = await formParse(ctx.req);
	const fields = body.fields;
	const parentId = fields.parent_id;
	const user = await ctx.models.session().sessionUser(sessionId);

	if (fields.delete_all_button) {
		const fileModel = ctx.models.file({ userId: ctx.owner.id });
		const parent: File = await fileModel.entityFromItemId(parentId, { returnFullEntity: true });
		await fileModel.deleteChildren(parent.id);
	} else {
		// Reject submissions that don't come from a known form button.
		throw new Error('Invalid form button');
	}

	// Redirect back to the directory listing that was being viewed.
	return redirect(ctx, await ctx.models.file({ userId: user.id }).fileUrl(parentId, ctx.query));
});

export default router;

View File

@@ -0,0 +1,34 @@
import routeHandler from '../../middleware/routeHandler';
import { beforeAllDb, afterAllTests, beforeEachDb, koaAppContext, createUserAndSession } from '../../utils/testing/testUtils';
// Integration tests for the index home page route.
describe('index_home', function() {

	// Each suite gets its own dedicated test database.
	beforeAll(async () => {
		await beforeAllDb('index_home');
	});

	afterAll(async () => {
		await afterAllTests();
	});

	// Reset the database content between tests.
	beforeEach(async () => {
		await beforeEachDb();
	});

	test('should show the home page', async function() {
		const { user, session } = await createUserAndSession();

		// Simulate an authenticated GET /home request.
		const context = await koaAppContext({
			sessionId: session.id,
			request: {
				method: 'GET',
				url: '/home',
			},
		});

		await routeHandler(context);

		// The rendered home page includes the logged-in user's email, so
		// its presence confirms the page was rendered for this user.
		expect(context.response.body.indexOf(user.email) >= 0).toBe(true);
	});

});

View File

@@ -1,21 +1,20 @@
import { SubPath, Route } from '../../utils/routeUtils';
import { SubPath } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { AppContext } from '../../utils/types';
import { contextSessionId } from '../../utils/requestUtils';
import { ErrorMethodNotAllowed } from '../../utils/errors';
import defaultView from '../../utils/defaultView';
const route: Route = {
const router: Router = new Router();
exec: async function(_path: SubPath, ctx: AppContext) {
contextSessionId(ctx);
router.get('home', async (_path: SubPath, ctx: AppContext) => {
contextSessionId(ctx);
if (ctx.method === 'GET') {
return defaultView('home');
}
if (ctx.method === 'GET') {
return defaultView('home');
}
throw new ErrorMethodNotAllowed();
},
throw new ErrorMethodNotAllowed();
});
};
export default route;
export default router;

View File

@@ -1,8 +1,8 @@
import { SubPath, Route, redirect } from '../../utils/routeUtils';
import { ErrorMethodNotAllowed } from '../../utils/errors';
import { SubPath, redirect } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { AppContext } from '../../utils/types';
import { formParse } from '../../utils/requestUtils';
import { baseUrl } from '../../config';
import config from '../../config';
import defaultView from '../../utils/defaultView';
import { View } from '../../services/MustacheService';
@@ -13,28 +13,24 @@ function makeView(error: any = null): View {
return view;
}
const route: Route = {
const router: Router = new Router();
exec: async function(_path: SubPath, ctx: AppContext) {
if (ctx.method === 'GET') {
return makeView();
}
router.public = true;
if (ctx.method === 'POST') {
try {
const body = await formParse(ctx.req);
router.get('login', async (_path: SubPath, _ctx: AppContext) => {
return makeView();
});
const session = await ctx.models.session().authenticate(body.fields.email, body.fields.password);
ctx.cookies.set('sessionId', session.id);
return redirect(ctx, `${baseUrl()}/home`);
} catch (error) {
return makeView(error);
}
}
router.post('login', async (_path: SubPath, ctx: AppContext) => {
try {
const body = await formParse(ctx.req);
throw new ErrorMethodNotAllowed();
},
const session = await ctx.models.session().authenticate(body.fields.email, body.fields.password);
ctx.cookies.set('sessionId', session.id);
return redirect(ctx, `${config().baseUrl}/home`);
} catch (error) {
return makeView(error);
}
});
};
export default route;
export default router;

View File

@@ -0,0 +1,37 @@
import routeHandler from '../../middleware/routeHandler';
import { beforeAllDb, afterAllTests, beforeEachDb, koaAppContext, models, createUserAndSession } from '../../utils/testing/testUtils';
// Integration tests for the logout route.
describe('index_logout', function() {

	// Each suite gets its own dedicated test database.
	beforeAll(async () => {
		await beforeAllDb('index_logout');
	});

	afterAll(async () => {
		await afterAllTests();
	});

	// Reset the database content between tests.
	beforeEach(async () => {
		await beforeEachDb();
	});

	test('should logout', async function() {
		const { session } = await createUserAndSession();

		// Simulate an authenticated POST /logout request.
		const context = await koaAppContext({
			sessionId: session.id,
			request: {
				method: 'POST',
				url: '/logout',
			},
		});

		// Sanity check: before logging out, the session cookie is set and
		// the session exists in the database.
		expect(context.cookies.get('sessionId')).toBe(session.id);
		expect(!!(await models().session().load(session.id))).toBe(true);

		await routeHandler(context);

		// Logging out must both clear the cookie and delete the session
		// server-side.
		expect(!context.cookies.get('sessionId')).toBe(true);
		expect(!!(await models().session().load(session.id))).toBe(false);
	});

});

View File

@@ -1,20 +1,16 @@
import { SubPath, Route, redirect } from '../../utils/routeUtils';
import { ErrorMethodNotAllowed } from '../../utils/errors';
import { SubPath, redirect } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { AppContext } from '../../utils/types';
import { baseUrl } from '../../config';
import config from '../../config';
import { contextSessionId } from '../../utils/requestUtils';
const route: Route = {
const router = new Router();
exec: async function(_path: SubPath, ctx: AppContext) {
if (ctx.method === 'POST') {
// TODO: also delete the session from the database
ctx.cookies.set('sessionId', '');
return redirect(ctx, `${baseUrl()}/login`);
}
router.post('logout', async (_path: SubPath, ctx: AppContext) => {
const sessionId = contextSessionId(ctx, false);
ctx.cookies.set('sessionId', '');
await ctx.models.session().logout(sessionId);
return redirect(ctx, `${config().baseUrl}/login`);
});
throw new ErrorMethodNotAllowed();
},
};
export default route;
export default router;

View File

@@ -0,0 +1,46 @@
import { NotificationLevel } from '../../db';
import routeHandler from '../../middleware/routeHandler';
import { beforeAllDb, afterAllTests, beforeEachDb, koaAppContext, models, createUserAndSession } from '../../utils/testing/testUtils';
// Integration tests for the notifications route.
describe('index_notification', function() {

	// Each suite gets its own dedicated test database.
	beforeAll(async () => {
		await beforeAllDb('index_notification');
	});

	afterAll(async () => {
		await afterAllTests();
	});

	// Reset the database content between tests.
	beforeEach(async () => {
		await beforeEachDb();
	});

	test('should update notification', async function() {
		const { user, session } = await createUserAndSession();
		const model = models().notification({ userId: user.id });

		// Create a notification, which starts out unread (read === 0).
		await model.add('my_notification', NotificationLevel.Normal, 'testing notification');
		const notification = await model.loadByKey('my_notification');
		expect(notification.read).toBe(0);

		// PATCH /notifications/:id with { read: 1 } should mark it read.
		const context = await koaAppContext({
			sessionId: session.id,
			request: {
				method: 'PATCH',
				url: `/notifications/${notification.id}`,
				body: {
					read: 1,
				},
			},
		});

		await routeHandler(context);

		expect((await model.loadByKey('my_notification')).read).toBe(1);
	});

});

View File

@@ -1,33 +1,25 @@
import { SubPath, Route } from '../../utils/routeUtils';
import { SubPath } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { AppContext } from '../../utils/types';
import { bodyFields, contextSessionId } from '../../utils/requestUtils';
import { ErrorMethodNotAllowed, ErrorNotFound } from '../../utils/errors';
import { bodyFields } from '../../utils/requestUtils';
import { ErrorNotFound } from '../../utils/errors';
import { Notification } from '../../db';
const route: Route = {
const router = new Router();
exec: async function(path: SubPath, ctx: AppContext) {
contextSessionId(ctx);
router.patch('notifications/:id', async (path: SubPath, ctx: AppContext) => {
const fields: Notification = await bodyFields(ctx.req);
const notificationId = path.id;
const model = ctx.models.notification({ userId: ctx.owner.id });
const existingNotification = await model.load(notificationId);
if (!existingNotification) throw new ErrorNotFound();
if (path.id && ctx.method === 'PATCH') {
const fields: Notification = await bodyFields(ctx.req);
const notificationId = path.id;
const model = ctx.models.notification({ userId: ctx.owner.id });
const existingNotification = await model.load(notificationId);
if (!existingNotification) throw new ErrorNotFound();
const toSave: Notification = {};
if ('read' in fields) toSave.read = fields.read;
if (!Object.keys(toSave).length) return;
const toSave: Notification = {};
if ('read' in fields) toSave.read = fields.read;
if (!Object.keys(toSave).length) return;
toSave.id = notificationId;
await model.save(toSave);
});
toSave.id = notificationId;
await model.save(toSave);
return;
}
throw new ErrorMethodNotAllowed();
},
};
export default route;
export default router;

View File

@@ -1,9 +1,10 @@
import { SubPath, Route, redirect } from '../../utils/routeUtils';
import { AppContext } from '../../utils/types';
import { contextSessionId, formParse } from '../../utils/requestUtils';
import { ErrorMethodNotAllowed, ErrorUnprocessableEntity } from '../../utils/errors';
import { SubPath, redirect } from '../../utils/routeUtils';
import Router from '../../utils/Router';
import { AppContext, HttpMethod } from '../../utils/types';
import { formParse } from '../../utils/requestUtils';
import { ErrorUnprocessableEntity } from '../../utils/errors';
import { User } from '../../db';
import { baseUrl } from '../../config';
import config from '../../config';
import { View } from '../../services/MustacheService';
import defaultView from '../../utils/defaultView';
@@ -31,103 +32,79 @@ function userIsMe(path: SubPath): boolean {
return path.id === 'me';
}
const endPoints = {
const router = new Router();
'GET': {
'users': async function(_path: SubPath, ctx: AppContext) {
const userModel = ctx.models.user({ userId: ctx.owner.id });
const users = await userModel.all();
router.get('users', async (_path: SubPath, ctx: AppContext) => {
const userModel = ctx.models.user({ userId: ctx.owner.id });
const users = await userModel.all();
const view: View = defaultView('users');
view.content.users = users;
return view;
},
const view: View = defaultView('users');
view.content.users = users;
return view;
});
'users/:id': async function(path: SubPath, ctx: AppContext, user: User = null, error: any = null) {
const owner = ctx.owner;
const isMe = userIsMe(path);
const isNew = userIsNew(path);
const userModel = ctx.models.user({ userId: owner.id });
const userId = userIsMe(path) ? owner.id : path.id;
router.get('users/:id', async (path: SubPath, ctx: AppContext, user: User = null, error: any = null) => {
const owner = ctx.owner;
const isMe = userIsMe(path);
const isNew = userIsNew(path);
const userModel = ctx.models.user({ userId: owner.id });
const userId = userIsMe(path) ? owner.id : path.id;
user = !isNew ? user || await userModel.load(userId) : null;
user = !isNew ? user || await userModel.load(userId) : null;
let postUrl = '';
let postUrl = '';
if (isNew) {
postUrl = `${baseUrl()}/users/new`;
} else if (isMe) {
postUrl = `${baseUrl()}/users/me`;
if (isNew) {
postUrl = `${config().baseUrl}/users/new`;
} else if (isMe) {
postUrl = `${config().baseUrl}/users/me`;
} else {
postUrl = `${config().baseUrl}/users/${user.id}`;
}
const view: View = defaultView('user');
view.content.user = user;
view.content.isNew = isNew;
view.content.buttonTitle = isNew ? 'Create user' : 'Update profile';
view.content.error = error;
view.content.postUrl = postUrl;
view.content.showDeleteButton = !isNew && !!owner.is_admin && owner.id !== user.id;
view.partials.push('errorBanner');
return view;
});
router.alias(HttpMethod.POST, 'users/:id', 'users');
router.post('users', async (path: SubPath, ctx: AppContext) => {
let user: User = {};
const userId = userIsMe(path) ? ctx.owner.id : path.id;
try {
const body = await formParse(ctx.req);
const fields = body.fields;
if (userIsMe(path)) fields.id = userId;
user = makeUser(userIsNew(path), fields);
const userModel = ctx.models.user({ userId: ctx.owner.id });
if (fields.post_button) {
if (userIsNew(path)) {
await userModel.save(userModel.fromApiInput(user));
} else {
postUrl = `${baseUrl()}/users/${user.id}`;
}
const view: View = defaultView('user');
view.content.user = user;
view.content.isNew = isNew;
view.content.buttonTitle = isNew ? 'Create user' : 'Update profile';
view.content.error = error;
view.content.postUrl = postUrl;
view.content.showDeleteButton = !isNew && !!owner.is_admin && owner.id !== user.id;
view.partials.push('errorBanner');
return view;
},
},
'POST': {
'users': async function(path: SubPath, ctx: AppContext) {
let user: User = {};
const userId = userIsMe(path) ? ctx.owner.id : path.id;
try {
const body = await formParse(ctx.req);
const fields = body.fields;
if (userIsMe(path)) fields.id = userId;
user = makeUser(userIsNew(path), fields);
const userModel = ctx.models.user({ userId: ctx.owner.id });
if (fields.post_button) {
if (userIsNew(path)) {
await userModel.save(userModel.fromApiInput(user));
} else {
await userModel.save(userModel.fromApiInput(user), { isNew: false });
}
} else if (fields.delete_button) {
await userModel.delete(path.id);
} else {
throw new Error('Invalid form button');
}
return redirect(ctx, `${baseUrl()}/users${userIsMe(path) ? '/me' : ''}`);
} catch (error) {
return endPoints.GET['users/:id'](path, ctx, user, error);
}
},
},
};
const route: Route = {
exec: async function(path: SubPath, ctx: AppContext) {
contextSessionId(ctx);
if (ctx.method === 'GET') {
if (path.id) {
return endPoints.GET['users/:id'](path, ctx);
} else {
return endPoints.GET['users'](path, ctx);
await userModel.save(userModel.fromApiInput(user), { isNew: false });
}
} else if (fields.delete_button) {
await userModel.delete(path.id);
} else {
throw new Error('Invalid form button');
}
if (ctx.method === 'POST') {
return endPoints.POST['users'](path, ctx);
}
return redirect(ctx, `${config().baseUrl}/users${userIsMe(path) ? '/me' : ''}`);
} catch (error) {
const endPoint = router.findEndPoint(HttpMethod.GET, 'users/:id');
return endPoint(path, ctx, user, error);
}
});
throw new ErrorMethodNotAllowed();
},
};
export default route;
export default router;

View File

@@ -1,4 +1,4 @@
import { Routes } from '../utils/routeUtils';
import { Routers } from '../utils/routeUtils';
import apiSessions from './api/sessions';
import apiPing from './api/ping';
@@ -11,7 +11,7 @@ import indexFilesRoute from './index/files';
import indexNotificationsRoute from './index/notifications';
import defaultRoute from './default';
const routes: Routes = {
const routes: Routers = {
'api/ping': apiPing,
'api/sessions': apiSessions,
'api/files': apiFiles,

View File

@@ -1,6 +1,6 @@
import * as Mustache from 'mustache';
import * as fs from 'fs-extra';
import config, { baseUrl } from '../config';
import config from '../config';
export interface RenderOptions {
partials?: any;
@@ -30,7 +30,7 @@ class MustacheService {
private get defaultLayoutOptions(): any {
return {
baseUrl: baseUrl(),
baseUrl: config().baseUrl,
};
}
@@ -41,7 +41,7 @@ class MustacheService {
private resolvesFilePaths(type: string, paths: string[]): string[] {
const output: string[] = [];
for (const path of paths) {
output.push(`${baseUrl()}/${type}/${path}.${type}`);
output.push(`${config().baseUrl}/${type}/${path}.${type}`);
}
return output;
}

View File

@@ -33,7 +33,7 @@ export async function createDb(config: DatabaseConfig, options: CreateDbOptions
await execCommand(cmd.join(' '));
} else if (config.client === 'sqlite3') {
const filePath = sqliteFilePath(config);
const filePath = sqliteFilePath(config.name);
if (await fs.pathExists(filePath)) {
if (options.dropIfExists) {
@@ -71,6 +71,6 @@ export async function dropDb(config: DatabaseConfig, options: DropDbOptions = nu
throw error;
}
} else if (config.client === 'sqlite3') {
await fs.remove(sqliteFilePath(config));
await fs.remove(sqliteFilePath(config.name));
}
}

View File

@@ -0,0 +1,62 @@
import { ErrorMethodNotAllowed, ErrorNotFound } from './errors';
import { HttpMethod } from './types';
import { RouteResponseFormat, RouteHandler } from './routeUtils';
// Maps HTTP methods and path schemas (eg. "files/:id/content") to handler
// functions. Each feature module creates one Router and registers its
// routes on it.
export default class Router {

	// When true, the routes on this router can be accessed without a
	// valid session (for example the login page).
	public public: boolean = false;

	// When set, forces the response format for every route on this router
	// instead of deriving it from the request path.
	public responseFormat: RouteResponseFormat = null;

	// Method => schema => handler. Handlers are normally functions, but a
	// string value is treated as the schema of another handler to
	// delegate to (resolved in findEndPoint()).
	private routes_: Record<string, Record<string, RouteHandler>> = {};

	// Method => schema => target schema. Aliases are resolved before the
	// route table is consulted.
	private aliases_: Record<string, Record<string, string>> = {};

	// Shared registration logic for the per-method helpers below.
	// `method` is a string (not HttpMethod) so that all verbs supported by
	// the route table, including PUT, can be registered.
	private registerHandler_(method: string, path: string, handler: RouteHandler) {
		if (!this.routes_[method]) { this.routes_[method] = {}; }
		this.routes_[method][path] = handler;
	}

	// Resolves the handler for the given method and schema.
	//
	// Throws ErrorMethodNotAllowed when no route at all is registered for
	// the method, and ErrorNotFound when the schema cannot be resolved to
	// a handler function.
	public findEndPoint(method: HttpMethod, schema: string): RouteHandler {
		if (this.aliases_[method]?.[schema]) { return this.findEndPoint(method, this.aliases_[method]?.[schema]); }
		if (!this.routes_[method]) { throw new ErrorMethodNotAllowed(`Not allowed: ${method} ${schema}`); }

		const endPoint = this.routes_[method][schema];
		if (!endPoint) { throw new ErrorNotFound(`Not found: ${method} ${schema}`); }

		// Follow string indirections until an actual handler function is
		// found. The iteration cap guards against accidental cycles in
		// the route table.
		let endPointFn = endPoint;
		for (let i = 0; i < 1000; i++) {
			if (typeof endPointFn === 'string') {
				endPointFn = this.routes_[method]?.[endPointFn];
			} else {
				return endPointFn;
			}
		}

		throw new ErrorNotFound(`Could not resolve: ${method} ${schema}`);
	}

	// Makes `path` behave exactly like `target` for the given method.
	public alias(method: HttpMethod, path: string, target: string) {
		if (!this.aliases_[method]) { this.aliases_[method] = {}; }
		this.aliases_[method][path] = target;
	}

	public get(path: string, handler: RouteHandler) {
		this.registerHandler_('GET', path, handler);
	}

	public post(path: string, handler: RouteHandler) {
		this.registerHandler_('POST', path, handler);
	}

	public patch(path: string, handler: RouteHandler) {
		this.registerHandler_('PATCH', path, handler);
	}

	public del(path: string, handler: RouteHandler) {
		this.registerHandler_('DELETE', path, handler);
	}

	public put(path: string, handler: RouteHandler) {
		this.registerHandler_('PUT', path, handler);
	}

}

View File

@@ -18,7 +18,7 @@ describe('routeUtils', function() {
const link = t[2];
const addressingType = t[3];
const parsed = parseSubPath(path);
const parsed = parseSubPath('', path);
expect(parsed.id).toBe(id);
expect(parsed.link).toBe(link);
expect(parsed.addressingType).toBe(addressingType);

View File

@@ -1,5 +1,6 @@
import { File, ItemAddressingType } from '../db';
import { ErrorBadRequest } from './errors';
import Router from './Router';
import { AppContext } from './types';
const { ltrimSlashes, rtrimSlashes } = require('@joplin/lib/path-utils');
@@ -22,13 +23,10 @@ export enum RouteResponseFormat {
Json = 'json',
}
export interface Route {
exec: Function;
responseFormat?: RouteResponseFormat;
}
export type RouteHandler = (path: SubPath, ctx: AppContext, ...args: any[])=> Promise<any>;
export interface Routes {
[key: string]: Route;
export interface Routers {
[key: string]: Router;
}
export interface SubPath {
@@ -36,10 +34,11 @@ export interface SubPath {
link: string;
addressingType: ItemAddressingType;
raw: string;
schema: string;
}
export interface MatchedRoute {
route: Route;
route: Router;
basePath: string;
subPath: SubPath;
}
@@ -113,7 +112,7 @@ export function isPathBasedAddressing(fileId: string): boolean {
//
// root:/Documents/MyFile.md:/content
// ABCDEFG/content
export function parseSubPath(p: string): SubPath {
export function parseSubPath(basePath: string, p: string): SubPath {
p = rtrimSlashes(ltrimSlashes(p));
const output: SubPath = {
@@ -121,6 +120,7 @@ export function parseSubPath(p: string): SubPath {
link: '',
addressingType: ItemAddressingType.Id,
raw: p,
schema: '',
};
const colonIndex1 = p.indexOf(':');
@@ -141,12 +141,17 @@ export function parseSubPath(p: string): SubPath {
if (s.length >= 2) output.link = s[1];
}
if (basePath) {
const schema = [basePath];
if (output.id) schema.push(':id');
if (output.link) schema.push(output.link);
output.schema = schema.join('/');
}
return output;
}
export function routeResponseFormat(match: MatchedRoute, context: AppContext): RouteResponseFormat {
// if (context.query && context.query.response_format === 'json') return RouteResponseFormat.Json;
const rawPath = context.path;
if (match && match.route.responseFormat) return match.route.responseFormat;
@@ -160,7 +165,7 @@ export function routeResponseFormat(match: MatchedRoute, context: AppContext): R
// - The base path: "api/files"
// - The ID: "SOME_ID"
// - The link: "content"
export function findMatchingRoute(path: string, routes: Routes): MatchedRoute {
export function findMatchingRoute(path: string, routes: Routers): MatchedRoute {
const splittedPath = path.split('/');
// Because the path starts with "/", we remove the first element, which is
@@ -179,7 +184,7 @@ export function findMatchingRoute(path: string, routes: Routes): MatchedRoute {
return {
route: routes[basePath],
basePath: basePath,
subPath: parseSubPath(`/${splittedPath.join('/')}`),
subPath: parseSubPath(basePath, `/${splittedPath.join('/')}`),
};
}
}
@@ -190,7 +195,7 @@ export function findMatchingRoute(path: string, routes: Routes): MatchedRoute {
return {
route: routes[basePath],
basePath: basePath,
subPath: parseSubPath(`/${splittedPath.join('/')}`),
subPath: parseSubPath(basePath, `/${splittedPath.join('/')}`),
};
}
@@ -198,7 +203,7 @@ export function findMatchingRoute(path: string, routes: Routes): MatchedRoute {
return {
route: routes[''],
basePath: '',
subPath: parseSubPath(`/${splittedPath.join('/')}`),
subPath: parseSubPath('', `/${splittedPath.join('/')}`),
};
}

View File

@@ -1,9 +1,8 @@
import { User, Session, DbConnection, connectDb, disconnectDb, File, truncateTables } from '../../db';
import { User, Session, DbConnection, connectDb, disconnectDb, File, truncateTables, sqliteFilePath } from '../../db';
import { createDb } from '../../tools/dbTools';
import modelFactory from '../../models/factory';
import baseConfig from '../../config-tests';
import { AppContext, Config, Env } from '../types';
import { initConfig } from '../../config';
import { AppContext, Env } from '../types';
import config, { initConfig } from '../../config';
import FileModel from '../../models/FileModel';
import Logger from '@joplin/lib/Logger';
import FakeCookies from './koa/FakeCookies';
@@ -34,18 +33,16 @@ export async function tempDir(): Promise<string> {
return tempDir_;
}
let createdDbName_: string = null;
export async function beforeAllDb(unitName: string) {
const config: Config = {
...baseConfig,
database: {
...baseConfig.database,
name: unitName,
},
};
createdDbName_ = unitName;
initConfig(config);
await createDb(config.database, { dropIfExists: true });
db_ = await connectDb(config.database);
initConfig({
SQLITE_DATABASE: createdDbName_,
});
await createDb(config().database, { dropIfExists: true });
db_ = await connectDb(config().database);
}
export async function afterAllTests() {
@@ -58,6 +55,12 @@ export async function afterAllTests() {
await fs.remove(tempDir_);
tempDir_ = null;
}
if (createdDbName_) {
const filePath = sqliteFilePath(createdDbName_);
await fs.remove(filePath);
createdDbName_ = null;
}
}
export async function beforeEachDb() {

View File

@@ -25,8 +25,13 @@ export interface AppContext extends Koa.Context {
owner: User;
}
export enum DatabaseConfigClient {
PostgreSQL = 'pg',
SQLite = 'sqlite3',
}
export interface DatabaseConfig {
client: string;
client: DatabaseConfigClient;
name: string;
host?: string;
port?: number;
@@ -40,8 +45,19 @@ export interface Config {
rootDir: string;
viewDir: string;
layoutDir: string;
// Not that, for now, nothing is being logged to file. Log is just printed
// to stdout, which is then handled by Docker own log mechanism
logDir: string;
database: DatabaseConfig;
baseUrl: string;
}
// HTTP methods understood by the routing layer. The string values match
// the uppercase method names provided by Koa requests and used as keys in
// Router's route table.
export enum HttpMethod {
	GET = 'GET',
	POST = 'POST',
	// PUT was previously missing even though Router.put() registers
	// routes under 'PUT', which made such routes impossible to reference
	// through this enum (eg. via Router.alias() or findEndPoint()).
	PUT = 'PUT',
	DELETE = 'DELETE',
	PATCH = 'PATCH',
	HEAD = 'HEAD',
}
export type KoaNext = ()=> Promise<void>;

View File

@@ -13,8 +13,8 @@
</head>
<body class="page-{{{pageName}}}">
{{> navbar}}
{{> notifications}}
<main class="main">
{{> notifications}}
{{{contentHtml}}}
</main>
</body>

View File

@@ -1,9 +1,11 @@
{{#global.notifications}}
<div class="notification is-{{level}}" id="notification-{{id}}">
<button data-close-url="{{closeUrl}}" data-id="{{id}}" class="delete close-notification-button"></button>
{{{messageHtml}}}
</div>
{{/global.notifications}}
{{#global.hasNotifications}}
{{#global.notifications}}
<div class="notification is-{{level}}" id="notification-{{id}}">
<button data-close-url="{{closeUrl}}" data-id="{{id}}" class="delete close-notification-button"></button>
{{{messageHtml}}}
</div>
{{/global.notifications}}
{{/global.hasNotifications}}
<script>
onDocumentReady(function() {

View File

@@ -194,8 +194,6 @@ async function main() {
let manifests: any = {};
// TODO: validate plugin ID when publishing
for (const npmPackage of npmPackages) {
try {
const packageName = npmPackage.name;

View File

@@ -1,35 +1,28 @@
import * as fs from 'fs-extra';
const { execCommand, execCommandVerbose, rootDir, gitPullTry } = require('./tool-utils.js');
const { execCommand2, rootDir, gitPullTry } = require('./tool-utils.js');
const serverDir = `${rootDir}/packages/server`;
const readmePath = `${serverDir}/README.md`;
async function updateReadmeLinkVersion(version: string) {
const content = await fs.readFile(readmePath, 'utf8');
const newContent = content.replace(/server-v(.*?).tar.gz/g, `server-${version}.tar.gz`);
if (content === newContent) throw new Error(`Could not change version number in ${readmePath}`);
await fs.writeFile(readmePath, newContent, 'utf8');
}
async function main() {
process.chdir(serverDir);
console.info(`Running from: ${process.cwd()}`);
await gitPullTry();
const version = (await execCommand('npm version patch')).trim();
process.chdir(serverDir);
const version = (await execCommand2('npm version patch')).trim();
const versionShort = version.substr(1);
const tagName = `server-${version}`;
console.info(`New version number: ${version}`);
process.chdir(rootDir);
console.info(`Running from: ${process.cwd()}`);
await updateReadmeLinkVersion(version);
await execCommand2(`docker build -t "joplin/server:${versionShort}" -f Dockerfile.server .`);
await execCommand2(`docker tag "joplin/server:${versionShort}" "joplin/server:latest"`);
await execCommand2(`docker push joplin/server:${versionShort}`);
await execCommand2('docker push joplin/server:latest');
await execCommandVerbose('git', ['add', '-A']);
await execCommandVerbose('git', ['commit', '-m', `Server release ${version}`]);
await execCommandVerbose('git', ['tag', tagName]);
await execCommandVerbose('git', ['push']);
await execCommandVerbose('git', ['push', '--tags']);
await execCommand2('git add -A');
await execCommand2(`git commit -m 'Server release ${version}'`);
await execCommand2(`git tag ${tagName}`);
await execCommand2('git push');
await execCommand2('git push --tags');
}
main().catch((error) => {

View File

@@ -2,6 +2,7 @@ const fetch = require('node-fetch');
const fs = require('fs-extra');
const execa = require('execa');
const { execSync } = require('child_process');
const { splitCommandString } = require('@joplin/lib/string-utils');
const toolUtils = {};
@@ -55,6 +56,29 @@ toolUtils.execCommandVerbose = function(commandName, args = []) {
return promise;
};
// There's lot of execCommandXXX functions, but eventually all scripts should
// use the one below, which supports:
//
// - Printing the command being executed
// - Printing the output in real time (piping to stdout)
// - Returning the command result as string
toolUtils.execCommand2 = async function(command, options = null) {
	// Apply defaults without mutating the caller's options object.
	const opts = {
		showInput: true,
		showOutput: true,
		...options,
	};

	if (opts.showInput) console.info(`> ${command}`);

	const [executableName, ...args] = splitCommandString(command);
	const subProcess = execa(executableName, args);

	// Mirror the child's stdout to ours in real time while execa still
	// captures it for the return value.
	if (opts.showOutput) subProcess.stdout.pipe(process.stdout);

	const { stdout } = await subProcess;
	return stdout;
};
toolUtils.execCommandWithPipes = function(executable, args) {
const spawn = require('child_process').spawn;