first commit
This commit is contained in:
commit
6f25e684e0
75 changed files with 29674 additions and 0 deletions
59
.env.dist
Normal file
59
.env.dist
Normal file
|
@ -0,0 +1,59 @@
|
|||
# Server port and host
SERVER_PORT=3001
SERVER_HOST=localhost

# External URL to access server API
SERVER_URL=http://localhost:3001

# Secret key. Should be unique for each instance
RICOCHET_SECRET=YourSuperSecretHere

# Configure the JSON store backend
# Available backends: memory, nedb, mongodb
# Use nedb or mongodb for persistent storage
JSON_STORE_BACKEND=memory

# nedb JSON store backend configuration
NEDB_DIRNAME=/path/to/data

# mongodb JSON store backend configuration
MONGODB_URI=
MONGODB_DATABASE=

# memory, disk or s3 storage are available
FILE_STORE_BACKEND=memory

# disk file store configuration
DISK_DESTINATION=/path/to/dir/

# S3 file store configuration
S3_ACCESS_KEY=
S3_SECRET_KEY=
S3_ENDPOINT=
S3_BUCKET=
S3_REGION=
# Do we proxy the file through this server
S3_PROXY=
# CDN configuration
S3_CDN=
# Signed url or public url
S3_SIGNED_URL=

# Only for testing purpose
S3_BUCKET_TEST=

# Smtp server configuration
EMAIL_HOST=fake
EMAIL_PORT=
EMAIL_USER=
EMAIL_PASSWORD=
EMAIL_FROM=no-reply@example.com

# Enable new site registration. 0 to disable.
SITE_REGISTRATION_ENABLED=1

# Use pino for logging ? Set to 1 to enable
USE_PINO=0

# email whitelist file path
WHITELIST_PATH=
|
15
.eslintrc.js
Normal file
15
.eslintrc.js
Normal file
|
@ -0,0 +1,15 @@
|
|||
module.exports = {
|
||||
env: {
|
||||
es2021: true,
|
||||
node: true,
|
||||
jest: true,
|
||||
browser: true,
|
||||
},
|
||||
extends: 'eslint:recommended',
|
||||
parserOptions: {
|
||||
ecmaVersion: 12,
|
||||
sourceType: 'module',
|
||||
},
|
||||
rules: {},
|
||||
ignorePatterns: ['src/__test__/*'],
|
||||
};
|
27
.github/workflows/node.js.yml
vendored
Normal file
27
.github/workflows/node.js.yml
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions

name: Node.js CI

on:
  push:
  pull_request:

jobs:
  build:

    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [14.x, 16.x]
        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

    steps:
      - uses: actions/checkout@v3
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
      - run: npm ci
      - run: npm run ci
|
22
.github/workflows/npm-publish.yml
vendored
Normal file
22
.github/workflows/npm-publish.yml
vendored
Normal file
|
@ -0,0 +1,22 @@
|
|||
# Publish the package to npm when a GitHub release is created
# (or when triggered manually via workflow_dispatch).
name: Node.js Publish

on:
  release:
    types: [created]
  workflow_dispatch:

jobs:
  publish-npm:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 16
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm publish
        env:
          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
6
.gitignore
vendored
Normal file
6
.gitignore
vendored
Normal file
|
@ -0,0 +1,6 @@
|
|||
node_modules/
|
||||
coverage/
|
||||
server.log
|
||||
site.json
|
||||
dist/
|
||||
.env
|
11
.npmignore
Normal file
11
.npmignore
Normal file
|
@ -0,0 +1,11 @@
|
|||
/*
|
||||
|
||||
#!/dist
|
||||
!/src
|
||||
!package.json
|
||||
!/Readme.md
|
||||
!/CHANGELOG.md
|
||||
!/locales
|
||||
!/public
|
||||
|
||||
/src/__test__
|
1
.nvmrc
Normal file
1
.nvmrc
Normal file
|
@ -0,0 +1 @@
|
|||
16
|
206
CHANGELOG.md
Normal file
206
CHANGELOG.md
Normal file
|
@ -0,0 +1,206 @@
|
|||
|
||||
1.6.0 / 2022-10-17
|
||||
==================
|
||||
|
||||
* Improve email regex (#51)
|
||||
* Update vm2
|
||||
|
||||
1.5.2 / 2022-09-02
|
||||
==================
|
||||
|
||||
* Fix Encrypt module
|
||||
|
||||
1.5.1 / 2022-09-02
|
||||
==================
|
||||
|
||||
* Fix package main import
|
||||
|
||||
1.5.0 / 2022-06-20
|
||||
==================
|
||||
|
||||
* Switch to esmodule (#49)
|
||||
|
||||
1.4.0 / 2022-06-13
|
||||
==================
|
||||
|
||||
* Update dependencies (#48)
|
||||
* Modify configuration
|
||||
|
||||
1.3.2 / 2022-01-16
|
||||
==================
|
||||
|
||||
* Improve configuration
|
||||
|
||||
1.3.1 / 2021-11-14
|
||||
==================
|
||||
|
||||
* Fix ricochet.json path
|
||||
|
||||
1.3.0 / 2021-11-14
|
||||
==================
|
||||
|
||||
* Update readme and installation process (#47)
|
||||
|
||||
1.2.0 / 2021-11-13
|
||||
==================
|
||||
|
||||
* Add admin page for site registration (#46)
|
||||
|
||||
1.1.5 / 2021-11-02
|
||||
==================
|
||||
|
||||
* Fix onboarding process (#45)
|
||||
|
||||
1.1.4 / 2021-11-02
|
||||
==================
|
||||
|
||||
* Add locales to npm package
|
||||
|
||||
1.1.3 / 2021-11-02
|
||||
==================
|
||||
|
||||
* Fix bad naming again
|
||||
|
||||
1.1.2 / 2021-11-02
|
||||
==================
|
||||
|
||||
* Update mongo pivotql compiler
|
||||
|
||||
1.1.1 / 2021-11-02
|
||||
==================
|
||||
|
||||
* Fix pivotql incompatibility (#44)
|
||||
|
||||
1.1.0 / 2021-11-01
|
||||
==================
|
||||
|
||||
* Split store backends (#43)
|
||||
* Support queries for list views (#42)
|
||||
* Add endpoints to create/update site configuration (#40)
|
||||
|
||||
1.0.0 / 2021-10-31
|
||||
==================
|
||||
|
||||
* Rename package
|
||||
* Add endpoints to create/update site configuration (#40)
|
||||
|
||||
0.10.1 / 2021-09-16
|
||||
===================
|
||||
|
||||
* Add CDN conf to s3 file backend
|
||||
|
||||
0.10.0 / 2021-09-15
|
||||
===================
|
||||
|
||||
* Remove client2client.io server
|
||||
|
||||
0.9.2 / 2021-09-14
|
||||
==================
|
||||
|
||||
* Add s3proxy configuration flag
|
||||
* Update documentation
|
||||
|
||||
0.9.1 / 2021-05-30
|
||||
==================
|
||||
|
||||
* Update client2client
|
||||
|
||||
0.9.0 / 2021-05-24
|
||||
==================
|
||||
|
||||
* Fix audit security
|
||||
* Update pm2
|
||||
* Update client2client.io to version 2.0.1
|
||||
|
||||
0.8.1 / 2021-05-07
|
||||
==================
|
||||
|
||||
* Allow require some modules in remote scripts
|
||||
|
||||
0.8.0 / 2021-04-13
|
||||
==================
|
||||
|
||||
/!\ Breaking changes: start install from scratch /!\
|
||||
|
||||
* Add hooks
|
||||
* Add file store under siteId prefix
|
||||
* Add way to redirect to storage url instead of proxy it
|
||||
* Add siteId as prefix instead of loading config from server
|
||||
* Refactor config file reading
|
||||
* Use lower case email
|
||||
|
||||
0.7.1 / 2021-03-31
|
||||
==================
|
||||
|
||||
* Increase cookie life
|
||||
|
||||
0.7.0 / 2021-03-11
|
||||
==================
|
||||
|
||||
* Add mongodb backend
|
||||
|
||||
0.6.3 / 2021-03-10
|
||||
==================
|
||||
|
||||
* Remove HOST to listen
|
||||
|
||||
0.6.2 / 2021-03-10
|
||||
==================
|
||||
|
||||
* Can now configure with PORT env
|
||||
|
||||
0.6.1 / 2021-03-06
|
||||
==================
|
||||
|
||||
* Add repl to access store manually from shell
|
||||
|
||||
0.6.0 / 2021-03-05
|
||||
==================
|
||||
|
||||
* Add cron tasks
|
||||
* Fix error code
|
||||
|
||||
0.5.1 / 2021-02-24
|
||||
==================
|
||||
|
||||
* Fix bad behaviour on execute calls that prevent game save
|
||||
|
||||
0.5.0 / 2021-02-21
|
||||
==================
|
||||
|
||||
* Add store migration to avoid manual manipulation
|
||||
* Refactor fileStore to extract backends
|
||||
* Add siteId prefix to store boxes to allow multiple site
|
||||
|
||||
BREAKING CHANGES:
|
||||
Now data are stored by site_id. The migration migrates the
|
||||
data but you need to manually delete old database file
|
||||
if you were using NeDB backend.
|
||||
|
||||
0.4.2 / 2021-02-18
|
||||
==================
|
||||
|
||||
* Update PM2
|
||||
* Add expirable cache
|
||||
|
||||
0.4.1 / 2021-02-13
|
||||
==================
|
||||
|
||||
* Add authentication check endpoint
|
||||
|
||||
0.4.0 / 2021-02-13
|
||||
==================
|
||||
|
||||
* Use referer also for ricochet origin
|
||||
* [Breaking] Remove end '/' from ricochet origin
|
||||
|
||||
0.3.1 / 2021-02-12
|
||||
==================
|
||||
|
||||
* Add socket.io 2.X client compat flag
|
||||
|
||||
0.3.0 / 2021-02-10
|
||||
==================
|
||||
|
||||
* Switch to socket.io v3.x
|
||||
|
21
LICENSE
Normal file
21
LICENSE
Normal file
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2020 Jérémie Pardou
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
464
Readme.md
Normal file
464
Readme.md
Normal file
|
@ -0,0 +1,464 @@
|
|||
# 💡 Ricochet.js
|
||||
|
||||
Ricochet.js is a multi-purpose JSON/File store with serverless capabilities.
|
||||
|
||||
Main features are:
|
||||
|
||||
- Deploy Ricochet.js once and use it for many websites (multi-tenancy)
|
||||
- Use the ready to use general APIs:
|
||||
- A JSON store
|
||||
- A File store
|
||||
- Ability to calls remote javascript functions like [Serverless](https://en.wikipedia.org/wiki/Serverless_computing) or [FaaS](https://en.wikipedia.org/wiki/Function_as_a_service)
|
||||
application
|
||||
- Avoid frontend/backend version disconnect by deploying your backend code alongside
|
||||
to your frontend code on the same CDN/Server.
|
||||
- Zero-knowledge password-less authentication service
|
||||
- Cloud ready, choose your stores:
|
||||
- JSON : Memory, NeDB (Disk), MongoDB, more coming...
|
||||
- File : Memory, Disk, S3 compatible, more coming...
|
||||
- Can manage multiple site with only one backend
|
||||
- Easily scalable
|
||||
- Works on edges
|
||||
|
||||
Some use cases:
|
||||
|
||||
- You don't want to deploy your backend server each time you make a backend modification
|
||||
- You need a simple backend with only some specific code
|
||||
- You want to store structured data and files
|
||||
- You want frontend and backend code to be updated at same time
|
||||
|
||||
## ❓Why Ricochet.js?
|
||||
|
||||
When you create a web application, you nearly always need a server mainly for
|
||||
3 reasons:
|
||||
|
||||
- you need to persist structured and binary data
|
||||
- you need to execute some code that can't be modified or must not be accessible
|
||||
by the client for security reason.
|
||||
- You want some periodic tasks to be executed.
|
||||
|
||||
Ricochet.js proposes features to fulfill these requirements in an elegant way.
|
||||
|
||||
First a *Rest API* to store key-values document, so you can store your structured data.
|
||||
And for each stored resource, you can associate binary files like images, or documents.
|
||||
|
||||
When you need *custom code*, you can bundle javascript code that will be
|
||||
executed in secured context on server side with access to this two stores.
|
||||
|
||||
Finally you can *schedule* hourly or daily actions.
|
||||
|
||||
To use Ricochet.js you need a running instance of the server. You have two option:
|
||||
|
||||
- Using a hosted version (jump to [project initialization](#⚡-initialize-your-project) section)
|
||||
- Running your own instance, continue with the next section
|
||||
|
||||
## 💫 Start your own local instance of Ricochet.js
|
||||
|
||||
First you need to define a random secret string and store it the
|
||||
`RICOCHET_SECRET` env variable or in `.env` file if you prefer.
|
||||
|
||||
The following command helps you to create such a file.
|
||||
|
||||
```sh
|
||||
echo RICOCHET_SECRET=`cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1` > .env
|
||||
```
|
||||
|
||||
Now you can start a Ricochet.js server by using `npx` (🚨 you should have npm version >=7
|
||||
to support *mongodb* or *nedb* store backend):
|
||||
|
||||
```sh
|
||||
npx ricochetjs
|
||||
```
|
||||
|
||||
Or install Ricochet.js globally and launch the server:
|
||||
|
||||
```sh
|
||||
npm install -g ricochetjs
|
||||
# then
|
||||
ricochetjs
|
||||
```
|
||||
|
||||
By default, data are *stored in memory* so if you restart the server, all data
|
||||
are lost. The default configuration is for *development purpose only*.
|
||||
See [server configuration](#server-configuration) for more customization and how
|
||||
to use persistent storages.
|
||||
|
||||
Now the server is running so you can create a new ricochet *site*. To do it,
|
||||
visit the Ricochet.js URL with a browser. By default `http://localhost:4000`.
|
||||
|
||||
Fill the left form with wanted information and click the `Create` button.
|
||||
|
||||
The result should look like the following image:
|
||||
|
||||
![](images/key.png)
|
||||
|
||||
From the response you **MUST save** the `key` value, this key is used to encrypt
|
||||
your server side code hosted alongside with your frontend code.
|
||||
This is the **ONLY** chance to get it so keep it for later and **keep it secret**.
|
||||
|
||||
In the meantime you should have received a mail with a link you must visit
|
||||
to confirm the site creation. This is a security measure to prevent abuse. Click
|
||||
the link to validate the *site* creation. If you've not yet configured any mail
|
||||
provider, check out the server logs to read the confirmation link.
|
||||
|
||||
Now, your server is ready and a site exists. You can follow the next steps to create
|
||||
a new site project.
|
||||
|
||||
## ⚡ Initialize your backend project
|
||||
|
||||
Since you have a Ricochet.js instance up and running, you can use the
|
||||
[project starter](https://github.com/jrmi/ricochetjs-starter) to initialize
|
||||
your backend.
|
||||
|
||||
### Starter usage
|
||||
|
||||
Use `degit` to make your own copy of the starter repository where you want
|
||||
(A good place can be in the backend folder of your project):
|
||||
|
||||
```sh
|
||||
npx degit https://github.com/jrmi/ricochetjs-starter
|
||||
```
|
||||
|
||||
Then install dependencies:
|
||||
|
||||
```sh
|
||||
npm install
|
||||
```
|
||||
|
||||
Create a `.env` file from the `.env.dist` file and customize it by adding your
|
||||
previously generated site key with Ricochet.js.
|
||||
|
||||
You can serve the default project by executing:
|
||||
|
||||
```sh
|
||||
npm run serve
|
||||
```
|
||||
|
||||
or if you use an external instance of Ricochet.js, you can use the tunnel version:
|
||||
|
||||
```sh
|
||||
npm run tunnel
|
||||
```
|
||||
|
||||
### Test with curl
|
||||
|
||||
To test the script, a Ricochet.js server must be running.
|
||||
|
||||
In the following example we assume that you use your local Ricochet.js instance
|
||||
available on `http://localhost:4000` but you can replace this URL by any ricochet
|
||||
instance that have access to your backend package server. We also assume that your
|
||||
backend server is on `http://localhost:9000` but if you use a tunnel, use the
|
||||
address given by the npm command.
|
||||
|
||||
You can use `curl` to test the API:
|
||||
|
||||
```sh
|
||||
curl -X POST -H "Content-Type: application/json" -H "X-Ricochet-Origin: http://localhost:9000" -d '{"some":"data"}' http://localhost:4000/exampleSite/store/publicData/
|
||||
```
|
||||
|
||||
And get the content of the `publicData` box:
|
||||
|
||||
```sh
|
||||
curl -X GET -H "Content-Type: application/json" -H "X-Ricochet-Origin: http://localhost:9000" http://localhost:4000/exampleSite/store/publicData/
|
||||
```
|
||||
|
||||
### Starter customization
|
||||
|
||||
You can freely modify `src/index.js` file to declare your store, hooks,
|
||||
custom functions, ...
|
||||
|
||||
Remember that the server bundle will be encrypted and should be used by
|
||||
ricochet server with corresponding *site* configuration.
|
||||
|
||||
Remember to also define a `SECRET` environment variable for the server
|
||||
(Can be defined in same `.env` file if you start the server from here).
|
||||
|
||||
The server should be listening on `http://localhost:4000`.
|
||||
|
||||
### Deploy your project
|
||||
|
||||
Once you have finished your code, you must bundle it to prepare for deployment:
|
||||
|
||||
```sh
|
||||
npm run build
|
||||
```
|
||||
|
||||
Yes, that's true, you are bundling the backend code with webpack!
|
||||
|
||||
This bundle can now be deployed on any content delivery network and can
|
||||
(should?) be deployed alongside with your frontend code.
|
||||
|
||||
## 💪 How does it work?
|
||||
|
||||
Each time you call an API you should have at least one of this HTTP header:
|
||||
*x-ricochet-origin*, *referer*, *origin*. These headers are used to determine the website
|
||||
where the backend code is stored. Let's call it the `<ricochetOrigin>`. By default
|
||||
if you use a *browser*, *referer* or *origin* should be included by default.
|
||||
|
||||
On the first call of any API endpoint for a specific *siteId*, the file
|
||||
`<ricochetOrigin>/ricochet.json` is downloaded, decrypted and executed by the
|
||||
Ricochet.js server.
|
||||
|
||||
This is the encrypted server side bundle that configure Ricochet.js for this *siteId*.
|
||||
|
||||
This file MUST exist before being able to call any Rest API.
|
||||
|
||||
The script must define and export a main function that has access to
|
||||
Ricochet.js server context. The main function is called with an object as
|
||||
parameters that contains the following properties:
|
||||
|
||||
- **store**: Allow to access the JSON store.
|
||||
- **hooks**: Add some hooks to the store.
|
||||
- **functions**: Add arbitrary custom function to the API.
|
||||
- **schedules**: Schedules hourly or daily function calls.
|
||||
|
||||
All these parameters are explained in the next sections.
|
||||
|
||||
This script is executed on *Ricochet.js* server so don't rely on browser
|
||||
capabilities.
|
||||
|
||||
This script allows you to configure the ricochet server for your *siteId* in a
|
||||
declarative way.
|
||||
|
||||
Once you have initialized your site with the setup script (the `ricochet.json` file)
|
||||
you can use the [rest API](#🔌-rest-api) to store data, files or call
|
||||
custom functions.
|
||||
|
||||
## 📞 Server API
|
||||
|
||||
### Store
|
||||
|
||||
To access JSON store from the *setup* function in your `ricochet.json` file, you can use the `store` parameter.
|
||||
|
||||
This a store instance scoped to the current *siteId*. You have access to the
|
||||
following methods:
|
||||
|
||||
**store.createOrUpdate(boxId, options)**: create, if not exist, or update a *boxId* store. Options are:
|
||||
|
||||
| Name | Description | Default |
|
||||
| -------- | ----------------------------------------------------------------------------- | --------- |
|
||||
| security | Security model of the box. Values are string: "public", "readOnly", "private" | "private" |
|
||||
|
||||
**store.list(boxId, options)**: list box content. Options are:
|
||||
|
||||
| Name | Description | Default |
|
||||
| ---------- | ----------------------------------------------------------------------------------------------------------------------- | ------- |
|
||||
| sort | Name of sort field | "_id" |
|
||||
| asc | Ascending order ? | true |
|
||||
| skip | How many result to skip | 0 |
|
||||
| limit | Limit result count. | 50 |
|
||||
| onlyFields | Limit result to this fields. | [] |
|
||||
| q | A query to filter results. The query must be written in the [pivotql](https://github.com/jrmi/pivotql/) query language. | "" |
|
||||
|
||||
**store.save(boxId, id, data)**: Create or update the given id resource with given data.
|
||||
|
||||
**store.update(boxId, id, data)**: Update the resource. Fails if not existing.
|
||||
|
||||
**store.delete(boxId, id)** try to delete the corresponding resource.
|
||||
|
||||
### Hooks
|
||||
|
||||
Hooks allow you to customize the way data are accessed for one specific
|
||||
box or for all.
|
||||
You can add a hook by pushing a function to the `hooks` array from parameters.
|
||||
|
||||
By using hooks you can customize behavior of the generic Rest APIs to change
|
||||
way they work.
|
||||
|
||||
### Custom functions
|
||||
|
||||
Custom functions can be defined by adding a function to the `function` object.
|
||||
The key will be the endpoint and the value the executed callback. The key is the
|
||||
name of the function and the value must be a function executed when the query
|
||||
is received.
|
||||
|
||||
Then you can call the function later using the following endpoint.
|
||||
|
||||
### ANY on /:siteId/execute/:functionName/
|
||||
|
||||
Returns the value returned by the function.
|
||||
|
||||
### Schedules
|
||||
|
||||
Define daily or hourly schedules by pushing functions to this object for the
|
||||
key `daily` or `hourly`.
|
||||
|
||||
[More details coming soon...]
|
||||
|
||||
## 🔌 Rest API
|
||||
|
||||
This section describe the Rest api of Ricochet.js.
|
||||
|
||||
### GET on /:siteId/store/:boxId/
|
||||
|
||||
To list available resources in this box.
|
||||
|
||||
### POST on /:siteId/store/:boxId/
|
||||
|
||||
With a JSON payload.
|
||||
|
||||
To create a new resource in `boxId`
|
||||
|
||||
### GET on /:siteId/store/:boxId/:resourceId
|
||||
|
||||
**returns:** previously saved `resourceId` from `boxId`.
|
||||
|
||||
### PUT on /:siteId/store/:boxId/:resourceId
|
||||
|
||||
With a JSON payload to update the resource with this Id.
|
||||
|
||||
### POST on /:siteId/store/:boxId/:resourceId/file
|
||||
|
||||
To add a file to this resource.
|
||||
|
||||
**Returns** the file Path for later uses.
|
||||
|
||||
### GET on /:siteId/store/:boxId/:resourceId/file
|
||||
|
||||
List the files associated with this resource.
|
||||
|
||||
### ANY on /:siteId/execute/:functionName/:id?
|
||||
|
||||
Execute a previously defined in *setup* custom function and return
|
||||
the result to caller.
|
||||
|
||||
The functions receive an object with the following properties:
|
||||
|
||||
- `store` the very same store API used for JSON store API. Allow you to do some
|
||||
protected operation
|
||||
- `method` the http verb used
|
||||
- `query` a dict of query parameters
|
||||
- `body` the request payload
|
||||
- `id` the optional `id` if provided
|
||||
|
||||
### POST on /:siteId/auth/
|
||||
|
||||
By posting a JSON containing a user email:
|
||||
|
||||
```json
|
||||
{"userEmail": "user@example.com"}
|
||||
```
|
||||
|
||||
an email will be sent to this address containing a link to authenticate to the platform.
|
||||
|
||||
This link is: `<ricochetOrigin>/login/:userId/:token`
|
||||
|
||||
Your frontend should handle this URL and extract the `userId` and the `token` to authenticate the user.
|
||||
|
||||
`userId` is the unique user identifier corresponding to the used email address.
|
||||
|
||||
The `token` is valid during 1 hour.
|
||||
|
||||
### POST on /:siteId/auth/verify/:userId/:token
|
||||
|
||||
Allow the client to verify the token and authenticate against the service.
|
||||
|
||||
### GET on /:siteId/auth/check
|
||||
|
||||
Allow the client to verify if a user is authenticated. Returns `403` http code if not authenticated.
|
||||
|
||||
### POST on /_register/
|
||||
|
||||
To register a new site. A mail is sent each time you want to create a website to confirm the creation.
|
||||
|
||||
The json content should look like this:
|
||||
|
||||
```json
|
||||
{
|
||||
"siteId": "the new site Id",
|
||||
"name": "Name displayed in mail",
|
||||
"owner": "owner email address for security, confirmation mails are send here",
|
||||
"emailFrom": "email address displayed in email sent for this site"
|
||||
}
|
||||
```
|
||||
|
||||
In the response you'll get an extra `key` property. You MUST save it for later use.
|
||||
This is the ONLY chance to get it. This is the encryption key you need to crypt
|
||||
your `ricochet.json` file.
|
||||
|
||||
### PATCH on /_register/:siteId
|
||||
|
||||
To update a site configuration. To confirm the modification, a mail is sent to the site owner.
|
||||
|
||||
The json content should look like this:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "Name displayed in mail",
|
||||
"emailFrom": "email address displayed in email sent for this site"
|
||||
}
|
||||
```
|
||||
|
||||
You can't modify owner email (yet?).
|
||||
|
||||
## ⚙️ Server configuration
|
||||
|
||||
You can configure your instance by settings environment variables or using
|
||||
`.env` file:
|
||||
|
||||
| Name | description | default value |
|
||||
| ------------------------- | ------------------------------------------------------------------------------------------ | ------------- |
|
||||
| SERVER_HOST | '0.0.0.0' to listen from all interfaces. | 127.0.0.1 |
|
||||
| SERVER_PORT | Server listen on this port. | 4000 |
|
||||
| SERVER_NAME | Server name displayed on mail for example. | Ricochet.js |
|
||||
| RICOCHET_SECRET | Secret to hash password and cookie. Keep it safe. | |
|
||||
| SITE_REGISTRATION_ENABLED | Set to `0` to disable site registration. | 1 |
|
||||
| FILE_STORAGE | Configure file store type. Allowed values: 'memory', 'disk', 's3'. | memory |
|
||||
| STORE_BACKEND | Configure JSON store provider. Allowed values: 'memory', 'nedb', 'mongodb'. | memory |
|
||||
| EMAIL_* | To configure email provider. Put "fake" in EMAIL_HOST to log mail instead of sending them. | |
|
||||
|
||||
Note: "memory" stores are for development purpose only and remember that you
|
||||
loose all your data each time you stop the server.
|
||||
|
||||
Note: for "mongodb" backend, you need to install `npm install mongodb@3`.
|
||||
Note: for "nedb" backend, you need to install `npm install @seald-io/nedb`.
|
||||
|
||||
If you use *disk file store* you need to configure this variables:
|
||||
|
||||
| Name | description | default value |
|
||||
| ---------------- | --------------------------- | ------------------ |
|
||||
| DISK_DESTINATION | Base path of the file store | /tmp/ricochet_file |
|
||||
|
||||
If you use *S3 file store* configure also this variables:
|
||||
|
||||
| Name | description | default value |
|
||||
| ------------- | -------------------------------------------------------------------------- | ------------- |
|
||||
| S3_ACCESS_KEY | S3 access key | |
|
||||
| S3_SECRET_KEY | S3 secret key | |
|
||||
| S3_ENDPOINT | S3 endpoint | |
|
||||
| S3_BUCKET | S3 bucket | |
|
||||
| S3_REGION | S3 Region | |
|
||||
| S3_PROXY | Set to "1" to enable to proxy file (otherwise it's a redirect to the file) | 0 |
|
||||
| S3_SIGNED_URL | Set to "0" to disabled usage of signed URL | true |
|
||||
| S3_CDN | Set the CDN prefix to enable it | |
|
||||
|
||||
For *nedb* JSON store provider:
|
||||
|
||||
| Name | description | default value |
|
||||
| -------------------- | ----------------------------- | ------------- |
|
||||
| NEDB_BACKEND_DIRNAME | NeDB base path for DB storage | |
|
||||
|
||||
For *mongodb* JSON store provider:
|
||||
|
||||
| Name | description | default value |
|
||||
| ---------------- | ------------------------- | ------------- |
|
||||
| MONGODB_URI | Mongodb configuration URI | |
|
||||
| MONGODB_DATABASE | Database to use | |
|
||||
|
||||
## 🛠 Prepare ricochet.js for development
|
||||
|
||||
Clone the repository then install dependencies:
|
||||
|
||||
```sh
|
||||
npm ci
|
||||
```
|
||||
|
||||
Create `.env` file from `.env.dist` file and change the values.
|
||||
|
||||
and start the instance in dev mode:
|
||||
|
||||
```sh
|
||||
npm run dev
|
||||
```
|
19
docker/Dockerfile
Normal file
19
docker/Dockerfile
Normal file
|
@ -0,0 +1,19 @@
|
|||
FROM node:14

ENV NODE_ENV=production
# Install global npm packages under the unprivileged user's home.
ENV NPM_CONFIG_PREFIX=/home/node/.npm-global
ENV PATH=$PATH:/home/node/.npm-global/bin

# Pre-create the site registry file, owned by the `node` user.
RUN echo "{}" > site.json && chown node:node site.json

# NOTE(review): the Readme installs the package as `ricochetjs` — confirm
# that `ricochet.js` is the intended npm package name here.
RUN npm install -g ricochet.js pino-tiny pino-tee

WORKDIR /home/node
USER node

# Tee logs to ./ricochet.log while pretty-printing them to stdout.
CMD ricochet | pino-tee info ./ricochet.log | pino-tiny

EXPOSE 4000

# docker build -t "ricochet:latest" .
# docker run -it --rm -e "SECRET=12345" --name "my-ricochet" ricochet:latest
|
7
i18next-parser.config.js
Normal file
7
i18next-parser.config.js
Normal file
|
@ -0,0 +1,7 @@
|
|||
module.exports = {
|
||||
defaultValue: '_NOT_TRANSLATED_',
|
||||
locales: ['en', 'fr'],
|
||||
output: 'locales/$LOCALE/$NAMESPACE.json',
|
||||
|
||||
sort: true,
|
||||
};
|
BIN
images/key.png
Normal file
BIN
images/key.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 26 KiB |
11
locales/en/translation.json
Normal file
11
locales/en/translation.json
Normal file
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"Auth mail html message": "<p>Hello,</p>\n\n<p>Here is the link that allows you to log in to {{siteName}}:</p>\n\n<a href=\"{{url}}\">{{url}}</a>\n\n<p>Please click on the link or copy and paste it into your browser.</p>\n\n<p>Yours sincerely,</p>\n\n<p>{{siteName}} team.</p>",
|
||||
"Auth mail text message": "Hello,\n\nHere is the link that allows you to log in {{siteName}}:\n\n{{url}}\n\nPlease copy and paste it into your browser.\n\nYours sincerely,\n\n{{siteName}} team.",
|
||||
"Your authentication link": "[{{siteName}}] Your authentication link",
|
||||
"Please confirm site creation": "Confirm site creation",
|
||||
"Site creation text message": "Hello,\n\ndo you agree to create the site \"{{siteId}}\" on {{siteName}}? Click or copy/paste into your browser the following link to confirm your action:\n\n{{url}}\n\nYours sincerely,\n\n{{siteName}} team.",
|
||||
"Site creation html message": "<p>Hello,</p>\n\n<p>do you agree to create the site \"{{siteId}}\" on {{siteName}}? Click or copy/paste into your browser the following link to confirm your action:</p>\n\n<a href=\"{{url}}\">{{url}}</a>\n\n<p>Yours sincerely,</p>\n\n<p>{{siteName}} team.</p>",
|
||||
"Please confirm site update": "Confirm site update",
|
||||
"Site update text message": "Hello,\n\ndo you agree to update the site \"{{siteId}}\" on {{siteName}}? Click or copy/paste into your browser the following link to confirm your action:\n\n{{url}}\n\nYours sincerely,\n\n{{siteName}} team.",
|
||||
"Site update html message": "<p>Hello,</p>\n\n<p>do you agree to update the site \"{{siteId}}\" on {{siteName}}? Click or copy/paste into your browser the following link to confirm your action:</p>\n\n<a href=\"{{url}}\">{{url}}</a>\n\n<p>Yours sincerely,</p>\n\n<p>{{siteName}} team.</p>"
|
||||
}
|
11
locales/fr/translation.json
Normal file
11
locales/fr/translation.json
Normal file
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"Auth mail html message": "<p>Bonjour,</p>\n\n<p>Voici le lien qui vous permet de vous connecter à {{siteName}} :</p>\n\n<a href=\"{{url}}\">{{url}}</a>\n\n<p>Cliquez sur le lien ou copiez et collez le dans votre navigateur.</p>\n\n<p>Cordialement,</p>\n\n\n<p>L'équipe de {{siteName}}.</p>",
|
||||
"Auth mail text message": "Bonjour,\n\nVoici le lien qui vous permet de vous connecter à {{siteName}} :\n\n{{url}}\n\nVeuillez le copier-coller dans votre navigateur.\n\nCordialement,\n\nL'équipe de {{siteName}}.",
|
||||
"Your authentication link": "[{{siteName}}] Votre lien d'authentification",
|
||||
"Please confirm site creation": "Confirmation de création d'un site",
|
||||
"Site creation text message": "Bonjour,\n\nÊtes-vous d'accord pour créer le site « {{siteId}} » sur {{siteName}} ? Copiez-collez le lien suivant dans votre navigateur pour confirmer votre action :\n\n{{url}}\n\nCordialement,\n\nL'équipe de {{siteName}}.",
|
||||
"Site creation html message": "<p>Bonjour,</p>\n\n<p>Êtes-vous d'accord pour créer le site « {{siteId}} » sur {{siteName}} ? Cliquez ou copiez-collez le lien suivant dans votre navigateur afin de confirmer votre action :</p>\n\n<a href=\"{{url}}\">{{url}}</a>\n\n<p>Cordialement,</p>\n\n\n<p>L'équipe de {{siteName}}.</p>",
|
||||
"Please confirm site update": "Confirmation de mise à jour d'un site",
|
||||
"Site update text message": "Bonjour,\n\nÊtes-vous d'accord pour mettre à jour la configuration du site « {{siteId}} » sur {{siteName}} ? Copiez-collez le lien dans votre navigateur pour confirmer votre action :\n\n{{url}}\n\nCordialement,\n\nL'équipe de {{siteName}}.",
|
||||
"Site update html message": "<p>Bonjour,</p>\n\n<p>Êtes-vous d'accord pour mettre à jour la configuration du site « {{siteId}} » {{siteName}} ? Cliquez ou copiez/collez le lien suivant dans votre navigateur afin de confirmer votre action :</p>\n\n<a href=\"{{url}}\">{{url}}</a>\n\n<p>Cordialement,</p>\n\n\n<p>L'équipe de {{siteName}}.</p>"
|
||||
}
|
23355
package-lock.json
generated
Normal file
23355
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
92
package.json
Normal file
92
package.json
Normal file
|
@ -0,0 +1,92 @@
|
|||
{
|
||||
"name": "ricochetjs",
|
||||
"version": "1.6.0",
|
||||
"description": "Multi-purpose deploy once prototyping backend",
|
||||
"bin": {
|
||||
"ricochetjs": "src/cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^14 || ^16 || >=18"
|
||||
},
|
||||
"type": "module",
|
||||
"module": "src/index.js",
|
||||
"exports": {
|
||||
".": "./src/index.js",
|
||||
"./encrypt-webpack-plugin": "./src/EncryptPlugin.js"
|
||||
},
|
||||
"scripts": {
|
||||
"clean": "rimraf dist",
|
||||
"lint": "eslint src",
|
||||
"dev": "nodemon src/cli.js | pino-tiny",
|
||||
"cli": "node src/cli.js",
|
||||
"test": "NODE_OPTIONS=--experimental-vm-modules npx jest --watch",
|
||||
"test:server": "PORT=5000 npx serve src/__test__/test.files",
|
||||
"coverage": "NODE_OPTIONS=--experimental-vm-modules npx jest --coverage",
|
||||
"generateKey": "node src/cli.js --generate-key",
|
||||
"version": "git changelog -n -t $npm_package_version && git add CHANGELOG.md",
|
||||
"ci": "npx start-server-and-test test:server http://localhost:5000 coverage",
|
||||
"i18n:parser": "npx i18next-parser 'src/**/*.js'"
|
||||
},
|
||||
"keywords": [
|
||||
"server",
|
||||
"json",
|
||||
"store",
|
||||
"backend",
|
||||
"prototyping"
|
||||
],
|
||||
"author": "Jérémie Pardou",
|
||||
"repository": "https://github.com/jrmi/ricochet.js",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "^3.107.0",
|
||||
"@aws-sdk/s3-request-presigner": "^3.107.0",
|
||||
"body-parser": "^1.19.0",
|
||||
"cookie-session": "^2.0.0",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^10.0.0",
|
||||
"easy-no-password": "^1.2.2",
|
||||
"express": "^4.18.1",
|
||||
"express-request-language": "^1.1.15",
|
||||
"i18next": "^19.8.4",
|
||||
"i18next-fs-backend": "^1.0.7",
|
||||
"i18next-http-middleware": "^3.1.0",
|
||||
"mime-types": "^2.1.27",
|
||||
"multer": "^1.4.5-lts.1",
|
||||
"multer-s3": "^3.0.1",
|
||||
"nanoid": "^4.0.0",
|
||||
"node-cache": "^5.1.2",
|
||||
"node-schedule": "^2.0.0",
|
||||
"nodemailer": "^6.7.5",
|
||||
"pino": "^6.7.0",
|
||||
"pino-http": "^5.3.0",
|
||||
"pivotql-compiler-javascript": "^0.2.1",
|
||||
"pivotql-compiler-mongodb": "^0.4.2",
|
||||
"pivotql-parser-expression": "^0.4.2",
|
||||
"vm2": "^3.9.11",
|
||||
"yargs": "^17.5.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@seald-io/nedb": "~2.2.0",
|
||||
"mongodb": "^4.0.0",
|
||||
"webpack-sources": "~2.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@seald-io/nedb": "^2.2.0",
|
||||
"eslint": "^8.17.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"eslint-plugin-prettier": "^4.0.0",
|
||||
"jest": "^26.6.3",
|
||||
"mongodb": "^4.7.0",
|
||||
"nodemon": "^2.0.6",
|
||||
"pino-pretty": "^4.3.0",
|
||||
"pino-tiny": "^1.0.0",
|
||||
"prettier": "^2.2.1",
|
||||
"rimraf": "^3.0.2",
|
||||
"supertest": "^4.0.2",
|
||||
"tempy": "^0.7.1"
|
||||
},
|
||||
"jest": {
|
||||
"transform": {},
|
||||
"testEnvironment": "node"
|
||||
}
|
||||
}
|
62
public/404.html
Normal file
62
public/404.html
Normal file
|
@ -0,0 +1,62 @@
|
|||
<!doctype html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Page Not Found</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<style>
|
||||
* {
|
||||
line-height: 1.2;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html {
|
||||
color: #888;
|
||||
display: table;
|
||||
font-family: sans-serif;
|
||||
height: 100%;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
display: table-cell;
|
||||
vertical-align: middle;
|
||||
margin: 2em auto;
|
||||
}
|
||||
|
||||
h1 {
|
||||
color: #555;
|
||||
font-size: 2em;
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
p {
|
||||
margin: 0 auto;
|
||||
width: 280px;
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 280px) {
|
||||
|
||||
body,
|
||||
p {
|
||||
width: 95%;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 1.5em;
|
||||
margin: 0 0 0.3em;
|
||||
}
|
||||
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h1>Page Not Found</h1>
|
||||
<p>Sorry, but the page you were trying to view does not exist.</p>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
<!-- IE needs 512+ bytes: https://docs.microsoft.com/archive/blogs/ieinternals/friendly-http-error-pages -->
|
45
public/css/main.css
Normal file
45
public/css/main.css
Normal file
|
@ -0,0 +1,45 @@
|
|||
:root {
|
||||
--bg-color: #ffffff;
|
||||
--bg-secondary-color: #f3f3f6;
|
||||
--color-primary: #14854f;
|
||||
--color-lightGrey: #d2d6dd;
|
||||
--color-grey: #747681;
|
||||
--color-darkGrey: #3f4144;
|
||||
--color-error: #d43939;
|
||||
--color-success: #28bd14;
|
||||
--grid-maxWidth: 120rem;
|
||||
--grid-gutter: 2rem;
|
||||
--font-size: 1.6rem;
|
||||
--font-color: #333333;
|
||||
--font-family-sans: sans-serif;
|
||||
--font-family-mono: monaco, 'Consolas', 'Lucida Console', monospace;
|
||||
}
|
||||
|
||||
body.dark {
|
||||
--bg-color: #04293a;
|
||||
--bg-secondary-color: #131316;
|
||||
--font-color: #f5f5f5;
|
||||
--color-grey: rgb(37, 102, 90);
|
||||
--color-darkGrey: #777;
|
||||
}
|
||||
|
||||
.card .content {
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.card .text-error {
|
||||
padding: 1em;
|
||||
}
|
||||
|
||||
.card form {
|
||||
padding-bottom: 1em;
|
||||
}
|
||||
|
||||
.card .field {
|
||||
margin-bottom: 0.5em;
|
||||
}
|
||||
|
||||
.card .help-text {
|
||||
font-size: 1em;
|
||||
color: var(--color-darkGrey);
|
||||
}
|
BIN
public/favicon.ico
Normal file
BIN
public/favicon.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 766 B |
BIN
public/icon.png
Normal file
BIN
public/icon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.9 KiB |
0
public/img/.gitignore
vendored
Normal file
0
public/img/.gitignore
vendored
Normal file
29
public/index.html
Normal file
29
public/index.html
Normal file
|
@ -0,0 +1,29 @@
|
|||
<!doctype html>
|
||||
<html class="no-js" lang="">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Ricochet.js admin</title>
|
||||
<meta name="description" content="">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
|
||||
<meta property="og:title" content="">
|
||||
<meta property="og:type" content="">
|
||||
<meta property="og:url" content="">
|
||||
<meta property="og:image" content="">
|
||||
|
||||
<link rel="manifest" href="site.webmanifest">
|
||||
<link rel="apple-touch-icon" href="icon.png">
|
||||
|
||||
<link rel="stylesheet" href="https://unpkg.com/chota@latest">
|
||||
<link rel="stylesheet" href="css/main.css">
|
||||
|
||||
<meta name="theme-color" content="#fafafa">
|
||||
</head>
|
||||
|
||||
<body class="dark">
|
||||
<div id="root" />
|
||||
<script type="module" src="js/main.js"></script>
|
||||
</body>
|
||||
|
||||
</html>
|
184
public/js/main.js
Normal file
184
public/js/main.js
Normal file
|
@ -0,0 +1,184 @@
|
|||
import {
|
||||
html,
|
||||
render,
|
||||
} from 'https://unpkg.com/htm@latest/preact/index.mjs?module';
|
||||
import {
|
||||
useState,
|
||||
useEffect,
|
||||
} from 'https://unpkg.com/preact@latest/hooks/dist/hooks.module.js?module';
|
||||
|
||||
function SiteForm({ create = false }) {
|
||||
const [newSite, setNewSite] = useState({});
|
||||
const [error, setError] = useState(null);
|
||||
const [siteKey, setSiteKey] = useState(null);
|
||||
const [siteUpdated, setSiteUpdated] = useState(false);
|
||||
|
||||
const onChange = (att) => (e) => {
|
||||
setNewSite((prev) => ({ ...prev, [att]: e.target.value }));
|
||||
};
|
||||
|
||||
const onClick = async (e) => {
|
||||
e.preventDefault();
|
||||
setSiteUpdated(false);
|
||||
setError(null);
|
||||
|
||||
const result = await fetch(
|
||||
create ? '/_register/' : `/_register/${newSite.siteId}`,
|
||||
{
|
||||
method: create ? 'POST' : 'PATCH',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(newSite),
|
||||
}
|
||||
);
|
||||
|
||||
if (result.status === 400) {
|
||||
const { message } = await result.json();
|
||||
setError(message);
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.status === 403) {
|
||||
const { message } = await result.json();
|
||||
setError(message);
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.status === 404) {
|
||||
const { message } = await result.json();
|
||||
setError(message);
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.status >= 300) {
|
||||
setError('Unknown error, try again later...');
|
||||
return;
|
||||
}
|
||||
|
||||
setNewSite({});
|
||||
|
||||
if (create) {
|
||||
const { key } = await result.json();
|
||||
setSiteKey(key);
|
||||
} else {
|
||||
setSiteUpdated(true);
|
||||
}
|
||||
};
|
||||
|
||||
return html`<div class="card">
|
||||
<header>
|
||||
<h2>${create ? 'Create new site' : 'Update site'}</h2>
|
||||
</header>
|
||||
<div class="content">
|
||||
<form>
|
||||
<div class="field">
|
||||
<label>
|
||||
Site Id:
|
||||
<input value=${newSite.siteId || ''} onChange=${onChange('siteId')}
|
||||
/></label>
|
||||
<p class="help-text">Only letters and '_' are accepted.</p>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>
|
||||
Name:
|
||||
<input value=${newSite.name || ''} onChange=${onChange('name')}
|
||||
/></label>
|
||||
<p class="help-text">This name will appears in sent email.</p>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>
|
||||
Email from:
|
||||
<input
|
||||
value=${newSite.emailFrom || ''}
|
||||
onChange=${onChange('emailFrom')}
|
||||
/>
|
||||
</label>
|
||||
<p class="help-text">
|
||||
All sent email for this site will have this origin.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
${create &&
|
||||
html`<div class="field">
|
||||
<label>
|
||||
Owner:
|
||||
<input value=${newSite.owner || ''} onChange=${onChange('owner')} />
|
||||
</label>
|
||||
<p class="help-text">
|
||||
This is the site owner email. Confirmation links are sent to this
|
||||
address.
|
||||
</p>
|
||||
</div>`}
|
||||
</form>
|
||||
|
||||
${error && html`<p class="text-error">${error}</p>`}
|
||||
${siteKey &&
|
||||
html`<p class="text-success">
|
||||
Success! Please, save the site encryption key:
|
||||
<input value=${siteKey} onChange=${(e) => e.preventDefault()} />
|
||||
this is the last opportunity to read it.
|
||||
</p>
|
||||
<p class="text-success">
|
||||
Now you must confirm the site creation before using it, you will
|
||||
receive an email at the 'owner' address with a confirmation link.
|
||||
</p>`}
|
||||
${siteUpdated &&
|
||||
html` <p class="text-success">
|
||||
Success! Now you must confirm the site update by visiting the
|
||||
confirmation link we have just sent to the owner email.
|
||||
</p>`}
|
||||
</div>
|
||||
<footer>
|
||||
<button
|
||||
class="button primary"
|
||||
onClick=${onClick}
|
||||
disabled=${!newSite.siteId}
|
||||
>
|
||||
${create ? 'Create site' : 'Update site'}
|
||||
</button>
|
||||
</footer>
|
||||
</div>`;
|
||||
}
|
||||
|
||||
function App() {
|
||||
const [settings, setSettings] = useState({});
|
||||
|
||||
useEffect(() => {
|
||||
let mounted = true;
|
||||
const updateRegistration = async () => {
|
||||
const result = await (await fetch('/site/settings')).json();
|
||||
console.log(result);
|
||||
if (mounted) setSettings(result);
|
||||
};
|
||||
updateRegistration();
|
||||
return () => (mounted = false);
|
||||
}, []);
|
||||
|
||||
return html`<div class="container">
|
||||
<div class="row">
|
||||
<div class="col-4" />
|
||||
<div class="col"><h1>Ricochet.js admin</h1></div>
|
||||
</div>
|
||||
${settings.registrationEnabled &&
|
||||
html`<div class="row">
|
||||
<div class="col-2" />
|
||||
<div class="col-4">
|
||||
<${SiteForm} create />
|
||||
</div>
|
||||
<div class="col-4">
|
||||
<${SiteForm} />
|
||||
</div>
|
||||
</div>`}
|
||||
${settings.registrationEnabled === false &&
|
||||
html`<div class="row">
|
||||
<div class="col-4" />
|
||||
<div class="col">
|
||||
<h2 class="text-error">Site registration is disabled</h2>
|
||||
</div>
|
||||
</div>`}
|
||||
</div>`;
|
||||
}
|
||||
|
||||
render(html`<${App} />`, document.body);
|
5
public/robots.txt
Normal file
5
public/robots.txt
Normal file
|
@ -0,0 +1,5 @@
|
|||
# www.robotstxt.org/
|
||||
|
||||
# Allow crawling of all content
|
||||
User-agent: *
|
||||
Disallow:
|
14
public/site.webmanifest
Normal file
14
public/site.webmanifest
Normal file
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"short_name": "Ricochet.js",
|
||||
"name": "Ricochet.js admin",
|
||||
"icons": [
|
||||
{
|
||||
"src": "icon.png",
|
||||
"type": "image/png",
|
||||
"sizes": "192x192"
|
||||
}
|
||||
],
|
||||
"start_url": "/",
|
||||
"background_color": "#fafafa",
|
||||
"theme_color": "#fafafa"
|
||||
}
|
BIN
public/tile-wide.png
Normal file
BIN
public/tile-wide.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.8 KiB |
BIN
public/tile.png
Normal file
BIN
public/tile.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.4 KiB |
30
src/EncryptPlugin.js
Normal file
30
src/EncryptPlugin.js
Normal file
|
@ -0,0 +1,30 @@
|
|||
import webpackSources from 'webpack-sources';
|
||||
import { encrypt } from './crypt.js';
|
||||
|
||||
const { RawSource } = webpackSources;
|
||||
|
||||
export const RICOCHET_FILE = process.env.RICOCHET_FILE || 'ricochet.json';
|
||||
|
||||
class EncryptPlugin {
|
||||
constructor({ algorithm = 'aes-256-cbc', key }) {
|
||||
this.algorithm = algorithm;
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
apply(compiler) {
|
||||
compiler.hooks.compilation.tap('EncryptPlugin', (compilation) => {
|
||||
compilation.hooks.afterProcessAssets.tap('EncryptPlugin', () => {
|
||||
console.log(`Encrypt ${RICOCHET_FILE} content.`);
|
||||
compilation.updateAsset(RICOCHET_FILE, (rawSource) => {
|
||||
return new RawSource(
|
||||
JSON.stringify(
|
||||
encrypt(rawSource.buffer(), this.key, this.algorithm)
|
||||
)
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default EncryptPlugin;
|
67
src/__test__/auth.test.js
Normal file
67
src/__test__/auth.test.js
Normal file
|
@ -0,0 +1,67 @@
|
|||
import request from 'supertest';
|
||||
import express from 'express';
|
||||
import { jest } from '@jest/globals';
|
||||
|
||||
import auth from '../authentication';
|
||||
|
||||
describe('Authentication test', () => {
|
||||
let query;
|
||||
let onSendToken;
|
||||
let onLogin;
|
||||
let onLogout;
|
||||
|
||||
beforeEach(() => {
|
||||
onSendToken = jest.fn(() => Promise.resolve());
|
||||
onLogin = jest.fn();
|
||||
onLogout = jest.fn();
|
||||
const app = express();
|
||||
app.use(express.json());
|
||||
app.use(
|
||||
auth({
|
||||
secret: 'My test secret key',
|
||||
onSendToken,
|
||||
onLogin,
|
||||
onLogout,
|
||||
})
|
||||
);
|
||||
query = request(app);
|
||||
});
|
||||
|
||||
it('should get and verify token', async () => {
|
||||
await query
|
||||
.post('/auth/')
|
||||
.set('X-Auth-Host', 'http://localhost:5000/')
|
||||
.send({ userEmail: 'test@yopmail' })
|
||||
.expect(200);
|
||||
|
||||
const userId = onSendToken.mock.calls[0][0].userId;
|
||||
const token = onSendToken.mock.calls[0][0].token;
|
||||
|
||||
await query.get(`/auth/verify/${userId}/${token}`).expect(200);
|
||||
|
||||
expect(onLogin).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should failed verify token', async () => {
|
||||
await query.get(`/auth/verify/fakeuserid/badtoken`).expect(403);
|
||||
|
||||
expect(onLogin).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should login and logout', async () => {
|
||||
await query
|
||||
.post('/auth/')
|
||||
.set('X-Auth-Host', 'http://localhost:5000/')
|
||||
.send({ userEmail: 'test@yopmail' })
|
||||
.expect(200);
|
||||
|
||||
const userId = onSendToken.mock.calls[0][0].userId;
|
||||
const token = onSendToken.mock.calls[0][0].token;
|
||||
|
||||
await query.get(`/auth/verify/${userId}/${token}`).expect(200);
|
||||
|
||||
await query.get(`/auth/logout/`).expect(200);
|
||||
|
||||
expect(onLogout).toHaveBeenCalled();
|
||||
});
|
||||
});
|
59
src/__test__/execute.test.js
Normal file
59
src/__test__/execute.test.js
Normal file
|
@ -0,0 +1,59 @@
|
|||
import request from 'supertest';
|
||||
import express from 'express';
|
||||
import { jest } from '@jest/globals';
|
||||
|
||||
import execute from '../execute';
|
||||
|
||||
let delta = 0;
|
||||
|
||||
// Fake date
|
||||
jest.spyOn(global.Date, 'now').mockImplementation(() => {
|
||||
delta += 1;
|
||||
const second = delta < 10 ? '0' + delta : '' + delta;
|
||||
return new Date(`2020-03-14T11:01:${second}.135Z`).valueOf();
|
||||
});
|
||||
|
||||
describe('Execute Test', () => {
|
||||
let app;
|
||||
let query;
|
||||
let execFunction;
|
||||
|
||||
beforeAll(() => {
|
||||
app = express();
|
||||
app.use(express.json());
|
||||
execFunction = jest.fn(({ body, method, query, id, response }) => {
|
||||
const result = { hello: true, method, query, body, console, id };
|
||||
try {
|
||||
result.response = response;
|
||||
} catch {
|
||||
console.log('-');
|
||||
}
|
||||
return result;
|
||||
});
|
||||
const functions = { mytestfunction: execFunction };
|
||||
app.use(execute({ functions }));
|
||||
query = request(app);
|
||||
});
|
||||
|
||||
it('should execute remote function', async () => {
|
||||
await query.get(`/execute/missingfunction`).expect(404);
|
||||
|
||||
const result = await query.get(`/execute/mytestfunction/`).expect(200);
|
||||
|
||||
expect(result.body).toEqual(expect.objectContaining({ hello: true }));
|
||||
expect(result.body.method).toBe('GET');
|
||||
|
||||
const result2 = await query
|
||||
.post(`/execute/mytestfunction`)
|
||||
.set('X-SPC-Host', 'http://localhost:5000/')
|
||||
.send({ test: 42 })
|
||||
.expect(200);
|
||||
expect(result2.body.method).toBe('POST');
|
||||
expect(result2.body.body).toEqual(expect.objectContaining({ test: 42 }));
|
||||
});
|
||||
|
||||
it('should run remote function with id', async () => {
|
||||
const result = await query.get(`/execute/mytestfunction/42`).expect(200);
|
||||
expect(result.body).toEqual(expect.objectContaining({ id: '42' }));
|
||||
});
|
||||
});
|
249
src/__test__/fileStore.test.js
Normal file
249
src/__test__/fileStore.test.js
Normal file
|
@ -0,0 +1,249 @@
|
|||
import request from 'supertest';
|
||||
import express from 'express';
|
||||
import { jest } from '@jest/globals';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import tempy from 'tempy';
|
||||
import aws from '@aws-sdk/client-s3';
|
||||
|
||||
import { getDirname } from '../utils.js';
|
||||
import fileStore from '../fileStore';
|
||||
import {
|
||||
MemoryFileBackend,
|
||||
DiskFileBackend,
|
||||
S3FileBackend,
|
||||
} from '../fileStore/backends';
|
||||
|
||||
import { S3_ACCESS_KEY, S3_SECRET_KEY, S3_ENDPOINT } from '../settings';
|
||||
|
||||
const { S3, DeleteObjectsCommand, ListObjectsCommand } = aws;
|
||||
|
||||
const __dirname = getDirname(import.meta.url);
|
||||
|
||||
jest.mock('nanoid', () => {
|
||||
let count = 0;
|
||||
return {
|
||||
customAlphabet: () =>
|
||||
jest.fn(() => {
|
||||
return 'nanoid_' + count++;
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
const tempDestination = tempy.directory({ prefix: 'test__' });
|
||||
|
||||
const fileStores = [
|
||||
['memory', MemoryFileBackend(), { prefix: 'pref' }],
|
||||
[
|
||||
'disk',
|
||||
DiskFileBackend({
|
||||
destination: tempDestination,
|
||||
prefix: 'pref',
|
||||
}),
|
||||
{ prefix: 'pref' },
|
||||
],
|
||||
];
|
||||
|
||||
const S3_BUCKET_TEST = process.env.S3_BUCKET_TEST;
|
||||
|
||||
if (S3_BUCKET_TEST) {
|
||||
fileStores.push([
|
||||
's3',
|
||||
S3FileBackend({
|
||||
bucket: process.env.S3_BUCKET_TEST,
|
||||
secretKey: S3_SECRET_KEY,
|
||||
accessKey: S3_ACCESS_KEY,
|
||||
endpoint: S3_ENDPOINT,
|
||||
}),
|
||||
{
|
||||
prefix: 'pref',
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
describe.each(fileStores)(
|
||||
'Backend <%s> file store',
|
||||
(backendType, backend, options) => {
|
||||
let app;
|
||||
let query;
|
||||
|
||||
beforeAll(() => {
|
||||
app = express();
|
||||
app.use(express.json());
|
||||
app.use(
|
||||
'/:siteId/pref/:boxId/:id/file',
|
||||
(req, _, next) => {
|
||||
req.siteId = req.params.siteId;
|
||||
req.boxId = req.params.boxId;
|
||||
req.resourceId = req.params.id;
|
||||
next();
|
||||
},
|
||||
fileStore(backend, options)
|
||||
);
|
||||
query = request(app);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean files
|
||||
if (backendType === 'disk') {
|
||||
try {
|
||||
fs.rmdirSync(tempDestination, { recursive: true });
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Clean bucket
|
||||
if (backendType === 's3') {
|
||||
const { bucket, secretKey, accessKey, endpoint } = {
|
||||
bucket: process.env.S3_BUCKET_TEST,
|
||||
secretKey: S3_SECRET_KEY,
|
||||
accessKey: S3_ACCESS_KEY,
|
||||
endpoint: S3_ENDPOINT,
|
||||
};
|
||||
|
||||
const s3 = new S3({
|
||||
secretAccessKey: secretKey,
|
||||
accessKeyId: accessKey,
|
||||
endpoint: endpoint,
|
||||
});
|
||||
|
||||
const params = {
|
||||
Bucket: bucket,
|
||||
};
|
||||
|
||||
const data = await s3.send(new ListObjectsCommand(params));
|
||||
if (data.Contents) {
|
||||
const deleteParams = {
|
||||
Bucket: bucket,
|
||||
Delete: { Objects: data.Contents.map(({ Key }) => ({ Key })) },
|
||||
};
|
||||
await s3.send(new DeleteObjectsCommand(deleteParams));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (backendType === 'memory') {
|
||||
return;
|
||||
}
|
||||
});
|
||||
|
||||
it('should store file', async () => {
|
||||
const boxId = 'box010';
|
||||
const res = await query
|
||||
.post(`/mysiteid/pref/${boxId}/1234/file/`)
|
||||
.attach('file', path.resolve(__dirname, 'testFile.txt'))
|
||||
.expect(200);
|
||||
|
||||
expect(res.text).toEqual(
|
||||
expect.stringContaining(`mysiteid/pref/${boxId}/1234/file/`)
|
||||
);
|
||||
});
|
||||
|
||||
it('should retreive image file', async () => {
|
||||
const boxId = 'box020';
|
||||
const res = await query
|
||||
.post(`/mysiteid/pref/${boxId}/1234/file/`)
|
||||
.attach('file', path.resolve(__dirname, 'test.png'))
|
||||
.expect(200);
|
||||
|
||||
const fileUrl = res.text;
|
||||
|
||||
const fileRes = await query
|
||||
.get(`/${fileUrl}`)
|
||||
.buffer(false)
|
||||
.redirects(1)
|
||||
.expect(200);
|
||||
|
||||
expect(fileRes.type).toBe('image/png');
|
||||
expect(fileRes.body.length).toBe(6174);
|
||||
});
|
||||
|
||||
it('should retreive text file', async () => {
|
||||
const boxId = 'box025';
|
||||
const res = await query
|
||||
.post(`/mysiteid/pref/${boxId}/1234/file/`)
|
||||
.set('Content-Type', 'text/plain')
|
||||
.attach('file', path.resolve(__dirname, 'testFile.txt'))
|
||||
.expect(200);
|
||||
|
||||
const fileUrl = res.text;
|
||||
|
||||
const fileRes = await query
|
||||
.get(`/${fileUrl}`)
|
||||
.buffer(false)
|
||||
.redirects(1)
|
||||
.expect(200);
|
||||
|
||||
expect(fileRes.type).toBe('text/plain');
|
||||
});
|
||||
|
||||
it('should list files', async () => {
|
||||
const boxId = 'box030';
|
||||
|
||||
const fileListEmpty = await query
|
||||
.get(`/mysiteid/pref/${boxId}/1235/file/`)
|
||||
.expect(200);
|
||||
|
||||
expect(Array.isArray(fileListEmpty.body)).toBe(true);
|
||||
expect(fileListEmpty.body.length).toBe(0);
|
||||
|
||||
const res = await query
|
||||
.post(`/mysiteid/pref/${boxId}/1235/file/`)
|
||||
.attach('file', path.resolve(__dirname, 'testFile.txt'))
|
||||
.expect(200);
|
||||
|
||||
const res2 = await query
|
||||
.post(`/mysiteid/pref/${boxId}/1235/file/`)
|
||||
.attach('file', path.resolve(__dirname, 'test.png'))
|
||||
.expect(200);
|
||||
|
||||
const fileList = await query
|
||||
.get(`/mysiteid/pref/${boxId}/1235/file/`)
|
||||
.expect(200);
|
||||
|
||||
expect(Array.isArray(fileList.body)).toBe(true);
|
||||
expect(fileList.body.length).toBe(2);
|
||||
expect(fileList.body[0]).toEqual(
|
||||
expect.stringContaining(`mysiteid/pref/${boxId}/1235/file/`)
|
||||
);
|
||||
});
|
||||
|
||||
it('should delete file', async () => {
|
||||
const boxId = 'box040';
|
||||
const res = await query
|
||||
.post(`/mysiteid/pref/${boxId}/1234/file/`)
|
||||
.attach('file', path.resolve(__dirname, 'test.png'))
|
||||
.expect(200);
|
||||
|
||||
const fileUrl = res.text;
|
||||
|
||||
await query.delete(`/${fileUrl}`).buffer(false).expect(200);
|
||||
|
||||
const fileList = await query
|
||||
.get(`/mysiteid/pref/${boxId}/1234/file/`)
|
||||
.expect(200);
|
||||
expect(fileList.body.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should return 404', async () => {
|
||||
const boxId = 'box050';
|
||||
|
||||
await query.get(`/mysiteid/pref/${boxId}/1234/file/nofile`).expect(404);
|
||||
await query
|
||||
.delete(`/mysiteid/pref/${boxId}/1234/file/nofile`)
|
||||
.expect(404);
|
||||
|
||||
// To create box
|
||||
await query
|
||||
.post(`/mysiteid/pref/${boxId}/1234/file/`)
|
||||
.attach('file', path.resolve(__dirname, 'test.png'))
|
||||
.expect(200);
|
||||
|
||||
await query.get(`/mysiteid/pref/${boxId}/1234/file/nofile2`).expect(404);
|
||||
await query
|
||||
.delete(`/mysiteid/pref/${boxId}/1234/file/nofile2`)
|
||||
.expect(404);
|
||||
});
|
||||
}
|
||||
);
|
64
src/__test__/remote.test.js
Normal file
64
src/__test__/remote.test.js
Normal file
|
@ -0,0 +1,64 @@
|
|||
import request from 'supertest';
|
||||
import express from 'express';
|
||||
|
||||
import remote from '../remote';
|
||||
import origin from '../origin';
|
||||
|
||||
describe('Remote Test', () => {
|
||||
let app;
|
||||
let query;
|
||||
|
||||
beforeEach(() => {
|
||||
app = express();
|
||||
app.use(express.json());
|
||||
app.use(
|
||||
(req, _, next) => {
|
||||
req.siteId = 'mysiteid';
|
||||
next();
|
||||
},
|
||||
origin(),
|
||||
remote({
|
||||
setupPath: 'scripts/mysetup.js',
|
||||
context: { content: {} },
|
||||
})
|
||||
);
|
||||
query = request(app);
|
||||
});
|
||||
|
||||
it('should allow calls with Origin, X-Ricochet-Origin, Referer header', async () => {
|
||||
await query.get(`/ping`).expect(400);
|
||||
await query
|
||||
.get(`/ping`)
|
||||
.set('X-Ricochet-Origin', 'http://localhost:5000')
|
||||
.expect(200);
|
||||
await query.get(`/ping`).set('Origin', 'http://localhost:5000').expect(200);
|
||||
await query
|
||||
.get(`/ping`)
|
||||
.set('Referer', 'http://localhost:5000/test/toto')
|
||||
.expect(200);
|
||||
});
|
||||
|
||||
it('should fails to parse setup', async () => {
|
||||
app = express();
|
||||
app.use(express.json());
|
||||
app.use(
|
||||
(req, _, next) => {
|
||||
req.ricochetOrigin = 'http://localhost:5000';
|
||||
next();
|
||||
},
|
||||
remote({
|
||||
setupPath: 'scripts/bad.js',
|
||||
})
|
||||
);
|
||||
query = request(app);
|
||||
|
||||
const result = await query
|
||||
.get(`/`)
|
||||
.set('X-Ricochet-Origin', 'http://localhost:5000')
|
||||
.expect(500);
|
||||
|
||||
expect(result.body.message).toEqual(
|
||||
expect.stringContaining('Unexpected identifier')
|
||||
);
|
||||
});
|
||||
});
|
84
src/__test__/remoteCode.test.js
Normal file
84
src/__test__/remoteCode.test.js
Normal file
|
@ -0,0 +1,84 @@
|
|||
import { jest } from '@jest/globals';
|
||||
|
||||
import RemoteCode from '../remoteCode';
|
||||
|
||||
const REMOTE = 'http://localhost:5000/';
|
||||
|
||||
const SITEID = 'mysiteid';
|
||||
|
||||
describe('Remote Test', () => {
|
||||
let remoteCode;
|
||||
let preProcess;
|
||||
|
||||
beforeEach(() => {
|
||||
preProcess = jest.fn((script) => {
|
||||
return script;
|
||||
});
|
||||
remoteCode = new RemoteCode({
|
||||
disableCache: false,
|
||||
preProcess,
|
||||
});
|
||||
});
|
||||
|
||||
it('should call remote function', async () => {
|
||||
const content = { hello: true };
|
||||
const result = await remoteCode.exec(SITEID, REMOTE, 'scripts/mysetup.js', {
|
||||
content,
|
||||
});
|
||||
expect(result).toEqual('foo');
|
||||
|
||||
expect(content).toEqual(
|
||||
expect.objectContaining({ hello: true, response: 42 })
|
||||
);
|
||||
|
||||
// Hit cache
|
||||
const result2 = await remoteCode.exec(
|
||||
SITEID,
|
||||
REMOTE,
|
||||
'scripts/mysetup.js',
|
||||
{
|
||||
content,
|
||||
}
|
||||
);
|
||||
expect(result2).toEqual('foo');
|
||||
|
||||
// Clear cache
|
||||
remoteCode.clearCache(REMOTE);
|
||||
|
||||
const result3 = await remoteCode.exec(
|
||||
SITEID,
|
||||
REMOTE,
|
||||
'scripts/mysetup.js',
|
||||
{
|
||||
content,
|
||||
}
|
||||
);
|
||||
expect(result3).toEqual('foo');
|
||||
});
|
||||
|
||||
it('should filter requirements', async () => {
|
||||
const http = await remoteCode.exec(
|
||||
SITEID,
|
||||
REMOTE,
|
||||
'scripts/mysetupWithRequire.js'
|
||||
);
|
||||
const httpReal = await import('http');
|
||||
expect(http['STATUS_CODES']).toEqual(httpReal['STATUS_CODES']);
|
||||
|
||||
try {
|
||||
await remoteCode.exec(SITEID, REMOTE, 'scripts/mysetupWithBadRequire.js');
|
||||
} catch (e) {
|
||||
expect(e.code).toMatch('ENOTFOUND');
|
||||
}
|
||||
});
|
||||
|
||||
it("shouldn't call missing remote function", async () => {
|
||||
try {
|
||||
await remoteCode.exec(SITEID, REMOTE, 'notexisting');
|
||||
} catch (e) {
|
||||
expect(e).toMatch(
|
||||
'Script notexisting not found on remote http://localhost:5000'
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
222
src/__test__/site.test.js
Normal file
222
src/__test__/site.test.js
Normal file
|
@ -0,0 +1,222 @@
|
|||
import request from 'supertest';
|
||||
import express from 'express';
|
||||
import { jest } from '@jest/globals';
|
||||
|
||||
import site from '../site';
|
||||
import { MemoryBackend } from '../store/backends';
|
||||
|
||||
// Deterministic nanoid replacement: generated ids are "nanoid_0",
// "nanoid_1", ... so tests can rely on stable identifiers.
jest.mock('nanoid', () => {
  let counter = 0;
  return {
    customAlphabet: () => jest.fn(() => `nanoid_${counter++}`),
  };
});
|
||||
|
||||
describe('Site endpoint tests', () => {
  let api;
  let storeBackend;
  let onSiteCreation;
  let onSiteUpdate;
  let latestConfirmPath;

  beforeEach(() => {
    // Both lifecycle hooks record the confirmation path they receive so the
    // tests can follow the confirmation link afterwards.
    onSiteCreation = jest.fn(({ confirmPath }) => {
      latestConfirmPath = confirmPath;
    });
    onSiteUpdate = jest.fn(({ confirmPath }) => {
      latestConfirmPath = confirmPath;
    });
    storeBackend = MemoryBackend();
    const app = express();
    app.use(express.json());
    app.use(
      site({
        configFile: './site.json',
        storeBackend,
        onSiteCreation,
        onSiteUpdate,
        serverUrl: '',
      })
    );
    api = request(app);
  });

  it('should create a site', async () => {
    const result = await api
      .post('/_register/')
      .send({
        siteId: 'test',
        owner: 'test@yopmail.com',
        name: 'Site test',
        emailFrom: 'from@ricochet.net',
        extraData: 'data',
      })
      .expect(200);

    expect(result.body).toEqual(
      expect.objectContaining({
        name: 'Site test',
        owner: 'test@yopmail.com',
        emailFrom: 'from@ricochet.net',
      })
    );
    expect(typeof result.body.key).toBe('string');
    expect(result.body.key.length).toBe(44);
    // Neither the token nor arbitrary extra fields may leak in the response.
    expect(result.body.token).toBeUndefined();
    expect(result.body.extraData).toBe(undefined);

    expect(onSiteCreation).toHaveBeenCalled();
    expect(onSiteUpdate).not.toHaveBeenCalled();

    expect(latestConfirmPath).toEqual(
      expect.stringContaining('/_register/test/confirm/')
    );

    // Before confirmation the site only exists in the pending collection.
    expect((await storeBackend.list('_site')).length).toBe(0);
    expect((await storeBackend.list('_pending')).length).toBe(1);

    await api.get(latestConfirmPath).expect(200);

    // After confirmation the pending entry has become a real site.
    const sitesAfter = await storeBackend.list('_site');
    expect(sitesAfter.length).toBe(1);
    expect(sitesAfter[0]).toEqual(
      expect.objectContaining({
        name: 'Site test',
        owner: 'test@yopmail.com',
        emailFrom: 'from@ricochet.net',
      })
    );
    expect((await storeBackend.list('_pending')).length).toBe(0);

    // We can't confirm twice
    await api.get(latestConfirmPath).expect(403);
  });

  it('should not create an existing site', async () => {
    await storeBackend.save('_site', 'mytestsite', {
      owner: 'test@yopmail',
      name: 'Site test',
      emailFrom: 'from@ricochet.net',
      key: 'mykey',
    });

    await api
      .post('/_register')
      .send({
        siteId: 'mytestsite',
        owner: 'test@yopmail',
        name: 'Site test',
        emailFrom: 'from@ricochet.net',
      })
      .expect(403);
  });

  it('should not create a site with bad characters', async () => {
    // Site ids are restricted to a safe alphabet; each of these must be
    // rejected with a 400.
    const invalidSiteIds = ['toto4+', 'toto4é', '_toto', 'toto-titi'];
    for (const siteId of invalidSiteIds) {
      await api
        .post('/_register/')
        .send({
          siteId,
          owner: 'test@yopmail',
          name: 'Site test',
          emailFrom: 'from@ricochet.net',
        })
        .expect(400);
    }
  });

  it('should not update a missing site', async () => {
    await api
      .patch('/_register/mytestsite')
      .send({
        owner: 'test@yopmail',
        name: 'Site test',
        emailFrom: 'from@ricochet.net',
      })
      .expect(404);
  });

  it('should update an existing site', async () => {
    await storeBackend.save('_site', 'mytestsite', {
      owner: 'test@yopmail',
      name: 'Site test',
      emailFrom: 'from@ricochet.net',
      key: 'mykey',
    });

    const result = await api
      .patch('/_register/mytestsite')
      .send({
        owner: 'falseOwner@mail.com', // We shouldn't be able to modify that
        name: 'New name',
        emailFrom: 'from2@ricochet.net',
        token: 'falseToken',
        key: 'falseKey',
      })
      .expect(200);

    // Secrets never show up in the response.
    expect(result.body.token).toBeUndefined();
    expect(result.body.key).toBeUndefined();

    expect(latestConfirmPath).toEqual(
      expect.stringContaining('/_register/mytestsite/confirm/')
    );

    // The update is pending until confirmed.
    expect((await storeBackend.list('_pending')).length).toBe(1);

    await api.get(latestConfirmPath).expect(200);

    expect((await storeBackend.list('_pending')).length).toBe(0);

    const sites = await storeBackend.list('_site');
    expect(sites.length).toBe(1);
    // owner and key are preserved; name and emailFrom are updated.
    expect(sites[0]).toEqual(
      expect.objectContaining({
        _id: 'mytestsite',
        name: 'New name',
        owner: 'test@yopmail',
        emailFrom: 'from2@ricochet.net',
        key: 'mykey',
      })
    );
    // We can't confirm twice
    await api.get(latestConfirmPath).expect(403);
  });
});
|
542
src/__test__/store.test.js
Normal file
542
src/__test__/store.test.js
Normal file
|
@ -0,0 +1,542 @@
|
|||
import request from 'supertest';
|
||||
import express from 'express';
|
||||
import path from 'path';
|
||||
import { jest } from '@jest/globals';
|
||||
|
||||
import { getDirname } from '../utils.js';
|
||||
import store from '../store';
|
||||
import { MemoryBackend } from '../store/backends';
|
||||
import { MemoryFileBackend } from '../fileStore/backends';
|
||||
|
||||
const __dirname = getDirname(import.meta.url);

// Fake, strictly increasing clock: every Date.now() call advances one second
// from 2020-03-14T11:01:00.135Z so _createdOn/_updatedOn are deterministic.
// Arithmetic on the base timestamp keeps the mock valid for any number of
// calls — the previous string-built "11:01:<delta>" produced an Invalid Date
// (NaN) once delta exceeded 59.
const FAKE_CLOCK_BASE = new Date('2020-03-14T11:01:00.135Z').valueOf();
let delta = 0;

jest.spyOn(global.Date, 'now').mockImplementation(() => {
  delta += 1;
  return FAKE_CLOCK_BASE + delta * 1000;
});
|
||||
|
||||
describe('Store Test', () => {
  let query;
  let backend;

  beforeAll(() => {
    const app = express();
    app.use(express.json());
    app.use(express.urlencoded({ extended: true }));
    backend = MemoryBackend();
    app.use(
      '/:siteId',
      (req, _, next) => {
        // The store middleware reads the site id from the request object.
        req.siteId = req.params.siteId;
        next();
      },
      store({
        backend,
      })
    );
    query = request(app);
  });

  it('should get empty box', async () => {
    const box = 'myboxid_test1';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    await query
      .get(`/fakeSiteId/store/${box}/`)
      .expect(200, [])
      .expect('Content-Type', /json/);
  });

  it('should add resource', async () => {
    const box = 'myboxid_test2';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    const res = await query
      .post(`/fakeSiteId/store/${box}/`)
      .send({ test: true, value: 42 })
      .expect(200);

    expect(res.body).toEqual(
      expect.objectContaining({ test: true, value: 42 })
    );
    expect(typeof res.body._id).toEqual('string');
    expect(res.body._createdOn).toBeGreaterThanOrEqual(1584183661135);

    const res2 = await query
      .get(`/fakeSiteId/store/${box}/`)
      .expect(200)
      .expect('Content-Type', /json/);

    expect(res.body).toEqual(res2.body[0]);

    // Test object creation with id
    const resWithId = await query
      .post(`/fakeSiteId/store/${box}/myid`)
      .send({ foo: 'bar', bar: 'foo' })
      .expect(200);

    expect(resWithId.body._id).toBe('myid');
  });

  it('should get a resource', async () => {
    const box = 'myboxid_test3';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    const res = await query
      .post(`/fakeSiteId/store/${box}/`)
      .send({ test: true, value: 42 })
      .expect(200);

    const resourceId = res.body._id;

    const res2 = await query
      .get(`/fakeSiteId/store/${box}/${resourceId}`)
      .expect(200)
      .expect('Content-Type', /json/);

    expect(res.body).toEqual(res2.body);
  });

  it('should update a resource', async () => {
    const box = 'myboxid_test4';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    const res = await query
      .post(`/fakeSiteId/store/${box}/`)
      .send({ test: true, value: 40 })
      .expect(200);

    const resourceId = res.body._id;

    // PUT merges the new value into the existing resource.
    await query
      .put(`/fakeSiteId/store/${box}/${resourceId}`)
      .send({ value: 42 })
      .expect(200);

    const res3 = await query
      .get(`/fakeSiteId/store/${box}/${resourceId}`)
      .expect(200);

    expect(res3.body.value).toEqual(42);

    // POST on an existing id replaces the resource entirely.
    const replaceWithId = await query
      .post(`/fakeSiteId/store/${box}/${resourceId}`)
      .send({ value: 52 })
      .expect(200);

    expect(replaceWithId.body).not.toEqual(
      expect.objectContaining({ test: true })
    );
  });

  it('should delete a resource', async () => {
    const box = 'myboxid_test5';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    const res = await query
      .post(`/fakeSiteId/store/${box}/`)
      .send({ test: true, value: 40 })
      .expect(200);

    const resourceId = res.body._id;

    await query
      .del(`/fakeSiteId/store/${box}/${resourceId}`)
      .expect(200)
      .expect('Content-Type', /json/);

    // Box is empty again after deletion.
    await query.get(`/fakeSiteId/store/${box}/`).expect(200, []);
  });

  it('should return 404', async () => {
    const box = 'boxId_400';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    await query.get(`/fakeSiteId/store/${box}/noresource`).expect(404);

    await query.delete(`/fakeSiteId/store/${box}/noresource`).expect(404);
  });

  it('should return 403', async () => {
    let box = 'boxId_500';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'readOnly',
    });

    // readOnly: reads are allowed, writes are forbidden.
    await query.get(`/fakeSiteId/store/${box}/`).expect(200);

    await query
      .post(`/fakeSiteId/store/${box}/`)
      .send({ test: true, value: 40 })
      .expect(403);

    // Box created without site prefix/options: unreachable via the API.
    box = 'boxId_550';
    await backend.createOrUpdateBox(box);

    await query.get(`/fakeSiteId/store/${box}/`).expect(403);
  });

  it('should store and get a file', async () => {
    const box = 'boxId_600';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    await query
      .post(`/fakeSiteId/store/${box}/1234`)
      .send({ test: true, value: 42 })
      .expect(200);

    const res = await query
      .post(`/fakeSiteId/store/${box}/1234/file/`)
      .attach('file', path.resolve(__dirname, 'testFile.txt'))
      .expect(200);

    // The endpoint answers with the URL of the stored file.
    const fileUrl = res.text;

    await query.get(`/${fileUrl}`).buffer(false).redirects(1).expect(200);
  });
});
|
||||
|
||||
describe('Store Hook Tests', () => {
  let query;
  let backend;
  let hooks;

  // Factory for a spy hook that forwards its context untouched.
  const passthrough = () => jest.fn((context) => context);

  beforeAll(() => {
    const app = express();
    app.use(express.json());
    app.use(express.urlencoded({ extended: true }));
    backend = MemoryBackend();
    // The hooks object is shared by reference, so each test can swap its
    // before/after arrays without rebuilding the app.
    hooks = {};
    app.use(
      '/:siteId',
      (req, _, next) => {
        req.siteId = req.params.siteId;
        next();
      },
      store({
        backend,
        hooks,
      })
    );
    query = request(app);
  });

  it('should call hooks for list', async () => {
    const box = 'boxId_1000';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    hooks.before = [passthrough(), passthrough()];
    hooks.after = [passthrough(), passthrough()];

    await query.get(`/fakeSiteId/store/${box}/`).expect(200);

    expect(hooks.before[0]).toHaveBeenCalled();
    expect(hooks.before[1]).toHaveBeenCalled();
    expect(hooks.after[0]).toHaveBeenCalled();
    expect(hooks.after[1]).toHaveBeenCalled();
  });

  it('should call hooks for post & get & delete', async () => {
    const box = 'boxId_1100';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    hooks.before = [passthrough(), passthrough()];
    hooks.after = [passthrough(), passthrough()];

    await query
      .post(`/fakeSiteId/store/${box}/1234`)
      .send({ test: true, value: 42 })
      .expect(200);

    expect(hooks.before[0]).toHaveBeenCalledTimes(1);
    expect(hooks.before[1]).toHaveBeenCalledTimes(1);
    expect(hooks.after[0]).toHaveBeenCalledTimes(1);
    expect(hooks.after[1]).toHaveBeenCalledTimes(1);

    jest.clearAllMocks();

    await query.get(`/fakeSiteId/store/${box}/1234`).expect(200);

    expect(hooks.before[0]).toHaveBeenCalled();
    expect(hooks.before[1]).toHaveBeenCalled();
    expect(hooks.after[0]).toHaveBeenCalled();
    expect(hooks.after[1]).toHaveBeenCalled();

    jest.clearAllMocks();

    await query.delete(`/fakeSiteId/store/${box}/1234`).expect(200);

    expect(hooks.before[0]).toHaveBeenCalled();
    expect(hooks.before[1]).toHaveBeenCalled();
    expect(hooks.after[0]).toHaveBeenCalled();
    expect(hooks.after[1]).toHaveBeenCalled();
  });

  it('hooks should modify post', async () => {
    const box = 'boxId_1200';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    // Before-hooks are chained: the second one sees the body produced by
    // the first.
    hooks.before = [
      jest.fn((context) => ({ ...context, body: { value: 256 } })),
      jest.fn((context) => ({
        ...context,
        body: { ...context.body, foo: 'bar' },
      })),
    ];
    hooks.after = [passthrough(), passthrough()];

    await query
      .post(`/fakeSiteId/store/${box}/1234`)
      .send({ test: true, value: 42 })
      .expect(200);

    const result = await query.get(`/fakeSiteId/store/${box}/1234`).expect(200);

    expect(result.body).toEqual(
      expect.objectContaining({ value: 256, foo: 'bar' })
    );
  });

  it('hooks should modify get', async () => {
    const box = 'boxId_1300';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    hooks.before = [passthrough(), passthrough()];
    // After-hooks are chained on the response the same way.
    hooks.after = [
      jest.fn((context) => ({ ...context, response: { value: 256 } })),
      jest.fn((context) => ({
        ...context,
        response: { ...context.response, foo: 'bar' },
      })),
    ];

    await query
      .post(`/fakeSiteId/store/${box}/1234`)
      .send({ test: true, value: 42 })
      .expect(200);

    const result = await query.get(`/fakeSiteId/store/${box}/1234`).expect(200);

    expect(result.body).toEqual(
      expect.objectContaining({ value: 256, foo: 'bar' })
    );
  });

  it('hooks should force access to private store', async () => {
    const box = 'boxId_1400';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'private',
    });

    // A before-hook may grant access regardless of box security.
    hooks.before = [
      jest.fn((context) => ({ ...context, allow: true })),
      passthrough(),
    ];

    await query
      .post(`/fakeSiteId/store/${box}/1234`)
      .send({ test: true, value: 42 })
      .expect(200);

    await query.get(`/fakeSiteId/store/${box}`).expect(200);
    await query.get(`/fakeSiteId/store/${box}/1234`).expect(200);
    await query.delete(`/fakeSiteId/store/${box}/1234`).expect(200);
  });

  it('should store even if private box', async () => {
    const box = 'boxId_1500';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'private',
    });

    // Without a beforeFile hook the upload is refused.
    await query
      .post(`/fakeSiteId/store/${box}/1234/file/`)
      .attach('file', path.resolve(__dirname, 'testFile.txt'))
      .expect(403);

    hooks.beforeFile = [
      jest.fn((context) => ({ ...context, allow: true })),
      passthrough(),
    ];

    await query
      .post(`/fakeSiteId/store/${box}/1234/file/`)
      .attach('file', path.resolve(__dirname, 'testFile.txt'))
      .expect(200);
  });
});
|
||||
|
||||
describe('Store File Test', () => {
  let query;
  let backend;
  let fileBackend;

  beforeAll(() => {
    const app = express();
    app.use(express.json());
    app.use(express.urlencoded({ extended: true }));
    backend = MemoryBackend();
    fileBackend = MemoryFileBackend();
    app.use(
      '/:siteId',
      (req, _, next) => {
        req.siteId = req.params.siteId;
        next();
      },
      store({
        backend,
        fileBackend,
      })
    );
    query = request(app);
  });

  it('should store even if resource missing', async () => {
    const box = 'boxId_600';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    // No resource "1234" was created: the file upload must still succeed.
    const res = await query
      .post(`/fakeSiteId/store/${box}/1234/file/`)
      .attach('file', path.resolve(__dirname, 'testFile.txt'))
      .expect(200);

    const fileUrl = res.text;

    await query.get(`/${fileUrl}`).buffer(false).redirects(1).expect(200);
  });

  it('should store and get a file', async () => {
    const box = 'boxId_600';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'public',
    });

    await query
      .post(`/fakeSiteId/store/${box}/1234`)
      .send({ test: true, value: 42 })
      .expect(200);

    const res = await query
      .post(`/fakeSiteId/store/${box}/1234/file/`)
      .attach('file', path.resolve(__dirname, 'testFile.txt'))
      .expect(200);

    const fileUrl = res.text;

    await query.get(`/${fileUrl}`).buffer(false).redirects(1).expect(200);
  });

  it('should not allow to store a file on readOnly store', async () => {
    const box = 'boxId_600';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'readOnly',
    });

    const fakeFile = { filename: 'test.txt', mimetype: 'text/plain' };

    // Upload via the API is forbidden on a readOnly box...
    await query
      .post(`/fakeSiteId/store/${box}/1234/file/`)
      .attach('file', path.resolve(__dirname, 'testFile.txt'))
      .expect(403);

    // ...but a file stored directly through the backend stays readable.
    const fileName = await fileBackend.store(
      'fakeSiteId',
      box,
      '1234',
      fakeFile
    );

    await query
      .get(`/fakeSiteId/store/${box}/1234/file/${fileName}`)
      .buffer(false)
      .redirects(1)
      .expect(200);
  });

  it('should not allow to store and get a file on private store', async () => {
    const box = 'boxId_600';
    await backend.createOrUpdateBox(`_fakeSiteId__${box}`, {
      security: 'private',
    });

    const fakeFile = { filename: 'test.txt', mimetype: 'text/plain' };

    // On a private box both upload and read are forbidden through the API.
    await query
      .post(`/fakeSiteId/store/${box}/1234/file/`)
      .attach('file', path.resolve(__dirname, 'testFile.txt'))
      .expect(403);

    const fileName = await fileBackend.store(
      'fakeSiteId',
      box,
      '1234',
      fakeFile
    );

    await query
      .get(`/fakeSiteId/store/${box}/1234/file/${fileName}`)
      .buffer(false)
      .redirects(1)
      .expect(403);
  });
});
|
373
src/__test__/storeBackend.test.js
Normal file
373
src/__test__/storeBackend.test.js
Normal file
|
@ -0,0 +1,373 @@
|
|||
import { jest } from '@jest/globals';
|
||||
|
||||
import {
|
||||
MemoryBackend,
|
||||
NeDBBackend,
|
||||
MongoDBBackend,
|
||||
wrapBackend,
|
||||
} from '../store/backends';
|
||||
|
||||
import { MONGODB_URI } from '../settings';
|
||||
|
||||
// MongoDB test database name; MongoDB-backed tests only run when it is set.
const MONGODB_DATABASE_TEST = process.env.MONGODB_DATABASE_TEST;

// Deterministic nanoid replacement: generated ids are "nanoid_0",
// "nanoid_1", ... so tests can rely on stable identifiers.
jest.mock('nanoid', () => {
  let counter = 0;
  return {
    customAlphabet: () => jest.fn(() => `nanoid_${counter++}`),
  };
});
|
||||
|
||||
// Fake, strictly increasing clock: each Date.now() call advances one second
// from a fixed origin so _createdOn/_updatedOn are deterministic.
const CLOCK_ORIGIN_MS = 1584183719135;
let delta = 0;

jest.spyOn(global.Date, 'now').mockImplementation(() => {
  delta += 1;
  return new Date(CLOCK_ORIGIN_MS + delta * 1000).getTime();
});
|
||||
|
||||
// Backends under test: each entry is a [name, backend] pair consumed by the
// describe.each suite below.
const backends = [
  ['memory', MemoryBackend()],
  //['NeDb', NeDBBackend({ filename: null, inMemoryOnly: true })],
];

if (MONGODB_DATABASE_TEST) {
  // NOTE(review): the MongoDB entry is commented out, so this branch is
  // currently a no-op even when MONGODB_DATABASE_TEST is set — presumably a
  // temporary disable; confirm before relying on MongoDB coverage here.
  /*backends.push([
    'MongoDB',
    MongoDBBackend({ uri: MONGODB_URI, database: MONGODB_DATABASE_TEST }),
  ]);*/
}
|
||||
|
||||
describe.each(backends)(
|
||||
'Store backend <%s> tests',
|
||||
(backendName, rawBackend) => {
|
||||
let backend;
|
||||
|
||||
beforeAll(async () => {
|
||||
backend = wrapBackend(rawBackend, 'siteid', 'userid');
|
||||
|
||||
if (backendName === 'MongoDB') {
|
||||
const { MongoClient, ServerApiVersion } = await import('mongodb');
|
||||
const _client = new MongoClient(MONGODB_URI, {
|
||||
serverApi: ServerApiVersion.v1,
|
||||
});
|
||||
|
||||
await _client.connect();
|
||||
await _client.db(MONGODB_DATABASE_TEST).dropDatabase();
|
||||
await _client.close();
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (backendName === 'MongoDB') {
|
||||
rawBackend._close();
|
||||
}
|
||||
});
|
||||
|
||||
it('should get empty box', async () => {
|
||||
const box = 'boxid1';
|
||||
|
||||
await expect(backend.list(box, {})).rejects.toThrow();
|
||||
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const res = await backend.list(box, {});
|
||||
expect(res).toEqual([]);
|
||||
});
|
||||
|
||||
it('should add resource', async () => {
|
||||
const box = 'boxid2';
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const res = await backend.save(box, null, { value: 42, test: true });
|
||||
expect(res).toEqual(expect.objectContaining({ test: true, value: 42 }));
|
||||
|
||||
// Is return value ok
|
||||
expect(res._id).toBeDefined();
|
||||
expect(res._createdOn).toBeGreaterThanOrEqual(1584183661135);
|
||||
|
||||
// Is get working
|
||||
const res2 = await backend.get(box, res._id);
|
||||
expect(res2).toEqual(res);
|
||||
|
||||
// Is list updated
|
||||
const res3 = await backend.list(box);
|
||||
expect(res3[0]).toEqual(res);
|
||||
});
|
||||
|
||||
it('should add resource with specified id', async () => {
|
||||
const box = 'boxId10';
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const res = await backend.save(box, 'myid', {
|
||||
value: 42,
|
||||
test: true,
|
||||
_createdOn: 1,
|
||||
});
|
||||
expect(res).toEqual(expect.objectContaining({ test: true, value: 42 }));
|
||||
|
||||
// Is return value ok
|
||||
expect(res._id).toBe('myid');
|
||||
expect(res._createdOn).toBeGreaterThanOrEqual(1584183661135);
|
||||
|
||||
// Is get working
|
||||
const res2 = await backend.get(box, 'myid');
|
||||
expect(res2).toEqual(res);
|
||||
});
|
||||
|
||||
it('should save resource with specified id', async () => {
|
||||
const box = 'boxId15';
|
||||
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const res = await backend.save(box, 'myid', { value: 42, test: true });
|
||||
expect(res).toEqual(expect.objectContaining({ test: true, value: 42 }));
|
||||
|
||||
// Is return value ok
|
||||
expect(res._id).toBe('myid');
|
||||
expect(res._createdOn).toBeGreaterThanOrEqual(1584183661135);
|
||||
|
||||
const resAfterSave = await backend.save(box, 'myid', {
|
||||
value: 45,
|
||||
foo: 18,
|
||||
});
|
||||
expect(resAfterSave).toEqual(
|
||||
expect.objectContaining({ foo: 18, value: 45 })
|
||||
);
|
||||
expect(resAfterSave).not.toEqual(expect.objectContaining({ test: true }));
|
||||
expect(resAfterSave._createdOn).toEqual(res._createdOn);
|
||||
expect(resAfterSave._updatedOn).toBeGreaterThanOrEqual(1584183661135);
|
||||
});
|
||||
|
||||
it('should add tree resources', async () => {
|
||||
const box = 'boxid20';
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const first = await backend.save(box, null, {
|
||||
value: 40,
|
||||
test: false,
|
||||
});
|
||||
expect(first).toEqual(
|
||||
expect.objectContaining({ test: false, value: 40 })
|
||||
);
|
||||
|
||||
const second = await backend.save(box, null, {
|
||||
value: 42,
|
||||
test: true,
|
||||
});
|
||||
expect(second).toEqual(
|
||||
expect.objectContaining({ test: true, value: 42 })
|
||||
);
|
||||
|
||||
const third = await backend.save(box, null, { value: 44 });
|
||||
expect(third).toEqual(expect.objectContaining({ value: 44 }));
|
||||
|
||||
// Is get working
|
||||
const firstGet = await backend.get(box, first._id);
|
||||
expect(firstGet).toEqual(first);
|
||||
const secondGet = await backend.get(box, second._id);
|
||||
expect(secondGet).toEqual(second);
|
||||
|
||||
// Is list updated
|
||||
const allResources = await backend.list(box, { sort: '_createdOn' });
|
||||
expect(allResources[0]).toEqual(first);
|
||||
expect(allResources[1]).toEqual(second);
|
||||
expect(allResources[2]).toEqual(third);
|
||||
expect(allResources.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should update resource', async () => {
|
||||
const box = 'boxid3';
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const res = await backend.save(box, null, { value: 40, test: true });
|
||||
expect(res.value).toBe(40);
|
||||
|
||||
const modified = await backend.update(box, res._id, { value: 42 });
|
||||
|
||||
const afterModification = await backend.get(box, res._id);
|
||||
expect(afterModification).toEqual(modified);
|
||||
expect(afterModification.value).toBe(42);
|
||||
|
||||
expect(afterModification._createdOn).toEqual(res._createdOn);
|
||||
expect(afterModification._updatedOn).toBeGreaterThanOrEqual(
|
||||
res._createdOn
|
||||
);
|
||||
});
|
||||
|
||||
it('should delete resource', async () => {
|
||||
const box = 'boxid4';
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const predel = await backend.delete(box, 'noid');
|
||||
expect(predel).toBe(0);
|
||||
|
||||
const res = await backend.save(box, null, { value: 42, test: true });
|
||||
|
||||
const allResources = await backend.list(box);
|
||||
expect(allResources.length).toBe(1);
|
||||
|
||||
const del = await backend.delete(box, res._id);
|
||||
expect(del).toBe(1);
|
||||
|
||||
const allResourcesAfterDelete = await backend.list(box);
|
||||
expect(allResourcesAfterDelete.length).toBe(0);
|
||||
|
||||
const nodel = await backend.delete(box, res._id);
|
||||
expect(nodel).toBe(0);
|
||||
});
|
||||
|
||||
it('should list resources', async () => {
|
||||
const box = 'boxId50';
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const first = await backend.save(box, null, { value: 40, test: false });
|
||||
const second = await backend.save(box, null, { value: 44, test: true });
|
||||
const third = await backend.save(box, null, { value: 42 });
|
||||
const last = await backend.save(box, null, { value: 42 });
|
||||
|
||||
// Is sort working
|
||||
const allResources = await backend.list(box, {
|
||||
sort: '_createdOn',
|
||||
});
|
||||
expect(allResources[0]).toEqual(first);
|
||||
expect(allResources[2]).toEqual(third);
|
||||
|
||||
// Is sort working
|
||||
const allResourcesReverse = await backend.list(box, {
|
||||
sort: '_createdOn',
|
||||
asc: false,
|
||||
});
|
||||
expect(allResourcesReverse[3]).toEqual(first);
|
||||
expect(allResourcesReverse[0]).toEqual(last);
|
||||
|
||||
const allResourcesReverse2 = await backend.list(box, {
|
||||
sort: 'value',
|
||||
asc: false,
|
||||
});
|
||||
expect(allResourcesReverse2[3]).toEqual(first);
|
||||
expect(allResourcesReverse2[0]).toEqual(second);
|
||||
|
||||
// Is limit working
|
||||
const limitedResources = await backend.list(box, {
|
||||
sort: '_createdOn',
|
||||
limit: 1,
|
||||
});
|
||||
expect(limitedResources[0]).toEqual(first);
|
||||
expect(limitedResources.length).toBe(1);
|
||||
|
||||
// Is skip working
|
||||
const skippedResources = await backend.list(box, {
|
||||
sort: '_createdOn',
|
||||
limit: 1,
|
||||
skip: 1,
|
||||
});
|
||||
expect(skippedResources[0]).toEqual(second);
|
||||
expect(skippedResources.length).toBe(1);
|
||||
|
||||
// Is onlyFields working
|
||||
const filteredResources = await backend.list(box, {
|
||||
sort: '_createdOn',
|
||||
onlyFields: ['value'],
|
||||
});
|
||||
expect(filteredResources[0]).not.toEqual(
|
||||
expect.objectContaining({ test: false })
|
||||
);
|
||||
|
||||
// Test queries
|
||||
console.log('si');
|
||||
const foundResources = await backend.list(box, {
|
||||
q: 'value > 42',
|
||||
});
|
||||
|
||||
expect(foundResources.length).toBe(1);
|
||||
|
||||
const foundResources2 = await backend.list(box, {
|
||||
q: 'value > 42 and test = false',
|
||||
});
|
||||
|
||||
expect(foundResources2.length).toBe(0);
|
||||
|
||||
await expect(
|
||||
backend.list(box, {
|
||||
q: 'value > 42 and test = false bla bl',
|
||||
})
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should check security', async () => {
|
||||
const box = 'boxId51';
|
||||
// Create box
|
||||
await backend.createOrUpdateBox(box, { option: 1 });
|
||||
|
||||
const first = await backend.save(box, null, { value: 40, test: false });
|
||||
const second = await backend.save(box, null, { value: 44, test: true });
|
||||
const third = await backend.save(box, null, { value: 42 });
|
||||
|
||||
const result = await backend.checkSecurity(box, first._id, 'nokey');
|
||||
// FIXME
|
||||
});
|
||||
|
||||
it('should throw error', async () => {
  const box = 'boxId60';

  // Getting or updating a missing resource must reject, both before the
  // box exists and after it has been created.
  const expectMissingResourceErrors = async () => {
    await expect(backend.get(box, 'noid')).rejects.toThrow();
    await expect(
      backend.update(box, 'noid', { value: 'titi' })
    ).rejects.toThrow();
  };

  await expectMissingResourceErrors();

  // Create box
  await backend.createOrUpdateBox(box, { option: 1 });

  await expectMissingResourceErrors();
});
|
||||
|
||||
it('should check security', async () => {
  const box = 'boxId70';

  // Each case: box options (undefined → default privacy), expected read
  // access, expected write access.
  const cases = [
    [{ security: 'private' }, false, false],
    [{ security: 'readOnly' }, true, false],
    [{ security: 'public' }, true, true],
    [undefined, false, false], // default privacy
    [{ security: 'nosecurity' }, false, false], // bad security value
  ];

  for (const [options, canRead, canWrite] of cases) {
    if (options === undefined) {
      await backend.createOrUpdateBox(box);
    } else {
      await backend.createOrUpdateBox(box, options);
    }

    await expect(backend.checkSecurity(box, null)).resolves.toBe(canRead);
    await expect(backend.checkSecurity(box, null, true)).resolves.toBe(
      canWrite
    );
  }
});
|
||||
}
|
||||
);
|
1
src/__test__/test.files/badconfigfile.json
Normal file
1
src/__test__/test.files/badconfigfile.json
Normal file
|
@ -0,0 +1 @@
|
|||
crapycrap
|
4
src/__test__/test.files/myconfig.json
Normal file
4
src/__test__/test.files/myconfig.json
Normal file
|
@ -0,0 +1,4 @@
|
|||
{
|
||||
"siteId": "mysiteid",
|
||||
"scriptPath": "/scripts/"
|
||||
}
|
1
src/__test__/test.files/scripts/bad.js
Normal file
1
src/__test__/test.files/scripts/bad.js
Normal file
|
@ -0,0 +1 @@
|
|||
no js here !!!
|
10
src/__test__/test.files/scripts/mysetup.js
Normal file
10
src/__test__/test.files/scripts/mysetup.js
Normal file
|
@ -0,0 +1,10 @@
|
|||
var main = ({ content }) => {
|
||||
content.response = 42;
|
||||
return 'foo';
|
||||
};
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true,
|
||||
});
|
||||
|
||||
exports.default = main;
|
12
src/__test__/test.files/scripts/mysetupWithBadRequire.js
Normal file
12
src/__test__/test.files/scripts/mysetupWithBadRequire.js
Normal file
|
@ -0,0 +1,12 @@
|
|||
const fs = require('fs');
|
||||
|
||||
var main = () => {
|
||||
console.log('fs');
|
||||
return 'foo';
|
||||
};
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true,
|
||||
});
|
||||
|
||||
exports.default = main;
|
11
src/__test__/test.files/scripts/mysetupWithRequire.js
Normal file
11
src/__test__/test.files/scripts/mysetupWithRequire.js
Normal file
|
@ -0,0 +1,11 @@
|
|||
const http = require('http');
|
||||
|
||||
var main = () => {
|
||||
return http;
|
||||
};
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true,
|
||||
});
|
||||
|
||||
exports.default = main;
|
15
src/__test__/test.files/scripts/mytestfunction.js
Normal file
15
src/__test__/test.files/scripts/mytestfunction.js
Normal file
|
@ -0,0 +1,15 @@
|
|||
var main = ({ body, method, query, id, response }) => {
|
||||
const result = { hello: true, method, query, body, console, id };
|
||||
try {
|
||||
result.response = response;
|
||||
} catch {
|
||||
console.log('Missing result');
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true,
|
||||
});
|
||||
|
||||
exports.default = main;
|
BIN
src/__test__/test.png
Normal file
BIN
src/__test__/test.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 6 KiB |
1
src/__test__/testFile.txt
Normal file
1
src/__test__/testFile.txt
Normal file
|
@ -0,0 +1 @@
|
|||
test file content
|
128
src/authentication.js
Normal file
128
src/authentication.js
Normal file
|
@ -0,0 +1,128 @@
|
|||
import easyNoPassword from 'easy-no-password';
|
||||
import express from 'express';
|
||||
import crypto from 'crypto';
|
||||
|
||||
import log from './log.js';
|
||||
import { throwError, errorGuard, errorMiddleware } from './error.js';
|
||||
import { isInWhiteList } from './whitelist.js'
|
||||
|
||||
import { WHITELIST_PATH } from './settings.js'
|
||||
|
||||
/**
 * Hex-encoded SHA-256 digest of `data`, read with the legacy
 * 'binary' (latin1) encoding.
 *
 * @param {string} data - Value to hash.
 * @returns {string} 64-character lowercase hex digest.
 */
const sha256 = (data) => {
  const hash = crypto.createHash('sha256');
  hash.update(data, 'binary');
  return hash.digest('hex');
};
|
||||
|
||||
// Auth Middleware
/**
 * Builds an express router implementing passwordless (magic-link) auth.
 *
 * Routes (all under `/${prefix}`):
 *  - GET  /verify/:userId/:token  Validate a previously issued token.
 *  - GET  /check                  Report whether the session is authenticated.
 *  - POST /                       Derive a user id from the email, create a
 *                                 token and deliver it via `onSendToken`.
 *  - GET  /logout/                Invoke the logout hook.
 *
 * @param {object} options
 * @param {string} [options.prefix='auth'] - URL prefix for all auth routes.
 * @param {string} options.secret - Secret used by easy-no-password tokens.
 * @param {Function} [options.onSendToken] - Async delivery callback invoked
 *   with { remote, userEmail, userId, token, req }.
 * @param {Function} [options.onLogin] - Called as (userId, req) on success.
 * @param {Function} [options.onLogout] - Called with (req) on logout.
 * @returns {express.Router}
 */
export const authentication = ({
  prefix = 'auth',
  secret,
  // eslint-disable-next-line no-unused-vars
  onSendToken = ({ remote, userEmail, userId, token, req }) =>
    Promise.resolve(),
  // Parameter order matches the actual call site below: onLogin(userId, req).
  // eslint-disable-next-line no-unused-vars
  onLogin = (userId, req) => {},
  // eslint-disable-next-line no-unused-vars
  onLogout = (req) => {},
} = {}) => {
  const router = express.Router();

  const enp = easyNoPassword(secret);

  // Verify token
  router.get(
    `/${prefix}/verify/:userId/:token`,
    errorGuard(async (req, res) => {
      const {
        params: { token, userId },
      } = req;

      const isValid = await new Promise((resolve, reject) => {
        enp.isValid(token, userId, (err, isValid) => {
          if (err) {
            reject(err);
            // Don't also call resolve once the promise is rejected.
            return;
          }
          resolve(isValid);
        });
      });

      if (!isValid) {
        throwError('Token invalid or has expired', 403);
      } else {
        onLogin(userId, req);
        res.json({ message: 'success' });
      }
    })
  );

  // Allow to check authentification
  router.get(
    `/${prefix}/check`,
    errorGuard(async (req, res) => {
      if (req.session.userId) {
        res.json({ message: 'success' });
      } else {
        throwError('Not authenticated', 403);
      }
    })
  );

  // Get token
  router.post(
    `/${prefix}/`,
    errorGuard(async (req, res, next) => {
      const {
        body: { userEmail },
        ricochetOrigin,
      } = req;

      if (!userEmail) {
        throwError("Missing mandatory 'email' parameter", 400);
      }

      if (!(await isInWhiteList(WHITELIST_PATH, userEmail))) {
        log.warn(userEmail + " not in whitelist.", 403);
        throwError(userEmail + " not in whitelist.", 403);
      }

      // The user id is the hash of the lower-cased email address.
      const userId = sha256(userEmail.toLowerCase());

      enp.createToken(userId, (err, token) => {
        if (err) {
          // We are inside a node-style callback: a `throw` here would escape
          // errorGuard and become an unhandled exception, so forward the
          // error to express instead.
          const errorObject = new Error('Unknown error');
          errorObject.statusCode = 500;
          next(errorObject);
          return;
        }
        return onSendToken({
          remote: ricochetOrigin,
          userEmail,
          userId,
          token,
          req,
        }).then(
          () => {
            res.json({ message: 'Token sent' });
          },
          (e) => {
            log.error({ error: e }, 'Error while sending email');
            const errorObject = new Error(e);
            errorObject.statusCode = 503;
            next(errorObject);
          }
        );
      });
    })
  );

  // Logout
  router.get(
    `/${prefix}/logout/`,
    errorGuard(async (req, res) => {
      onLogout(req);
      res.json({ message: 'logged out' });
    })
  );

  router.use(errorMiddleware);

  return router;
};
|
||||
|
||||
export default authentication;
|
65
src/cli.js
Executable file
65
src/cli.js
Executable file
|
@ -0,0 +1,65 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
import yargs from 'yargs/yargs';
|
||||
import { hideBin } from 'yargs/helpers';
|
||||
import startServer from './server.js';
|
||||
import { generateKey } from './crypt.js';
|
||||
import repl from 'repl';
|
||||
import { getStoreBackend, wrapBackend } from './store/backends/index.js';
|
||||
|
||||
import {
|
||||
STORE_BACKEND,
|
||||
STORE_PREFIX,
|
||||
NEDB_BACKEND_DIRNAME,
|
||||
} from './settings.js';
|
||||
|
||||
// Command-line entry point: default command starts the server; extra
// commands generate an encryption key or open a site-scoped REPL.
yargs(hideBin(process.argv))
  .usage('Usage: $0 [options]')
  .command(
    '$0',
    'Start the Ricochet.js server',
    () => {},
    (argv) => {
      // Legacy flag: `--generate-key` on the default command.
      if (argv.generateKey) {
        const key = generateKey();
        console.log(`Key: ${key}`);
        return;
      }
      startServer();
    }
  )
  .command(
    ['generatekey', 'generateKey'],
    'Generate random encryption key',
    () => {},
    () => {
      const key = generateKey();
      console.log(`Key: ${key}`);
    }
  )
  .command(
    'shell <siteId>',
    'Open a shell for specified siteId',
    () => {},
    (argv) => {
      const siteId = argv.siteId;

      const storeConfig = {
        prefix: STORE_PREFIX,
        dirname: NEDB_BACKEND_DIRNAME,
      };

      // Create JSON wrapped store backend
      const storeBackend = getStoreBackend(STORE_BACKEND, storeConfig);
      const store = wrapBackend(storeBackend, siteId);

      // Expose the site-scoped store inside an interactive REPL.
      const r = repl.start('> ');
      r.context.store = store;
    }
  )
  .boolean(['generate-key'])
  .describe('generate-key', 'Generate random encryption key')
  .help('h')
  .version()
  .alias('h', 'help').argv;
|
35
src/crypt.js
Normal file
35
src/crypt.js
Normal file
|
@ -0,0 +1,35 @@
|
|||
import crypto from 'crypto';
|
||||
|
||||
/**
 * Encrypts a buffer with a base64-encoded symmetric key.
 * A fresh random IV is generated on every call, so output differs
 * between calls for identical input.
 *
 * @param {Buffer} buffer - Plain data to encrypt.
 * @param {string} key - Base64-encoded key (32 bytes for aes-256-cbc).
 * @param {string} [algorithm='aes-256-cbc'] - OpenSSL cipher name.
 * @returns {{iv: string, encryptedData: string}} Base64 IV and ciphertext.
 */
export const encrypt = (buffer, key, algorithm = 'aes-256-cbc') => {
  const iv = crypto.randomBytes(16);
  const keyBuffer = Buffer.from(key, 'base64');
  const cipher = crypto.createCipheriv(algorithm, keyBuffer, iv);
  const ciphertext = Buffer.concat([cipher.update(buffer), cipher.final()]);
  return {
    iv: iv.toString('base64'),
    encryptedData: ciphertext.toString('base64'),
  };
};
|
||||
|
||||
/**
 * Decrypts a payload produced by `encrypt`.
 *
 * @param {{iv: string, encryptedData: string}} data - Base64 IV + ciphertext.
 * @param {string} key - Base64-encoded key used for encryption.
 * @param {string} [algorithm='aes-256-cbc'] - OpenSSL cipher name.
 * @returns {string} The decrypted plaintext.
 */
export const decrypt = (data, key, algorithm = 'aes-256-cbc') => {
  const iv = Buffer.from(data.iv, 'base64');
  const ciphertext = Buffer.from(data.encryptedData, 'base64');
  const decipher = crypto.createDecipheriv(
    algorithm,
    Buffer.from(key, 'base64'),
    iv
  );
  const plain = Buffer.concat([decipher.update(ciphertext), decipher.final()]);
  return plain.toString();
};
|
||||
|
||||
/**
 * Generates a random 256-bit key, base64-encoded — suitable as the `key`
 * argument of `encrypt`/`decrypt`.
 * @returns {string} Base64 string decoding to 32 random bytes.
 */
export const generateKey = () => crypto.randomBytes(32).toString('base64');
|
||||
|
||||
/* test
|
||||
const algo = 'aes-256-cbc';
|
||||
const key = Buffer.from(crypto.randomBytes(32)).toString('base64');
|
||||
const result = encrypt(Buffer.from('toto'), key, algo);
|
||||
const decrypted = decrypt(result, key, algo);
|
||||
*/
|
20
src/error.js
Normal file
20
src/error.js
Normal file
|
@ -0,0 +1,20 @@
|
|||
/**
 * Throws an Error tagged with an HTTP status code, for `errorMiddleware`
 * to serialize later.
 *
 * @param {string} message - Error message returned to the client.
 * @param {number} [code=400] - HTTP status code.
 * @throws {Error} always.
 */
export const throwError = (message, code = 400) => {
  throw Object.assign(new Error(message), { statusCode: code });
};
|
||||
|
||||
/**
 * Wraps an (async) express handler so that any error — thrown synchronously
 * or via rejection — is forwarded to `next` instead of escaping the chain.
 *
 * @param {Function} func - Handler (req, res, next).
 * @returns {Function} Guarded handler with the same signature.
 */
export const errorGuard = (func) => async (req, res, next) => {
  try {
    const result = await func(req, res, next);
    return result;
  } catch (err) {
    // Delegate to the express error-handling middleware.
    next(err);
  }
};
|
||||
|
||||
// Middleware to handle errors
// Final express error handler: serializes the error message as JSON using
// the status code attached by `throwError` (500 when none is present).
// eslint-disable-next-line no-unused-vars
export const errorMiddleware = (err, req, res, _next) => {
  const status = err.statusCode || 500;
  res.status(status).json({ message: err.message });
};
|
64
src/execute.js
Normal file
64
src/execute.js
Normal file
|
@ -0,0 +1,64 @@
|
|||
import express from 'express';
|
||||
|
||||
import { errorGuard, errorMiddleware } from './error.js';
|
||||
|
||||
/* Roadmap
|
||||
- Allow to register new site
|
||||
- Return public key if no key pair is given
|
||||
- Allow to sign code
|
||||
*/
|
||||
|
||||
// Execute Middleware
/**
 * Express router exposing registered remote functions under
 * `/${prefix}/:functionName/:id?` for every HTTP verb.
 *
 * @param {object} options
 * @param {string} [options.prefix='execute'] - URL prefix.
 * @param {object|Function} [options.context] - Extra values merged into each
 *   call; may be a factory taking the request.
 * @param {object|Function} [options.functions] - Map of callable functions;
 *   may be a factory taking the request.
 * @returns {express.Router}
 */
export const exec = ({
  prefix = 'execute',
  context = {},
  functions = {},
} = {}) => {
  const router = express.Router();

  // Route all query to correct script
  router.all(
    `/${prefix}/:functionName/:id?`,
    errorGuard(async (req, res) => {
      const {
        body,
        params: { functionName, id },
        query,
        method,
        authenticatedUser = null,
      } = req;

      // Both `functions` and `context` may be request-dependent factories.
      const resolvedFunctions =
        typeof functions === 'function' ? functions(req) : functions;

      const target = resolvedFunctions[functionName];
      if (!target) {
        res.status(404).send('Not found');
        return;
      }

      const extraContext =
        typeof context === 'function' ? context(req) : context;

      const result = await target({
        query,
        body,
        method,
        id,
        userId: authenticatedUser,
        ...extraContext,
      });
      res.json(result);
    })
  );

  router.use(errorMiddleware);

  return router;
};
|
||||
|
||||
export default exec;
|
105
src/fileStore/backends/disk.js
Normal file
105
src/fileStore/backends/disk.js
Normal file
|
@ -0,0 +1,105 @@
|
|||
import multer from 'multer';
|
||||
import mime from 'mime-types';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
import { uid } from '../../uid.js';
|
||||
|
||||
/**
 * File store backend persisting uploads on the local filesystem under
 * `destination/siteId/boxId/resourceId/<random>.<ext>`.
 *
 * @param {object} config
 * @param {string} config.destination - Root directory for stored files.
 */
const DiskFileBackend = ({ destination }) => {
  // Directory holding all files of one resource.
  const resourceDir = (siteId, boxId, resourceId) =>
    path.join(destination, siteId, boxId, resourceId);

  const storage = multer.diskStorage({
    destination: (req, _file, cb) => {
      const destinationDir = resourceDir(req.siteId, req.boxId, req.resourceId);

      if (!fs.existsSync(destinationDir)) {
        fs.mkdirSync(destinationDir, { recursive: true });
      }
      cb(null, destinationDir);
    },
    filename: (req, file, cb) => {
      // Random name; extension derived from the declared mime type.
      const ext = mime.extension(file.mimetype);
      cb(null, `${uid()}.${ext}`);
    },
  });

  const upload = multer({ storage });

  return {
    uploadManager: upload.single('file'),

    async list(siteId, boxId, resourceId) {
      try {
        return await fs.promises.readdir(
          resourceDir(siteId, boxId, resourceId)
        );
      } catch (err) {
        /* istanbul ignore next */
        if (err.code === 'ENOENT') {
          // No file stored yet for this resource.
          return [];
        }
        throw err;
      }
    },

    async store(siteId, boxId, resourceId, file) {
      // Nothing to do here. Already done by upload manager
      return file.filename;
    },

    async exists(siteId, boxId, resourceId, filename) {
      const filePath = path.join(
        resourceDir(siteId, boxId, resourceId),
        filename
      );
      return fs.existsSync(filePath);
    },

    async get(siteId, boxId, resourceId, filename) {
      const filePath = path.join(
        resourceDir(siteId, boxId, resourceId),
        filename
      );
      return {
        mimetype: mime.lookup(filename),
        stream: fs.createReadStream(filePath),
      };
    },

    async delete(siteId, boxId, resourceId, filename) {
      const filePath = path.join(
        resourceDir(siteId, boxId, resourceId),
        filename
      );
      await fs.promises.unlink(filePath);
    },
  };
};
|
||||
|
||||
export default DiskFileBackend;
|
38
src/fileStore/backends/index.js
Normal file
38
src/fileStore/backends/index.js
Normal file
|
@ -0,0 +1,38 @@
|
|||
import MemoryFileBackend from './memory.js';
|
||||
import DiskFileBackend from './disk.js';
|
||||
import S3FileBackend from './s3.js';
|
||||
|
||||
export { default as MemoryFileBackend } from './memory.js';
|
||||
export { default as DiskFileBackend } from './disk.js';
|
||||
export { default as S3FileBackend } from './s3.js';
|
||||
|
||||
/**
 * Instantiates a file-store backend by name.
 * Unknown or missing types fall back to the in-memory backend.
 *
 * @param {string} type - 's3', 'disk' or anything else for memory.
 * @param {object} backendConfig - Passed through to the backend factory.
 */
export const getFileStoreBackend = (type, backendConfig) => {
  if (type === 's3') {
    return S3FileBackend(backendConfig);
  }
  if (type === 'disk') {
    return DiskFileBackend(backendConfig);
  }
  return MemoryFileBackend(backendConfig);
};
|
||||
|
||||
/**
 * Returns a view of `backend` bound to one site: callers pass
 * (boxId, resourceId, ...) and `siteId` is prepended to every call.
 *
 * @param {object} backend - Raw file-store backend.
 * @param {string} siteId - Site to scope all operations to.
 */
export const wrapBackend = (backend, siteId) => ({
  store: async (boxId, resourceId, file) =>
    backend.store(siteId, boxId, resourceId, file),
  list: async (boxId, resourceId) => backend.list(siteId, boxId, resourceId),
  exists: async (boxId, resourceId, filename) =>
    backend.exists(siteId, boxId, resourceId, filename),
  get: async (boxId, resourceId, filename, headers) =>
    backend.get(siteId, boxId, resourceId, filename, headers),
  delete: async (boxId, resourceId, filename) =>
    backend.delete(siteId, boxId, resourceId, filename),
});
|
65
src/fileStore/backends/memory.js
Normal file
65
src/fileStore/backends/memory.js
Normal file
|
@ -0,0 +1,65 @@
|
|||
import multer from 'multer';
|
||||
import mime from 'mime-types';
|
||||
import { Duplex } from 'stream';
|
||||
|
||||
import { uid } from '../../uid.js';
|
||||
|
||||
// Wraps an in-memory buffer into a stream: the whole payload is queued,
// then end-of-stream is signalled.
const bufferToStream = (data) => {
  const duplex = new Duplex();
  duplex.push(data);
  duplex.push(null); // end-of-stream marker
  return duplex;
};
|
||||
|
||||
/**
 * In-memory file store backend (non-persistent; for tests and dev).
 * Files are kept as { buffer, mimetype } records keyed by
 * "siteId/boxId/resourceId" then by filename.
 */
const MemoryFileBackend = () => {
  const fileMap = {};
  const upload = multer({ storage: multer.memoryStorage() });

  const keyOf = (siteId, boxId, resourceId) =>
    `${siteId}/${boxId}/${resourceId}`;

  return {
    uploadManager: upload.single('file'),

    async list(siteId, boxId, resourceId) {
      const store = fileMap[keyOf(siteId, boxId, resourceId)] || {};
      return Object.keys(store);
    },

    async store(siteId, boxId, resourceId, file) {
      const ext = mime.extension(file.mimetype);
      const filename = `${uid()}.${ext}`;

      // Expose the generated name on the multer file object.
      file.filename = filename;

      const key = keyOf(siteId, boxId, resourceId);
      const store = fileMap[key] || {};

      store[filename] = {
        buffer: file.buffer,
        mimetype: file.mimetype,
      };

      fileMap[key] = store;

      return filename;
    },

    async exists(siteId, boxId, resourceId, filename) {
      const store = fileMap[keyOf(siteId, boxId, resourceId)];
      return store !== undefined && store[filename] !== undefined;
    },

    async get(siteId, boxId, resourceId, filename) {
      // Callers are expected to check `exists` first.
      const record = fileMap[keyOf(siteId, boxId, resourceId)][filename];
      return {
        mimetype: record.mimetype,
        stream: bufferToStream(record.buffer),
      };
    },

    async delete(siteId, boxId, resourceId, filename) {
      delete fileMap[keyOf(siteId, boxId, resourceId)][filename];
    },
  };
};
|
||||
|
||||
export default MemoryFileBackend;
|
171
src/fileStore/backends/s3.js
Normal file
171
src/fileStore/backends/s3.js
Normal file
|
@ -0,0 +1,171 @@
|
|||
import aws from '@aws-sdk/client-s3';
|
||||
import multer from 'multer';
|
||||
import multerS3 from 'multer-s3';
|
||||
import mime from 'mime-types';
|
||||
import s3RequestPresigner from '@aws-sdk/s3-request-presigner';
|
||||
|
||||
import { uid } from '../../uid.js';
|
||||
|
||||
const {
|
||||
S3Client,
|
||||
ListObjectsCommand,
|
||||
GetObjectCommand,
|
||||
DeleteObjectCommand,
|
||||
HeadObjectCommand,
|
||||
} = aws;
|
||||
const { getSignedUrl } = s3RequestPresigner;
|
||||
|
||||
// Help here https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html
/**
 * S3-compatible file store backend.
 *
 * Objects are stored under the key `siteId/boxId/resourceId/filename`.
 * `get` resolves, in order of precedence: proxied stream (`proxy`),
 * CDN URL (`cdn`), signed URL (`signedUrl`), then plain public URL.
 *
 * @param {object} config
 * @param {string} config.bucket - Target bucket name.
 * @param {string} config.secretKey - S3 secret access key.
 * @param {string} config.accessKey - S3 access key id.
 * @param {string} config.endpoint - S3 endpoint URL.
 * @param {string} config.region - S3 region.
 * @param {boolean} [config.proxy=false] - Stream files through this server.
 * @param {string} [config.cdn=''] - CDN base URL, if any.
 * @param {boolean} [config.signedUrl=true] - Redirect to a signed URL.
 */
const S3FileBackend = ({
  bucket,
  secretKey,
  accessKey,
  endpoint,
  region,
  proxy = false,
  cdn = '',
  signedUrl = true,
}) => {
  const s3 = new S3Client({
    secretAccessKey: secretKey,
    accessKeyId: accessKey,
    endpoint,
    region,
  });

  const upload = multer({
    storage: multerS3({
      s3: s3,
      acl: 'public-read',
      bucket: bucket,
      //contentType: multerS3.AUTO_CONTENT_TYPE,
      contentType: (req, file, cb) => {
        cb(null, file.mimetype);
      },
      key: (req, file, cb) => {
        const keyPath = `${req.siteId}/${req.boxId}/${req.resourceId}`;

        const ext = mime.extension(file.mimetype);
        const filename = `${uid()}.${ext}`;
        // Add filename to file
        file.filename = filename;
        cb(null, `${keyPath}/${filename}`);
      },
    }),
    limits: { fileSize: 1024 * 1024 * 5 }, // 5MB
  });

  return {
    uploadManager: upload.single('file'),

    async list(siteId, boxId, resourceId) {
      const params = {
        Bucket: bucket,
        Delimiter: '/',
        Prefix: `${siteId}/${boxId}/${resourceId}/`,
      };

      const data = await s3.send(new ListObjectsCommand(params));
      if (data.Contents === undefined) {
        return [];
      }
      // Strip the common prefix so only bare filenames are returned.
      const toRemove = new RegExp(`^${siteId}/${boxId}/${resourceId}/`);
      return data.Contents.map(({ Key }) => Key.replace(toRemove, ''));
    },

    async store(siteId, boxId, resourceId, file) {
      // Upload already happened in the multer-s3 storage engine.
      return file.filename;
    },

    async exists(siteId, boxId, resourceId, filename) {
      const headParams = {
        Bucket: bucket,
        Key: `${siteId}/${boxId}/${resourceId}/${filename}`,
      };

      try {
        await s3.send(new HeadObjectCommand(headParams));
        return true;
      } catch (headErr) {
        if (headErr.name === 'NotFound') {
          return false;
        }
        throw headErr;
      }
    },

    async get(
      siteId,
      boxId,
      resourceId,
      filename,
      {
        'if-none-match': IfNoneMatch,
        'if-match': IfMatch,
        'if-modified-since': IfModifiedSince,
        'if-unmodified-since': IfUnmodifiedSince,
        range: Range,
      }
    ) {
      // Here we proxy the file if needed
      if (proxy) {
        const params = {
          Bucket: bucket,
          Key: `${siteId}/${boxId}/${resourceId}/${filename}`,
          IfNoneMatch,
          IfUnmodifiedSince,
          IfModifiedSince,
          IfMatch,
          Range,
        };

        const { Body } = await s3.send(new GetObjectCommand(params));

        // NOTE(review): reads headers/statusCode off the response Body —
        // presumably populated by the SDK's underlying HTTP stream; confirm
        // against the @aws-sdk/client-s3 version in use.
        return {
          length: Body.headers['content-length'],
          mimetype: Body.headers['content-type'],
          eTag: Body.headers['etag'],
          lastModified: Body.headers['last-modified'],
          statusCode: Body.statusCode,
          stream: Body.statusCode === 304 ? null : Body,
        };
      }

      // Here we have a cdn in front
      if (cdn) {
        return {
          redirectTo: `${cdn}/${siteId}/${boxId}/${resourceId}/${filename}`,
        };
      }

      // We generate a signed url and we return it
      if (signedUrl) {
        const params = {
          Bucket: bucket,
          Key: `${siteId}/${boxId}/${resourceId}/${filename}`,
        };
        const command = new GetObjectCommand(params);
        const url = await getSignedUrl(s3, command, { expiresIn: 60 * 5 });

        return { redirectTo: url };
      }
      // Finally we just use public URL
      return {
        redirectTo: `${endpoint}/${siteId}/${boxId}/${resourceId}/${filename}`,
      };
    },

    async delete(siteId, boxId, resourceId, filename) {
      const key = `${siteId}/${boxId}/${resourceId}/${filename}`;

      const headParams = {
        Bucket: bucket,
        Key: key,
      };

      await s3.send(new DeleteObjectCommand(headParams));
    },
  };
};
|
||||
|
||||
export default S3FileBackend;
|
151
src/fileStore/index.js
Normal file
151
src/fileStore/index.js
Normal file
|
@ -0,0 +1,151 @@
|
|||
import express from 'express';
|
||||
import { MemoryFileBackend, wrapBackend } from './backends/index.js';
|
||||
import { errorGuard } from '../error.js';
|
||||
|
||||
/* ROADMAP
|
||||
- Add security
|
||||
*/
|
||||
|
||||
// In ms
|
||||
//const FILE_CACHE_EXPIRATION = 60_000;
|
||||
|
||||
/**
 * Express router handling file upload, listing, retrieval and deletion
 * for one resource of one box.
 *
 * @param {object} [backend] - File store backend (in-memory by default).
 * @param {object} options
 * @param {string} options.prefix - URL prefix used to build the public
 *   file paths returned to clients.
 */
export const fileStorage = (backend = MemoryFileBackend(), { prefix }) => {
  const app = express.Router();

  // Store a file
  app.post(
    `/`,
    backend.uploadManager,
    errorGuard(async (req, res) => {
      const { siteId, boxId, resourceId, file, authenticatedUser } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      const filename = await wrappedBackend.store(boxId, resourceId, file);

      const pathPrefix = `${siteId}/${prefix}/${boxId}/${resourceId}/file`;

      // Respond with the public path of the newly stored file.
      res.send(`${pathPrefix}/${filename}`);
    })
  );

  // List stored file under namespace
  app.get(
    `/`,
    errorGuard(async (req, res) => {
      const { siteId, boxId, resourceId, authenticatedUser } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      const result = await wrappedBackend.list(boxId, resourceId);

      const pathPrefix = `${siteId}/${prefix}/${boxId}/${resourceId}/file`;

      res.json(result.map((filename) => `${pathPrefix}/${filename}`));
    })
  );

  // Get one file
  app.get(
    `/:filename`,
    errorGuard(async (req, res, next) => {
      const {
        siteId,
        boxId,
        resourceId,
        authenticatedUser,
        params: { filename },
      } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      if (!(await wrappedBackend.exists(boxId, resourceId, filename))) {
        res.status(404).send('Not found');
        return;
      }

      const {
        stream,
        redirectTo,
        mimetype,
        length,
        lastModified,
        eTag,
        statusCode = 200,
      } = await wrappedBackend.get(boxId, resourceId, filename, req.headers);

      // Here the backend respond with another url so we redirect to it
      if (redirectTo) {
        res.redirect(redirectTo);
        return;
      }

      if (length !== undefined) {
        res.set('Content-Length', length);
      }
      if (lastModified !== undefined) {
        res.set('Last-Modified', lastModified);
      }
      if (eTag !== undefined) {
        res.set('ETag', eTag);
      }
      res.set('Content-Type', mimetype);

      // Set a minimal cache
      /* res.setHeader(
        'Cache-Control',
        'public, max-age=' + FILE_CACHE_EXPIRATION / 1000
      );
      res.setHeader(
        'Expires',
        new Date(Date.now() + FILE_CACHE_EXPIRATION).toUTCString()
      );*/

      if (statusCode < 300) {
        res.status(statusCode);
        stream.on('error', next).pipe(res);
      } else {
        if (statusCode === 304) {
          // Not modified: no body to send.
          res.status(statusCode);
          res.end();
        } else {
          res.status(statusCode);
          res.end('Unknown Error');
        }
      }
    })
  );

  // Delete an entry
  app.delete(
    `/:filename`,
    errorGuard(async (req, res) => {
      const {
        siteId,
        boxId,
        resourceId,
        authenticatedUser,
        params: { filename },
      } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      if (!(await wrappedBackend.exists(boxId, resourceId, filename))) {
        res.status(404).send('Not found');
        return;
      }

      await wrappedBackend.delete(boxId, resourceId, filename);

      res.json({ message: 'Deleted' });
    })
  );

  return app;
};
|
||||
|
||||
export default fileStorage;
|
11
src/index.js
Normal file
11
src/index.js
Normal file
|
@ -0,0 +1,11 @@
|
|||
// Re-export the express middleware as the package default export.
export { default } from './middleware.js';

// Optional encryption plugin. Stays `null` when the plugin file is not
// shipped with this build.
export let EncryptPlugin = null;

try{
  // NOTE(review): CommonJS `require` inside an ES module — presumably this
  // file is transpiled to CJS before execution; confirm the build setup.
  EncryptPlugin = require('./EncryptPlugin.js').default;
}catch(e){
  // Only swallow "module not found"; re-throw any other load error.
  if(e.code !== 'MODULE_NOT_FOUND'){
    throw e;
  }
}
|
21
src/log.js
Normal file
21
src/log.js
Normal file
|
@ -0,0 +1,21 @@
|
|||
import pino from 'pino';
|
||||
|
||||
import { USE_PINO } from './settings.js';
|
||||
|
||||
// Console-backed fallback mirroring pino's leveled logging API.
const defaultLog = {
  debug: console.log,
  info: console.info,
  warn: console.warn,
  error: console.error,
  fatal: console.error,
};

// Pick the logger implementation once, at module load time.
const log = USE_PINO ? pino() : defaultLog;

export default log;
|
387
src/middleware.js
Normal file
387
src/middleware.js
Normal file
|
@ -0,0 +1,387 @@
|
|||
import express from 'express';
|
||||
import cookieSession from 'cookie-session';
|
||||
import nodemailer from 'nodemailer';
|
||||
import schedule from 'node-schedule';
|
||||
|
||||
import { throwError } from './error.js';
|
||||
import log from './log.js';
|
||||
import store from './store/index.js';
|
||||
import site from './site.js';
|
||||
import origin from './origin.js';
|
||||
|
||||
import { getStoreBackend, wrapBackend } from './store/backends/index.js';
|
||||
import {
|
||||
getFileStoreBackend,
|
||||
wrapBackend as wrapFileBackend,
|
||||
} from './fileStore/backends/index.js';
|
||||
|
||||
import remote from './remote.js';
|
||||
import execute from './execute.js';
|
||||
import auth from './authentication.js';
|
||||
|
||||
import { decrypt } from './crypt.js';
|
||||
|
||||
/**
 * Builds the per-site Ricochet router: remote setup loading, cookie-session
 * and mail-token authentication, the JSON/file stores, remote function
 * execution and hourly/daily scheduled tasks.
 *
 * Interface unchanged — same options object, returns an express Router.
 * Mounted under `/:siteId` by `mainMiddleware`, after `origin()` (which
 * sets `req.ricochetOrigin`).
 */
export const ricochetMiddleware = ({
  secret,
  fakeEmail = false,
  storeBackend,
  fileStoreBackend,
  storePrefix,
  disableCache = false,
  setupPath,
  getTransporter,
} = {}) => {
  const router = express.Router();

  // Remote Function map
  const functionsBySite = {};
  // Schedule map
  const schedulesBySite = {};
  // Hooks map
  const hooksBySite = {};

  // Decrypt the remote setup payload with the site's key.
  // Used as `preProcess(script, req)` by the remote-code loader.
  const decryptPayload = (script, { siteConfig, siteId, ricochetOrigin }) => {
    const data = JSON.parse(script);

    if (!siteConfig[siteId]) {
      throwError(`Site ${siteId} not registered on ricochet.js`, 404);
    }

    const { key } = siteConfig[siteId];
    try {
      const decrypted = decrypt(data, key);
      return decrypted;
    } catch (e) {
      // Bug fix: the message used to interpolate the imported `remote`
      // middleware factory (no local `remote` was in scope here); use the
      // request origin instead.
      log.warn(
        { error: e },
        `Fails to decrypt Ricochet setup file from ${ricochetOrigin}. Please check your encryption key.`
      );
      throwError(
        `Fails to decrypt Ricochet setup file from ${ricochetOrigin}. Please check your encryption key.`,
        500
      );
    }
  };

  // Remote code
  router.use(
    remote({
      context: (req) => {
        const { siteId, authenticatedUser } = req;
        // Scope both stores to the current site and authenticated user.
        const wrappedBackend = wrapBackend(
          storeBackend,
          siteId,
          authenticatedUser
        );
        const wrappedFileBackend = wrapFileBackend(
          fileStoreBackend,
          siteId,
          authenticatedUser
        );
        if (!functionsBySite[siteId]) {
          functionsBySite[siteId] = {};
        }
        if (!schedulesBySite[siteId]) {
          schedulesBySite[siteId] = { hourly: [], daily: [] };
        }
        if (!hooksBySite[siteId]) {
          hooksBySite[siteId] = {};
        }
        return {
          store: wrappedBackend,
          fileStore: wrappedFileBackend,
          functions: functionsBySite[siteId],
          schedules: schedulesBySite[siteId],
          hooks: hooksBySite[siteId],
        };
      },
      disableCache,
      setupPath,
      preProcess: decryptPayload,
    })
  );

  // Mail the authentication link to the user.
  const onSendToken = async ({ remote, userEmail, userId, token, req }) => {
    const { siteConfig, siteId, t } = req;

    if (!siteConfig[siteId]) {
      throwError(`Site ${siteId} not registered on ricochet.js`, 404);
    }

    const { name: siteName, emailFrom } = siteConfig[siteId];

    log.debug(`Link to connect: ${remote}/login/${userId}/${token}`);
    // With a fake mail host the link is only logged (the fake transporter
    // below still "sends" to a stream).
    if (fakeEmail) {
      log.info(
        t('Auth mail text message', {
          url: `${remote}/login/${userId}/${token}`,
          siteName: siteName,
          interpolation: { escapeValue: false },
        })
      );
    }

    await getTransporter().sendMail({
      from: emailFrom,
      to: userEmail,
      subject: t('Your authentication link', {
        siteName,
        interpolation: { escapeValue: false },
      }),
      text: t('Auth mail text message', {
        url: `${remote}/login/${userId}/${token}`,
        siteName,
      }),
      html: t('Auth mail html message', {
        url: `${remote}/login/${userId}/${token}`,
        siteName,
      }),
    });

    log.info('Auth mail sent');
  };

  const onLogin = (userId, req) => {
    req.session.userId = userId;
  };

  const onLogout = (req) => {
    req.session = null;
  };

  // Session middleware
  router.use(
    cookieSession({
      name: 'session',
      keys: [secret],
      httpOnly: true,

      // Cookie Options
      maxAge: 10 * 24 * 60 * 60 * 1000, // 10 days
      sameSite: 'Lax',
    })
  );

  // Re-set cookie on activity (sliding expiration).
  router.use((req, res, next) => {
    req.session.nowInMinutes = Math.floor(Date.now() / (60 * 1000));
    next();
  });

  // authenticate middleware
  router.use((req, res, next) => {
    if (req.session.userId) {
      req.authenticatedUser = req.session.userId;
    } else {
      req.authenticatedUser = null;
    }
    next();
  });

  // Auth middleware
  router.use(auth({ onSendToken, onLogin, onLogout, secret: secret }));

  // JSON store
  router.use(
    store({
      prefix: storePrefix,
      backend: storeBackend,
      fileBackend: fileStoreBackend,
      hooks: (req) => {
        const { siteId } = req;
        return hooksBySite[siteId];
      },
    })
  );

  // Execute middleware
  router.use(
    execute({
      context: (req) => {
        const { siteId, authenticatedUser } = req;
        const wrappedBackend = wrapBackend(
          storeBackend,
          siteId,
          authenticatedUser
        );
        const wrappedFileBackend = wrapFileBackend(
          fileStoreBackend,
          siteId,
          authenticatedUser
        );
        return { store: wrappedBackend, fileStore: wrappedFileBackend };
      },
      functions: (req) => {
        const { siteId } = req;
        return functionsBySite[siteId];
      },
    })
  );

  // Schedule daily and hourly actions
  schedule.scheduleJob('22 * * * *', () => {
    log.info('Execute hourly actions');
    for (const key in schedulesBySite) {
      const { hourly } = schedulesBySite[key];
      hourly.forEach((callback) => {
        callback();
      });
    }
  });

  schedule.scheduleJob('42 3 * * *', () => {
    log.info('Execute daily actions');
    for (const key in schedulesBySite) {
      const { daily } = schedulesBySite[key];
      daily.forEach((callback) => {
        callback();
      });
    }
  });

  return router;
};
|
||||
|
||||
/**
 * Application-level middleware factory.
 *
 * Wires the site registration routes, then mounts the per-site
 * `ricochetMiddleware` under `/:siteId`. Extra options in `rest`
 * (secret, disableCache, setupPath, ...) are forwarded as-is.
 *
 * @param {string} serverUrl external URL of this server (used in mails).
 * @param {string} serverName display name used in mail templates.
 * @param {boolean} siteRegistrationEnabled enable the /_register routes.
 * @param {object} fileStoreConfig file store backend configuration.
 * @param {object} storeConfig JSON store backend configuration.
 * @param {string} configFile deprecated site config file (migrated on boot).
 * @param {object} emailConfig nodemailer transport config; host 'fake'
 *   switches to a log-only stream transport.
 * @returns an express Router.
 */
export const mainMiddleware = ({
  serverUrl,
  serverName,
  siteRegistrationEnabled,
  fileStoreConfig = {},
  storeConfig = {},
  configFile = './site.json',
  emailConfig = { host: 'fake' },
  ...rest
} = {}) => {
  const router = express.Router();
  // 'fake' host means mails are only streamed/logged, never delivered.
  const fakeEmail = emailConfig.host === 'fake';

  let _transporter = null;

  // Lazily create and memoize the nodemailer transporter.
  const getTransporter = () => {
    const transportConfig =
      emailConfig.host === 'fake'
        ? {
            streamTransport: true,
            newline: 'unix',
            buffer: true,
          }
        : emailConfig;
    if (_transporter === null) {
      _transporter = nodemailer.createTransport({
        ...transportConfig,
      });
    }
    return _transporter;
  };

  // Store backends
  const storeBackend = getStoreBackend(storeConfig.type, storeConfig);
  const fileStoreBackend = getFileStoreBackend(fileStoreConfig.type, {
    url: fileStoreConfig.apiUrl,
    destination: fileStoreConfig.diskDestination,
    bucket: fileStoreConfig.s3Bucket,
    endpoint: fileStoreConfig.s3Endpoint,
    accessKey: fileStoreConfig.s3AccessKey,
    secretKey: fileStoreConfig.s3SecretKey,
    region: fileStoreConfig.s3Region,
    proxy: fileStoreConfig.s3Proxy,
    cdn: fileStoreConfig.s3Cdn,
    signedUrl: fileStoreConfig.s3SignedUrl,
  });

  // Mail the confirmation link for a newly requested site.
  const onSiteCreation = async ({ req, site, confirmPath }) => {
    const { t } = req;
    const confirmURL = `${serverUrl}${confirmPath}`;

    // With a fake mail host the message is only logged.
    if (fakeEmail) {
      log.info(
        t('Site creation text message', {
          url: confirmURL,
          siteId: site._id,
          siteName: serverName,
          interpolation: { escapeValue: false },
        })
      );
    }

    await getTransporter().sendMail({
      from: emailConfig.from,
      to: site.owner,
      subject: t('Please confirm site creation'),
      text: t('Site creation text message', {
        url: confirmURL,
        siteId: site._id,
        siteName: serverName,
      }),
      html: t('Site creation html message', {
        url: confirmURL,
        siteId: site._id,
        siteName: serverName,
      }),
    });
  };

  // Mail the confirmation link for a pending site update.
  const onSiteUpdate = async ({ req, previous, confirmPath }) => {
    const { t } = req;
    const confirmURL = `${serverUrl}${confirmPath}`;

    if (fakeEmail) {
      log.info(
        t('Site update text message', {
          url: confirmURL,
          siteId: previous._id,
          siteName: serverName,
          interpolation: { escapeValue: false },
        })
      );
    }

    await getTransporter().sendMail({
      from: emailConfig.from,
      to: previous.owner,
      subject: t('Please confirm site update'),
      text: t('Site update text message', {
        url: confirmURL,
        siteId: previous._id,
        siteName: serverName,
      }),
      html: t('Site update html message', {
        url: confirmURL,
        siteId: previous._id,
        siteName: serverName,
      }),
    });
  };

  // Site registration / management routes.
  router.use(
    site({
      configFile,
      storeBackend,
      siteRegistrationEnabled,
      onSiteCreation,
      onSiteUpdate,
    })
  );

  // Everything else is site-scoped under /:siteId; origin() resolves the
  // remote origin before the per-site middleware runs.
  router.use(
    '/:siteId',
    (req, res, next) => {
      req.siteId = req.params.siteId;
      next();
    },
    origin(),
    ricochetMiddleware({
      fakeEmail,
      storePrefix: storeConfig.prefix,
      storeBackend,
      fileStoreBackend,
      getTransporter,
      ...rest,
    })
  );
  return router;
};

export default mainMiddleware;
|
23
src/origin.js
Normal file
23
src/origin.js
Normal file
|
@ -0,0 +1,23 @@
|
|||
/**
 * Resolve the remote origin of a request from its headers.
 * Precedence: X-Ricochet-Origin, then the Referer's origin, then Origin,
 * then the legacy X-SPC-Host header (empty string when none is present).
 */
const getRemoteFromQuery = (req) => {
  const { headers } = req;

  const explicitOrigin = headers['x-ricochet-origin'];
  if (explicitOrigin) {
    return explicitOrigin;
  }

  const referer = headers.referer;
  if (referer) {
    return new URL(referer).origin;
  }

  return headers.origin || headers['x-spc-host'] || '';
};
|
||||
|
||||
/**
 * Express middleware that resolves the request's remote origin and stores
 * it on `req.ricochetOrigin`. Responds 400 when no origin-bearing header
 * is present.
 */
const originMiddleware = () => (req, res, next) => {
  const remote = getRemoteFromQuery(req);

  if (!remote) {
    res.status(400).json({
      message: 'One of X-Ricochet-Origin, Origin, Referer header is required',
    });
    // Bug fix: without this `return`, next() was still called and
    // downstream handlers operated on an already-answered request.
    return;
  }

  req.ricochetOrigin = remote;
  next();
};

export default originMiddleware;
|
68
src/remote.js
Normal file
68
src/remote.js
Normal file
|
@ -0,0 +1,68 @@
|
|||
import express from 'express';
|
||||
import log from './log.js';
|
||||
import RemoteCode from './remoteCode.js';
|
||||
|
||||
import { throwError, errorGuard, errorMiddleware } from './error.js';
|
||||
|
||||
// Remote setup Middleware
|
||||
// Remote setup Middleware: lazily fetches and executes the remote setup
// script for each origin before letting requests through.
export const remote = ({
  setupPath = 'setup.js',
  context = {},
  disableCache,
  preProcess,
} = {}) => {
  const router = express.Router();

  // Origins whose setup script has already been executed.
  const setupLoaded = {};

  // Fetches, caches and runs the remote scripts.
  const remoteCode = new RemoteCode({ disableCache, preProcess });

  router.use(
    errorGuard(async (req, res, next) => {
      const origin = req.ricochetOrigin;
      const { clearCache } = req.query;

      if (clearCache) {
        remoteCode.clearCache(origin);
        log.info(`Clear cache for ${origin}`);
      }

      // Reload whenever caching is off, a reload was requested, or this
      // origin's setup has never been run.
      const mustLoad = clearCache || disableCache || !setupLoaded[origin];
      if (!mustLoad) {
        next();
        return;
      }

      try {
        // `context` may be a static object or a per-request factory.
        const contextAddition =
          typeof context === 'function' ? context(req) : context;

        await remoteCode.exec(req, origin, setupPath, { ...contextAddition });

        setupLoaded[origin] = true;
        log.info(`Setup successfully loaded from ${origin}`);
      } catch (e) {
        log.warn({ error: e }, `Fails to load setup from ${origin}`);
        throwError(e, 500);
      }

      next();
    })
  );

  // Liveness probe.
  router.get(`/ping`, async (req, res) => {
    res.send('ok');
  });

  router.use(errorMiddleware);

  return router;
};

export default remote;
|
112
src/remoteCode.js
Normal file
112
src/remoteCode.js
Normal file
|
@ -0,0 +1,112 @@
|
|||
import http from 'http';
|
||||
import https from 'https';
|
||||
import vm2 from 'vm2';
|
||||
|
||||
import NodeCache from 'node-cache';
|
||||
|
||||
const { NodeVM } = vm2;
|
||||
|
||||
const allowedModules = ['http', 'https', 'stream', 'url', 'zlib', 'encoding'];
|
||||
|
||||
class RemoteCode {
  /**
   * Fetches javascript from a remote origin, compiles it in a sandboxed
   * NodeVM and caches the compiled function per origin.
   *
   * @param {boolean} disableCache bypass the script cache when true.
   * @param {Function} preProcess hook applied to the raw script body
   *   before compilation (e.g. decryption). Receives (script, req).
   */
  constructor({ disableCache = false, preProcess = (script) => script }) {
    const cacheConfig = {
      useClones: false,
      stdTTL: 200,
      checkperiod: 250,
    };
    Object.assign(this, {
      disableCache,
      scriptCache: new NodeCache(cacheConfig),
      cacheConfig,
      preProcess,
    });
    this.vm = new NodeVM({
      console: 'inherit',
      require: {
        builtin: allowedModules,
        root: './',
      },
    });
  }

  /**
   * Get and cache the script designed by name from remote
   * @param {string} scriptPath script name.
   * @param {string} extraCommands to be concatened at the end of script.
   * @returns {Promise<Function>} the script's default export.
   */
  async cacheOrFetch(req, remote, scriptPath, extraCommands = '') {
    if (!this.scriptCache.has(remote)) {
      this.scriptCache.set(remote, new NodeCache(this.cacheConfig));
    }

    const cache = this.scriptCache.get(remote);

    if (cache.has(scriptPath) && !this.disableCache) {
      return cache.get(scriptPath);
    } else {
      const httpClient = remote.startsWith('https') ? https : http;
      return new Promise((resolve, reject) => {
        // Collapse duplicated path slashes without touching the protocol's
        // own `//`. The previous `.replace('//', '/')` replaced the first
        // occurrence, i.e. the one in `https://`, and so never deduped a
        // trailing-slash remote.
        const scriptUrl = `${remote}/${scriptPath}`.replace(
          /([^:])\/{2,}/g,
          '$1/'
        );

        httpClient
          .get(scriptUrl, (resp) => {
            if (resp.statusCode === 404) {
              reject({ status: 'not-found' });
              return;
            }

            let script = '';
            resp.on('data', (chunk) => {
              script += chunk;
            });
            resp.on('end', () => {
              try {
                script = this.preProcess.bind(this)(script, req);
              } catch (e) {
                reject({ status: 'error', error: e });
                // Bug fix: stop here — execution used to fall through and
                // try to compile the unprocessed (e.g. still encrypted)
                // payload after rejecting.
                return;
              }
              script += extraCommands;
              try {
                const scriptFunction = this.vm.run(script).default;
                cache.set(scriptPath, scriptFunction);
                this.scriptCache.set(remote, cache);

                resolve(scriptFunction);
              } catch (e) {
                reject({ status: 'error', error: e });
              }
            });
          })
          .on('error', (err) => {
            /* istanbul ignore next */
            reject({ status: 'error', error: err });
          });
      });
    }
  }

  /**
   * Execute the remote script designated by `scriptPath` with `context`
   * spread into its argument.
   */
  async exec(req, remote, scriptPath, context) {
    try {
      const toRun = await this.cacheOrFetch(req, remote, scriptPath);

      return toRun({ ...context });
    } catch (e) {
      if (e.status === 'not-found') {
        throw `Script ${scriptPath} not found on remote ${remote}`;
      } else {
        // Unwrap fetch/compile failures; propagate anything else as-is.
        if (e.error) {
          throw e.error;
        } else {
          throw e;
        }
      }
    }
  }

  /** Drop every cached script for the given remote origin. */
  clearCache(remote) {
    this.scriptCache.del(remote);
  }
}

export default RemoteCode;
|
155
src/server.js
Normal file
155
src/server.js
Normal file
|
@ -0,0 +1,155 @@
|
|||
import express from 'express';
|
||||
import cors from 'cors';
|
||||
import bodyParser from 'body-parser';
|
||||
import { createServer } from 'http';
|
||||
import pinoHttp from 'pino-http';
|
||||
import i18next from 'i18next';
|
||||
import i18nextMiddleware from 'i18next-http-middleware';
|
||||
import i18nextBackend from 'i18next-fs-backend';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
import log from './log.js';
|
||||
import { getDirname } from './utils.js';
|
||||
import middleware from './middleware.js';
|
||||
|
||||
import {
|
||||
PORT,
|
||||
SERVER_URL,
|
||||
FILE_STORE_TYPE,
|
||||
DISK_DESTINATION,
|
||||
S3_SECRET_KEY,
|
||||
S3_ACCESS_KEY,
|
||||
S3_BUCKET,
|
||||
S3_ENDPOINT,
|
||||
S3_REGION,
|
||||
S3_PROXY,
|
||||
S3_CDN,
|
||||
STORE_BACKEND,
|
||||
STORE_PREFIX,
|
||||
NEDB_BACKEND_DIRNAME,
|
||||
MONGODB_URI,
|
||||
MONGODB_DATABASE,
|
||||
SECRET,
|
||||
DISABLE_CACHE,
|
||||
EMAIL_HOST,
|
||||
EMAIL_PORT,
|
||||
EMAIL_USER,
|
||||
EMAIL_PASSWORD,
|
||||
SETUP_PATH,
|
||||
S3_SIGNED_URL,
|
||||
SERVER_NAME,
|
||||
EMAIL_FROM,
|
||||
SITE_REGISTRATION_ENABLED,
|
||||
USE_PINO,
|
||||
} from './settings.js';
|
||||
|
||||
const __dirname = getDirname(import.meta.url);
|
||||
|
||||
/**
 * Boot the Ricochet.js HTTP server: initialize i18n, build the express
 * app with CORS/logging/body parsing/static files, mount the main
 * middleware configured from environment settings, and start listening.
 *
 * @returns the express app (server keeps listening in the background).
 */
const startServer = () => {
  // Synchronous i18next init; preload every locale directory found on disk.
  i18next
    .use(i18nextMiddleware.LanguageDetector)
    .use(i18nextBackend)
    .init({
      supportedLngs: ['en', 'fr'],
      initImmediate: false,
      fallbackLng: 'en',
      preload: fs
        .readdirSync(path.join(__dirname, '../locales'))
        .filter((fileName) => {
          const joinedPath = path.join(
            path.join(__dirname, '../locales'),
            fileName
          );
          const isDirectory = fs.statSync(joinedPath).isDirectory();
          return isDirectory;
        }),
      backend: {
        loadPath: path.join(__dirname, '../locales/{{lng}}/{{ns}}.json'),
      },
    });

  // The secret is mandatory (sessions + encryption); refuse to start without it.
  if (!SECRET) {
    console.log(
      'You must define "RICOCHET_SECRET" environnement variable (tip: use .env file)'
    );
    process.exit(-1);
  }

  const app = express();
  const httpServer = createServer(app);

  // Deliberately permissive CORS: any origin is accepted, with credentials.
  const corsOption = {
    credentials: true,
    origin: (origin, callback) => {
      // Allow ALL origins pls
      return callback(null, true);
    },
  };

  app.use(cors(corsOption));

  // Optional structured request logging.
  if (USE_PINO) {
    app.use(pinoHttp({ logger: log }));
  }

  app.use(
    bodyParser.json({
      limit: '50mb',
    })
  );

  app.use(i18nextMiddleware.handle(i18next));

  app.use(bodyParser.urlencoded({ extended: true }));

  // Static files
  const root = path.join(__dirname, '../public');
  app.use(express.static(root));

  // Main application middleware, configured entirely from settings.js.
  app.use(
    middleware({
      secret: SECRET,
      serverName: SERVER_NAME,
      serverUrl: SERVER_URL,
      siteRegistrationEnabled: SITE_REGISTRATION_ENABLED,
      storeConfig: {
        type: STORE_BACKEND,
        prefix: STORE_PREFIX,
        dirname: NEDB_BACKEND_DIRNAME,
        uri: MONGODB_URI,
        database: MONGODB_DATABASE,
      },
      fileStoreConfig: {
        type: FILE_STORE_TYPE,
        diskDestination: DISK_DESTINATION,
        s3AccessKey: S3_ACCESS_KEY,
        s3Bucket: S3_BUCKET,
        s3Endpoint: S3_ENDPOINT,
        s3SecretKey: S3_SECRET_KEY,
        s3Region: S3_REGION,
        s3Proxy: S3_PROXY,
        s3Cdn: S3_CDN,
        s3SignedUrl: S3_SIGNED_URL,
      },
      disableCache: DISABLE_CACHE,
      setupPath: SETUP_PATH,
      emailConfig: {
        host: EMAIL_HOST,
        port: EMAIL_PORT,
        from: EMAIL_FROM,
        auth: {
          user: EMAIL_USER,
          pass: EMAIL_PASSWORD,
        },
      },
    })
  );

  httpServer.listen(PORT, () => {
    log.info(`Ricochet.js is listening on ${PORT}`);
  });
  return app;
};

export default startServer;
|
53
src/settings.js
Normal file
53
src/settings.js
Normal file
|
@ -0,0 +1,53 @@
|
|||
import dotenv from 'dotenv';
dotenv.config();

// Settings
// All values come from environment variables (optionally via a .env file
// loaded by dotenv above). Flags use string comparison: '1' enables,
// '0' disables.

// HTTP bind port/host. SERVER_PORT takes precedence over PORT.
export const PORT = process.env.SERVER_PORT || process.env.PORT || 4000;
export const HOST = process.env.SERVER_HOST || 'localhost';

// External URL used when building links (mails, confirmations).
export const SERVER_URL = process.env.SERVER_URL || `http://${HOST}:${PORT}`;
export const SERVER_NAME = process.env.SERVER_NAME || 'Ricochet.js';

// Site registration is enabled unless explicitly set to '0'.
export const SITE_REGISTRATION_ENABLED =
  process.env.SITE_REGISTRATION_ENABLED !== '0';

// File store related
// FILE_STORE_BACKEND is the current name; FILE_STORAGE is a legacy alias.
export const FILE_STORE_TYPE =
  process.env.FILE_STORE_BACKEND || process.env.FILE_STORAGE || 'memory';
export const DISK_DESTINATION =
  process.env.DISK_DESTINATION || '/tmp/ricochet_file';

// S3 backend configuration (undefined when unset).
export const S3_ACCESS_KEY = process.env.S3_ACCESS_KEY;
export const S3_SECRET_KEY = process.env.S3_SECRET_KEY;
export const S3_ENDPOINT = process.env.S3_ENDPOINT;
export const S3_BUCKET = process.env.S3_BUCKET;
export const S3_REGION = process.env.S3_REGION;
// Proxy files through this server when '1'.
export const S3_PROXY = process.env.S3_PROXY === '1';
export const S3_CDN = process.env.S3_CDN || '';
// Signed URLs are on by default; '0' switches to public URLs.
export const S3_SIGNED_URL = process.env.S3_SIGNED_URL !== '0';

// JSON store related
// JSON_STORE_BACKEND is the current name; STORE_BACKEND is a legacy alias.
export const STORE_BACKEND =
  process.env.JSON_STORE_BACKEND || process.env.STORE_BACKEND || 'memory';
export const STORE_PREFIX = process.env.STORE_PREFIX || 'store';
// NEDB_BACKEND_DIRNAME is the current name; NEDB_DIRNAME is a legacy alias.
export const NEDB_BACKEND_DIRNAME =
  process.env.NEDB_BACKEND_DIRNAME || process.env.NEDB_DIRNAME || '/tmp/';
export const MONGODB_URI = process.env.MONGODB_URI;
export const MONGODB_DATABASE = process.env.MONGODB_DATABASE;

// Instance secret (sessions + payload encryption). RICOCHET_SECRET is the
// current name; SECRET is a legacy alias. The server refuses to start
// without it.
export const SECRET = process.env.RICOCHET_SECRET || process.env.SECRET;

// Disable the remote-script cache when '1' (useful in development).
export const DISABLE_CACHE = process.env.DISABLE_CACHE === '1';

// SMTP configuration; the 'fake' host streams mails to the log instead.
export const EMAIL_HOST = process.env.EMAIL_HOST || 'fake';
export const EMAIL_PORT = process.env.EMAIL_PORT;
export const EMAIL_USER = process.env.EMAIL_USER;
export const EMAIL_PASSWORD = process.env.EMAIL_PASSWORD;
export const EMAIL_FROM = process.env.EMAIL_FROM || 'no-reply@example.com';

// Name of the remote setup file fetched from each site's origin.
export const SETUP_PATH = process.env.SETUP_PATH || 'ricochet.json';

// Use the pino logger when '1'.
export const USE_PINO = process.env.USE_PINO === '1';

// email whitelist file path (empty disables whitelisting).
export const WHITELIST_PATH = process.env.WHITELIST_PATH || '';
|
299
src/site.js
Normal file
299
src/site.js
Normal file
|
@ -0,0 +1,299 @@
|
|||
import fs from 'fs';
|
||||
import express from 'express';
|
||||
|
||||
import log from './log.js';
|
||||
|
||||
import { generateKey } from './crypt.js';
|
||||
import { errorGuard, errorMiddleware, throwError } from './error.js';
|
||||
import { longUid } from './uid.js';
|
||||
|
||||
// Loose RFC 5322-style email pattern: dotted or quoted local part,
// followed by a dotted domain or a bracketed IPv4 literal.
const emailRegex = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;

// True when `email` matches the pattern above.
const validateEmail = (email) => emailRegex.test(email);
|
||||
|
||||
// Serialize `data` as pretty-printed JSON into `configFilePath`.
// Resolves with the original `data` object once the write completes;
// rejects with the underlying fs error on failure.
const writeConfigFile = async (configFilePath, data) => {
  const serialized = JSON.stringify(data, null, 2);
  await fs.promises.writeFile(configFilePath, serialized);
  return data;
};
|
||||
|
||||
/**
 * Load and parse a JSON configuration file.
 *
 * Resolves with the parsed object. When the file is missing and
 * `createIfMissing` is true, an empty config is written to disk and
 * returned instead.
 *
 * NOTE: callers match the rejection reason with `e.includes('missing')`,
 * so this function deliberately rejects with plain strings.
 *
 * @param {string} configFilePath path of the JSON file to read.
 * @param {boolean} createIfMissing create an empty config when absent.
 * @returns {Promise<object>} the parsed configuration.
 */
const loadConfigFile = (configFilePath, createIfMissing = false) => {
  return new Promise((resolve, reject) => {
    // Read config file
    fs.readFile(configFilePath, 'utf-8', (err, jsonString) => {
      if (err) {
        const { code } = err;
        if (code === 'ENOENT') {
          const data = {};
          if (createIfMissing) {
            log.info('No config file, create default');
            // Bug fix: also propagate write failures — previously a failed
            // write left this promise pending forever with an unhandled
            // rejection.
            writeConfigFile(configFilePath, data).then(
              () => resolve(data),
              (writeErr) => reject(writeErr)
            );
          } else {
            reject(`File ${configFilePath} is missing`);
          }
        } else {
          reject(`Failed to load ${configFilePath} configuration file`);
        }
      } else {
        try {
          const data = JSON.parse(jsonString);
          resolve(data);
        } catch (e) {
          console.log('Fails to parse config file...\n', e);
          reject('Fails to parse config file...');
        }
      }
    });
  });
};
|
||||
|
||||
/**
 * Site registration and management routes.
 *
 * Keeps an in-memory `siteConfig` mirror of the `_site` box, migrates the
 * deprecated JSON config file into the store on first boot, and exposes:
 *   POST  /_register/                    request a new site (mails a token)
 *   GET   /_register/:siteId/confirm/:t  confirm creation/update
 *   PATCH /_register/:siteId             request a site update
 *   GET   /site/settings                 registration availability
 */
const siteMiddleware = ({
  storeBackend,
  configFile,
  onSiteCreation,
  onSiteUpdate,
  siteRegistrationEnabled = true,
}) => {
  const router = express.Router();

  // In-memory mirror of registered sites, shared via req.siteConfig.
  const siteConfig = {};
  let configLoaded = false;

  const getConfirmPath = (siteId, token) =>
    `/_register/${siteId}/confirm/${token}`;

  // Load every registered site into memory; on first boot, create the
  // store boxes and migrate the deprecated config file if present.
  const loadSites = async () => {
    try {
      const sites = await storeBackend.list('_site');
      sites.forEach((site) => {
        siteConfig[site._id] = site;
      });
      configLoaded = true;
    } catch (e) {
      if (e.statusCode === 404 && e.message === 'Box not found') {
        await storeBackend.createOrUpdateBox('_site');
        await storeBackend.createOrUpdateBox('_pending');
        try {
          // Try to load deprecated config file if any
          const siteConfigFile = await loadConfigFile(configFile);
          // Bug fix: the previous `forEach(async ...)` fired the saves
          // without awaiting them, so the file could be renamed before the
          // migration finished and save errors were unhandled.
          for (const [id, data] of Object.entries(siteConfigFile)) {
            await storeBackend.save('_site', id, data);
          }
          fs.renameSync(configFile, `${configFile}.bak`);
          console.log('Migrate deprecated config file data to store.');
        } catch (e) {
          // loadConfigFile rejects with strings; 'missing' means no
          // deprecated file existed, which is fine.
          if (!e.includes('missing')) {
            console.log('Deprecated config file appears to be invalid.');
          }
        }
        await loadSites();
      } else {
        console.log('Error while loading configuration', e);
        process.exit(-1);
      }
    }
  };

  // Fire and forget: requests are rejected with 503 until loaded.
  loadSites();

  router.use((req, res, next) => {
    if (!configLoaded) {
      throwError('Server not ready, try again later.', 503);
    }
    req.siteConfig = siteConfig;
    next();
  });

  // Enable site registration
  if (siteRegistrationEnabled) {
    router.get(
      '/_register/:siteId/confirm/:token',
      errorGuard(async (req, res) => {
        const { siteId, token } = req.params;

        let pending;
        let previous;

        try {
          // Check if pending exists
          pending = await storeBackend.get('_pending', siteId);
        } catch (e) {
          if (e.statusCode === 404) {
            try {
              // Pending missing, check if site already exists
              await storeBackend.get('_site', siteId);
              // Yes, so token is already consumed
              throwError('Token already used.', 403);
            } catch (e) {
              if (e.statusCode === 404) {
                // If site not found so URL is wrong
                throwError('Bad site.', 404);
              } else {
                // Rethrow 403 from above and any unexpected error.
                throw e;
              }
            }
          } else {
            throw e;
          }
        }

        try {
          // Get previous site if exists
          previous = await storeBackend.get('_site', siteId);
        } catch (e) {
          if (e.statusCode !== 404) {
            throw e;
          }
        }

        if (pending.token === token) {
          // Merge pending data over the previous site and drop the token.
          const toSave = { ...(previous || {}), ...pending };
          delete toSave.token;
          const saved = await storeBackend.save('_site', siteId, toSave);
          await storeBackend.delete('_pending', siteId);
          siteConfig[siteId] = { ...saved };
        } else {
          // Token can be invalid if another modification is sent in the meantime
          // or if the token is already consumed.
          throwError('Token invalid or already used.', 403);
        }

        if (previous) {
          // If previous, then we have just updated the site
          res.json({ message: 'Site updated' });
        } else {
          // otherwise we have created a new site
          res.json({ message: 'Site created' });
        }
      })
    );

    router.post(
      '/_register/',
      errorGuard(async (req, res) => {
        const { siteId, name, emailFrom, owner } = req.body;

        if (!siteId || !name || !emailFrom || !owner) {
          throwError(
            'The following data are required for site creation: siteId, name, emailFrom, owner.',
            400
          );
        }

        if (siteId.length < 3 || !siteId.match(/^[a-zA-Z0-9][a-zA-Z0-9_]*$/)) {
          throwError(
            "The siteId must contains at least 3 letters or '_' and can't start with '_'.",
            400
          );
        }

        if (siteConfig[siteId]) {
          // The site already exists
          throwError('A site with the same name already exists.', 403);
        }

        if (!validateEmail(emailFrom)) {
          throwError('emailFrom must be a valid email.', 400);
        }

        if (!validateEmail(owner)) {
          // Bug fix: this message previously said 'emailFrom' (copy-paste).
          throwError('owner must be a valid email.', 400);
        }

        const key = generateKey();
        const token = longUid();

        // Creation stays pending until the mailed token is confirmed.
        const newSite = await storeBackend.save('_pending', siteId, {
          name,
          owner,
          emailFrom,
          key,
          token,
        });

        await onSiteCreation({
          req,
          site: newSite,
          confirmPath: getConfirmPath(siteId, token),
        });

        // Never leak the confirmation token in the API response.
        const response = { ...newSite };
        delete response.token;

        res.json(response);
      })
    );

    router.patch(
      '/_register/:siteId',
      errorGuard(async (req, res) => {
        const { siteId } = req.params;
        const { name, emailFrom } = req.body;

        if (!siteId || !siteConfig[siteId]) {
          // The site doesn't exist
          throwError(
            `Site '${siteId}' doesn't exist. You must create it before.`,
            404
          );
        }

        if (!name || !emailFrom) {
          throwError(
            'The following data are required for site update: name, emailFrom.',
            400
          );
        }

        if (!validateEmail(emailFrom)) {
          throwError('emailFrom must be a valid email.', 400);
        }

        const previous = await storeBackend.get('_site', siteId);

        const token = longUid();

        // Update stays pending until the mailed token is confirmed.
        const updated = await storeBackend.save('_pending', siteId, {
          name,
          emailFrom,
          token,
        });

        await onSiteUpdate({
          req,
          site: { ...updated },
          previous: { ...previous },
          confirmPath: getConfirmPath(siteId, token),
        });

        // Never leak the key or the confirmation token.
        const response = { ...updated };
        delete response.key;
        delete response.token;
        res.json({ ...response });
      })
    );
  }

  router.get(
    '/site/settings',
    errorGuard(async (req, res) => {
      res.json({ registrationEnabled: siteRegistrationEnabled });
    })
  );

  router.use(errorMiddleware);

  return router;
};

export default siteMiddleware;
|
32
src/store/backends/index.js
Normal file
32
src/store/backends/index.js
Normal file
|
@ -0,0 +1,32 @@
|
|||
import NeDBBackend from './nedb.js';
|
||||
import MongoDBBackend from './mongodb.js';
|
||||
import MemoryBackend from './memory.js';
|
||||
|
||||
export { default as NeDBBackend } from './nedb.js';
|
||||
export { default as MongoDBBackend } from './mongodb.js';
|
||||
export { default as MemoryBackend } from './memory.js';
|
||||
export { wrapBackend } from './utils.js';
|
||||
|
||||
// Instantiate the JSON store backend matching `type`.
// Unknown or missing types fall back to the in-memory backend.
export const getStoreBackend = (type, options = {}) => {
  if (type === 'nedb') {
    return NeDBBackend(options);
  }
  if (type === 'mongodb') {
    return MongoDBBackend(options);
  }
  return MemoryBackend();
};
|
||||
|
||||
// Backend interface
|
||||
/*export const Backend = () => {
|
||||
return {
|
||||
async checkSecurity(boxId, id, key) {},
|
||||
async createOrUpdateBox(boxId, options = { ...DEFAULT_BOX_OPTIONS }) {},
|
||||
async list(boxId, { limit, sort, skip, onlyFields, q }) {},
|
||||
async get(boxId, id) {},
|
||||
async create(boxId, data) {},
|
||||
async update(boxId, id, body) {},
|
||||
async delete(boxId, id) {},
|
||||
};
|
||||
};*/
|
170
src/store/backends/memory.js
Normal file
170
src/store/backends/memory.js
Normal file
|
@ -0,0 +1,170 @@
|
|||
import { parse as parserExpression } from 'pivotql-parser-expression';
|
||||
import { compile as compilerJavascript } from 'pivotql-compiler-javascript';
|
||||
|
||||
import { throwError } from '../../error.js';
|
||||
import { uid } from '../../uid.js';
|
||||
|
||||
import { DEFAULT_BOX_OPTIONS } from './utils.js';
|
||||
|
||||
// Memory backend for proof of concept
|
||||
// Memory backend for proof of concept.
// Everything is kept in plain in-process objects, so all data is lost on
// restart. Use the nedb or mongodb backends for persistent storage.
export const MemoryBackend = () => {
  // boxId -> { resourceId -> resource }
  const dataMemoryStore = {};
  // boxId -> box options (security, personal, ...)
  const boxOptions = {};

  // Ensure the container for `boxId` exists, then return it.
  const getOrCreateBox = (boxId) => {
    if (typeof dataMemoryStore[boxId] !== 'object') {
      dataMemoryStore[boxId] = {};
    }
    return dataMemoryStore[boxId];
  };

  // Return a shallow copy of `obj` restricted to the keys in `propArr`.
  // (Object.entries only walks own enumerable keys, unlike for...in.)
  const filterObjectProperties = (obj, propArr) =>
    Object.fromEntries(
      Object.entries(obj).filter(([key]) => propArr.includes(key))
    );

  // Strip reserved timestamp properties from user data WITHOUT mutating
  // the caller's object (the previous implementation used `delete` on
  // the argument itself).
  const cleanData = ({ _createdOn, _modifiedOn, ...cleaned }) => cleaned;

  return {
    // Return the stored options for a box, or undefined if unknown.
    async getBoxOption(boxId) {
      return boxOptions[boxId];
    },

    // Create the box if needed and (re)store its options.
    async createOrUpdateBox(boxId, options = { ...DEFAULT_BOX_OPTIONS }) {
      getOrCreateBox(boxId);
      boxOptions[boxId] = options;
      return { box: boxId, ...options };
    },

    // List resources of a box with pagination, sorting, optional field
    // projection and an optional pivotql query `q`.
    // Throws 404 when the box does not exist, 400 on an invalid query.
    async list(
      boxId,
      {
        limit = 50,
        sort = '_id',
        asc = true,
        skip = 0,
        onlyFields = [],
        q,
      } = {}
    ) {
      if (dataMemoryStore[boxId] === undefined) {
        throwError('Box not found', 404);
      }

      let filter = () => true;
      if (q) {
        try {
          filter = compilerJavascript(parserExpression(q));
        } catch (e) {
          throwError('Invalid query expression.', 400);
        }
      }

      let result = Object.values(dataMemoryStore[boxId]).filter(filter);

      result.sort((resource1, resource2) => {
        if (resource1[sort] < resource2[sort]) {
          return asc ? -1 : 1;
        }
        if (resource1[sort] > resource2[sort]) {
          return asc ? 1 : -1;
        }
        return 0;
      });

      result = result.slice(skip, skip + limit);

      if (onlyFields.length) {
        result = result.map((resource) =>
          filterObjectProperties(resource, onlyFields)
        );
      }
      return result;
    },

    // Return one resource. Throws 404 when box or resource is missing.
    async get(boxId, id) {
      if (!dataMemoryStore[boxId]) {
        throwError('Box not found', 404);
      }
      if (!dataMemoryStore[boxId][id]) {
        throwError('Resource not found', 404);
      }
      return dataMemoryStore[boxId][id];
    },

    // Create (a new id is generated when `id` is falsy) or fully replace
    // a resource. Throws 404 when the box does not exist.
    async save(boxId, id, data) {
      if (dataMemoryStore[boxId] === undefined) {
        throwError('Box not found', 404);
      }

      const cleanedData = cleanData(data);
      const actualId = id || uid();
      const box = dataMemoryStore[boxId];

      let newResource;
      if (box[actualId]) {
        // Replace, but keep the original creation date.
        newResource = {
          ...cleanedData,
          _id: actualId,
          _createdOn: box[actualId]._createdOn,
          _updatedOn: Date.now(),
        };
      } else {
        newResource = {
          ...cleanedData,
          _id: actualId,
          _createdOn: Date.now(),
        };
      }
      box[actualId] = newResource;
      return newResource;
    },

    // Partially update an existing resource (shallow merge over the
    // current data). Throws 404 when box or resource is missing.
    async update(boxId, id, data) {
      if (!dataMemoryStore[boxId]) {
        throwError('Box not found', 404);
      }
      if (!dataMemoryStore[boxId][id]) {
        throwError('Resource not found', 404);
      }

      // Merge over the current data to prevent `_createdOn` modification.
      const updatedItem = {
        ...dataMemoryStore[boxId][id],
        ...cleanData(data),
        _id: id,
        _updatedOn: Date.now(),
      };
      dataMemoryStore[boxId][id] = updatedItem;
      return updatedItem;
    },

    // Delete a resource. Returns the number of deleted items (0 or 1);
    // a missing box is not an error here.
    async delete(boxId, id) {
      if (!dataMemoryStore[boxId]) {
        return 0;
      }
      if (dataMemoryStore[boxId][id] !== undefined) {
        delete dataMemoryStore[boxId][id];
        return 1;
      }
      return 0;
    },
  };
};

export default MemoryBackend;
|
214
src/store/backends/mongodb.js
Normal file
214
src/store/backends/mongodb.js
Normal file
|
@ -0,0 +1,214 @@
|
|||
import { parse as parserExpression } from 'pivotql-parser-expression';
|
||||
import { compile as compilerMongodb } from 'pivotql-compiler-mongodb';
|
||||
|
||||
import { throwError } from '../../error.js';
|
||||
import { uid } from '../../uid.js';
|
||||
|
||||
import { DEFAULT_BOX_OPTIONS } from './utils.js';
|
||||
|
||||
// Mongodb backend
|
||||
// MongoDB backend.
// The "mongodb" driver is imported lazily so the package is only needed
// when this backend is actually used.
export const MongoDBBackend = (options) => {
  let database;
  let _client;

  // Lazily create the MongoClient (does not connect yet).
  const getClient = async () => {
    if (!_client) {
      try {
        const { MongoClient, ServerApiVersion } = await import('mongodb');
        _client = new MongoClient(options.uri, {
          serverApi: ServerApiVersion.v1,
        });
      } catch (e) {
        throw new Error(
          'You must install "mongodb" package in order to be able to use the MongoDBStoreBackend!'
        );
      }
    }
    return _client;
  };

  // Close the underlying connection (exposed as `_close` for tests).
  const close = async () => {
    const client = await getClient();
    database = undefined;
    await client.close();
  };

  // Connect on first use and return the collection backing `boxId`.
  const getBoxDb = async (boxId) => {
    const client = await getClient();
    if (!database) {
      await client.connect();
      database = await client.db(options.database);
    }

    return await database.collection(boxId);
  };

  // Return the stored options record for a box, or null if unknown.
  const getBoxOption = async (boxId) => {
    const boxes = await getBoxDb('boxes');
    return await boxes.findOne({ box: boxId });
  };

  // Strip reserved timestamp properties WITHOUT mutating the caller's
  // object.
  const cleanData = ({ _createdOn, _modifiedOn, ...cleaned }) => cleaned;

  return {
    getBoxOption,
    _close: close,

    // Create or update the box options record and return the stored doc.
    async createOrUpdateBox(boxId, options = { ...DEFAULT_BOX_OPTIONS }) {
      const prevOptions = (await getBoxOption(boxId)) || {};

      // TODO boxes should be prefixed with _?
      const boxes = await getBoxDb('boxes');
      // The previous code read `.value` on the promise itself (always
      // undefined). Await the call first, then unwrap the result.
      const result = await boxes.findOneAndUpdate(
        { box: boxId },
        { $set: { ...prevOptions, ...options, box: boxId } },
        { upsert: true, returnDocument: 'after' }
      );
      return result.value;
    },

    // List resources with pagination, sorting, optional projection and
    // an optional pivotql query `q` compiled to a mongo filter.
    // Throws 404 when the box does not exist, 400 on an invalid query.
    async list(
      boxId,
      {
        limit = 50,
        sort = '_id',
        asc = true,
        skip = 0,
        onlyFields = [],
        q,
      } = {}
    ) {
      const boxRecord = await getBoxOption(boxId);

      if (!boxRecord) {
        throwError('Box not found', 404);
      }

      const boxDB = await getBoxDb(boxId);

      const listOptions = {};

      let filter = {};
      if (q) {
        try {
          filter = compilerMongodb(parserExpression(q));
        } catch (e) {
          throwError('Invalid query expression.', 400);
        }
      }

      if (onlyFields.length) {
        listOptions.projection = onlyFields.reduce((acc, field) => {
          acc[field] = 1;
          return acc;
        }, {});
      }

      return await boxDB
        .find(filter, listOptions)
        .limit(limit)
        .skip(skip)
        .sort({ [sort]: asc ? 1 : -1 })
        .toArray();
    },

    // Return one resource. Throws 404 when box or resource is missing.
    async get(boxId, id) {
      const boxRecord = await getBoxOption(boxId);

      if (!boxRecord) {
        throwError('Box not found', 404);
      }

      const boxDB = await getBoxDb(boxId);

      const result = await boxDB.findOne({ _id: id });

      if (!result) {
        throwError('Resource not found', 404);
      }

      return result;
    },

    // Create (a new id is generated when `id` is falsy) or fully replace
    // a resource. Throws 404 when the box does not exist.
    async save(boxId, id, data) {
      const boxRecord = await getBoxOption(boxId);

      if (!boxRecord) {
        throwError('Box not found', 404);
      }

      const boxDB = await getBoxDb(boxId);
      const actualId = id || uid();

      const now = Date.now();
      const cleanedData = { ...cleanData(data), _updatedOn: now };

      const found = await boxDB.findOne({ _id: actualId });

      if (!found) {
        const toBeInserted = {
          ...cleanedData,
          _createdOn: now,
          _id: actualId,
        };
        await boxDB.insertOne(toBeInserted);
        return toBeInserted;
      }

      // Replace, but keep the original creation date.
      const response = await boxDB.findOneAndReplace(
        { _id: actualId },
        {
          ...cleanedData,
          _createdOn: found._createdOn,
          _id: actualId,
        },
        { returnDocument: 'after' }
      );
      return response.value;
    },

    // Partially update an existing resource ($set merge).
    // Throws 404 when box or resource is missing.
    async update(boxId, id, data) {
      const boxRecord = await getBoxOption(boxId);
      if (!boxRecord) {
        throwError('Box not found', 404);
      }
      const boxDB = await getBoxDb(boxId);

      const found = await boxDB.findOne({ _id: id });

      if (!found) {
        throwError('Resource not found', 404);
      }

      const response = await boxDB.findOneAndUpdate(
        { _id: id },
        {
          $set: {
            ...cleanData(data),
            _updatedOn: Date.now(),
            _id: id,
          },
        },
        { returnDocument: 'after' }
      );
      return response.value;
    },

    // Delete a resource. Returns the number of deleted items (0 or 1).
    async delete(boxId, id) {
      const boxRecord = await getBoxOption(boxId);
      if (!boxRecord) {
        return 0;
      }
      const boxDB = await getBoxDb(boxId);
      const { deletedCount } = await boxDB.deleteOne({ _id: id });
      return deletedCount;
    },
  };
};

export default MongoDBBackend;
|
263
src/store/backends/nedb.js
Normal file
263
src/store/backends/nedb.js
Normal file
|
@ -0,0 +1,263 @@
|
|||
import { parse as parserExpression } from 'pivotql-parser-expression';
|
||||
import { compile as compilerMongodb } from 'pivotql-compiler-mongodb';
|
||||
|
||||
import { throwError } from '../../error.js';
|
||||
import { uid } from '../../uid.js';
|
||||
|
||||
import { DEFAULT_BOX_OPTIONS } from './utils.js';
|
||||
|
||||
// Nedb backend for proof of concept
|
||||
// NeDB backend for proof of concept.
// The "@seald-io/nedb" driver is imported lazily so the package is only
// needed when this backend is actually used.
export const NeDBBackend = (options) => {
  // boxId -> Datastore
  const db = {};
  let _Datastore;

  // Lazily import the driver and open (or create) the datastore file
  // backing `boxId`.
  const getBoxDB = async (boxId) => {
    if (!db[boxId]) {
      if (!_Datastore) {
        try {
          _Datastore = (await import('@seald-io/nedb')).default;
        } catch (e) {
          throw new Error(
            'You must install "nedb" package in order to be able to use the NeDBStoreBackend!'
          );
        }
      }
      db[boxId] = new _Datastore({
        filename: `${options.dirname}/${boxId}.json`,
        ...options,
        autoload: true,
      });
    }
    return db[boxId];
  };

  // Return the stored options record for a box, or undefined if unknown.
  const getBoxOption = async (boxId) => {
    const boxes = await getBoxDB('boxes');
    return new Promise((resolve, reject) => {
      boxes.findOne({ box: boxId }, (err, doc) => {
        if (err) {
          /* istanbul ignore next */
          return reject(err);
        }
        resolve(doc || undefined);
      });
    });
  };

  // Strip reserved timestamp properties WITHOUT mutating the caller's
  // object.
  const cleanData = ({ _createdOn, _modifiedOn, ...cleaned }) => cleaned;

  return {
    getBoxOption,

    // Create or update the box options record.
    // NOTE(review): nedb's update callback yields the affected count
    // here (no returnUpdatedDocs), and that count is what callers get —
    // kept as-is to preserve behavior.
    async createOrUpdateBox(boxId, options = { ...DEFAULT_BOX_OPTIONS }) {
      const prevOptions = (await getBoxOption(boxId)) || {};
      const boxes = await getBoxDB('boxes');
      return new Promise((resolve, reject) => {
        boxes.update(
          { box: boxId },
          { ...prevOptions, ...options, box: boxId },
          { upsert: true },
          (err, numAffected) => {
            if (err) {
              /* istanbul ignore next */
              return reject(err);
            }
            resolve(numAffected);
          }
        );
      });
    },

    // List resources with pagination, sorting, optional projection and
    // an optional pivotql query `q` compiled to a mongo-style filter.
    // Throws 404 when the box does not exist, 400 on an invalid query.
    async list(
      boxId,
      {
        limit = 50,
        sort = '_id',
        asc = true,
        skip = 0,
        onlyFields = [],
        q,
      } = {}
    ) {
      const boxRecord = await getBoxOption(boxId);

      if (!boxRecord) {
        throwError('Box not found', 404);
      }

      const boxDB = await getBoxDB(boxId);

      let filter = {};
      if (q) {
        try {
          filter = compilerMongodb(parserExpression(q));
        } catch (e) {
          throwError('Invalid query expression.', 400);
        }
      }

      // Mongo-style projection built from the requested field list.
      const projection = onlyFields.length
        ? onlyFields.reduce((acc, field) => {
            acc[field] = 1;
            return acc;
          }, {})
        : {};

      return new Promise((resolve, reject) => {
        boxDB
          .find(filter, projection)
          .limit(limit)
          .skip(skip)
          .sort({ [sort]: asc ? 1 : -1 })
          .exec((err, docs) => {
            if (err) {
              /* istanbul ignore next */
              return reject(err);
            }
            resolve(docs);
          });
      });
    },

    // Return one resource. Rejects with a 404 error when box or resource
    // is missing.
    async get(boxId, id) {
      const boxRecord = await getBoxOption(boxId);

      if (!boxRecord) {
        throwError('Box not found', 404);
      }

      const boxDB = await getBoxDB(boxId);
      return new Promise((resolve, reject) => {
        boxDB.findOne({ _id: id }, (err, doc) => {
          if (err) {
            /* istanbul ignore next */
            return reject(err);
          }
          if (!doc) {
            const newError = new Error('Resource not found');
            newError.statusCode = 404;
            return reject(newError);
          }
          resolve(doc);
        });
      });
    },

    // Create (a new id is generated when `id` is falsy) or fully replace
    // a resource. Throws 404 when the box does not exist.
    async save(boxId, id, data) {
      const boxRecord = await getBoxOption(boxId);

      if (!boxRecord) {
        throwError('Box not found', 404);
      }

      const boxDB = await getBoxDB(boxId);
      const actualId = id || uid();

      const cleanedData = cleanData(data);

      return new Promise((resolve, reject) => {
        // Creation with id or update with id
        boxDB.findOne({ _id: actualId }, (err, doc) => {
          if (err) {
            /* istanbul ignore next */
            return reject(err);
          }
          if (!doc) {
            // Creation
            boxDB.insert(
              { ...cleanedData, _createdOn: Date.now(), _id: actualId },
              (insertErr, inserted) => {
                if (insertErr) {
                  /* istanbul ignore next */
                  return reject(insertErr);
                }
                resolve(inserted);
              }
            );
          } else {
            // Update, keeping the original creation date.
            boxDB.update(
              { _id: actualId },
              {
                ...cleanedData,
                _updatedOn: Date.now(),
                _createdOn: doc._createdOn,
                _id: actualId,
              },
              { returnUpdatedDocs: true },
              (updateErr, numAffected, affectedDoc) => {
                // Check the driver error before interpreting the count:
                // on error the count is meaningless.
                if (updateErr) {
                  /* istanbul ignore next */
                  return reject(updateErr);
                }
                if (!numAffected) {
                  const newError = new Error('Resource not found');
                  newError.statusCode = 404;
                  return reject(newError);
                }
                resolve(affectedDoc);
              }
            );
          }
        });
      });
    },

    // Partially update an existing resource ($set merge).
    // Rejects with a 404 error when box or resource is missing.
    async update(boxId, id, data) {
      const boxRecord = await getBoxOption(boxId);
      if (!boxRecord) {
        throwError('Box not found', 404);
      }
      const boxDB = await getBoxDB(boxId);

      const cleanedData = cleanData(data);

      return new Promise((resolve, reject) => {
        boxDB.update(
          { _id: id },
          {
            $set: {
              ...cleanedData,
              _updatedOn: Date.now(),
              _id: id,
            },
          },
          { returnUpdatedDocs: true },
          (err, numAffected, affectedDoc) => {
            if (err) {
              /* istanbul ignore next */
              return reject(err);
            }
            if (!numAffected) {
              const newError = new Error('Resource not found');
              newError.statusCode = 404;
              return reject(newError);
            }
            resolve(affectedDoc);
          }
        );
      });
    },

    // Delete a resource. Resolves with the number of deleted items.
    async delete(boxId, id) {
      const boxRecord = await getBoxOption(boxId);
      if (!boxRecord) {
        return 0;
      }
      const boxDB = await getBoxDB(boxId);
      return new Promise((resolve, reject) => {
        boxDB.remove({ _id: id }, {}, (err, numRemoved) => {
          if (err) {
            /* istanbul ignore next */
            return reject(err);
          }
          resolve(numRemoved);
        });
      });
    },
  };
};

export default NeDBBackend;
|
87
src/store/backends/utils.js
Normal file
87
src/store/backends/utils.js
Normal file
|
@ -0,0 +1,87 @@
|
|||
// Options applied to a box when none are provided at creation time.
export const DEFAULT_BOX_OPTIONS = { security: 'private', personal: false };
|
||||
|
||||
// Wrap a raw store backend so every user-facing box id is namespaced by
// site, and add the security check plus one-shot data migrations.
export const wrapBackend = (backend, siteId) => {
  // Namespace a user-facing box id with the site id.
  const getBoxId = (userBoxId) => `_${siteId}__${userBoxId}`;

  // One-shot migrations keyed by name. Names already recorded in the box
  // `migrations` option are skipped, so each runs at most once per box.
  const migrationToApply = {
    // Move data from the legacy, non-namespaced box into the new
    // site-prefixed box.
    async storeBySiteId(newBoxId) {
      const oldBoxId = newBoxId.split('__')[1];
      const exists = await backend.getBoxOption(oldBoxId);

      // Migrate only previously existing collection
      if (!exists) return;

      const data = await backend.list(oldBoxId);

      for (const item of data) {
        // Keep the stored `_id`: the previous code read `item.id`,
        // which was always undefined, so every migrated resource was
        // saved under a brand new generated id.
        await backend.save(newBoxId, item._id, item);
      }
    },
  };

  // Apply pending migrations for `boxId`, then record the applied names
  // in the box options.
  const migrate = async (boxId) => {
    const options = (await backend.getBoxOption(boxId)) || {};
    const { migrations = [] } = options;
    const migrationApplied = [];

    for (const key of Object.keys(migrationToApply)) {
      if (!migrations.includes(key)) {
        await migrationToApply[key](boxId);
        migrationApplied.push(key);
      }
    }

    await backend.createOrUpdateBox(boxId, {
      ...options,
      migrations: Array.from(new Set([...migrations, ...migrationApplied])),
    });
  };

  return {
    // Return true when the requested access (read, or write when `write`
    // is true) is allowed by the box security mode. Unknown boxes and
    // unknown modes default to 'private' (deny).
    async checkSecurity(boxId, id, write = false) {
      const { security = 'private' } =
        (await backend.getBoxOption(getBoxId(boxId))) || {};
      switch (security) {
        case 'private':
          return false;
        case 'public':
          return true;
        case 'readOnly':
          return !write;
        default:
          return false;
      }
    },
    // Create/update the namespaced box, then apply pending migrations.
    async createOrUpdateBox(boxId, options) {
      const result = await backend.createOrUpdateBox(getBoxId(boxId), options);
      // Apply migration if any
      await migrate(getBoxId(boxId));
      return result;
    },
    async list(boxId, options) {
      return await backend.list(getBoxId(boxId), options);
    },
    async get(boxId, id) {
      return await backend.get(getBoxId(boxId), id);
    },
    // Alias of `save`, kept for backward compatibility with callers
    // using the `set` name.
    async set(boxId, id, data) {
      return await backend.save(getBoxId(boxId), id, data);
    },
    async save(boxId, id, data) {
      return await backend.save(getBoxId(boxId), id, data);
    },
    async update(boxId, id, data) {
      return await backend.update(getBoxId(boxId), id, data);
    },
    async delete(boxId, id) {
      return await backend.delete(getBoxId(boxId), id);
    },
  };
};
|
338
src/store/index.js
Normal file
338
src/store/index.js
Normal file
|
@ -0,0 +1,338 @@
|
|||
import express from 'express';
|
||||
import { MemoryBackend, wrapBackend } from './backends/index.js';
|
||||
import { MemoryFileBackend } from '../fileStore/backends/index.js';
|
||||
import fileStore from '../fileStore/index.js';
|
||||
import { throwError, errorGuard, errorMiddleware } from '../error.js';
|
||||
|
||||
// Utility functions
|
||||
|
||||
// ROADMAP
|
||||
// - Add bulk operations with atomicity
|
||||
// - Add Queries
|
||||
// - Add relationship
|
||||
// - Add http2 relationship ?
|
||||
// - Add multiple strategies
|
||||
// - Read / Write
|
||||
// - Read only
|
||||
// - No access (only from execute)
|
||||
|
||||
// HTTP methods that never mutate data — file access with one of these
// only needs read permission.
const SAFE_METHOD = ['GET', 'OPTIONS', 'HEAD'];

// Store Middleware.
//
// Mounts a REST resource API under `/{prefix}`:
//   GET    /:boxId/         list resources
//   GET    /:boxId/:id      get one resource
//   POST   /:boxId/:id?     create or replace a resource
//   PUT    /:boxId/:id      update a resource
//   DELETE /:boxId/:id      delete a resource
//   *      /:boxId/:id/file file sub-router backed by `fileBackend`
//
// `hooks` is either a map of hook lists ({ before, after, beforeFile,
// afterFile }) or a function of the request returning such a map. A
// `before` hook can set `allow: true` to bypass the box security check.
export const store = ({
  prefix = 'store',
  backend = MemoryBackend(),
  fileBackend = MemoryFileBackend(),
  hooks = {},
} = {}) => {
  const router = express.Router();

  // Run the hooks of `type` over a context built from the request.
  // Read-only entries (method, boxId, resourceId, userId and
  // `roContextAddition`) are re-applied after each hook so hooks cannot
  // override them; everything else a hook returns is kept.
  const applyHooks = async (
    type,
    req,
    roContextAddition,
    writableContextAddition = {}
  ) => {
    let hooksMap = hooks;
    if (typeof hooks === 'function') {
      hooksMap = hooks(req);
    }

    const {
      body,
      params: { boxId, id },
      query,
      method,
      authenticatedUser = null,
    } = req;

    const roContext = {
      method,
      boxId: boxId,
      resourceId: id,
      userId: authenticatedUser,
      ...roContextAddition,
    };

    let context = {
      query,
      body,
      ...writableContextAddition,
      ...roContext,
    };

    const hookList = hooksMap[type] || [];

    for (const hook of hookList) {
      const newContext = await hook(context);
      context = { ...newContext, ...roContext };
    }

    return context;
  };

  // Box ids starting with '_' are reserved for internal, site-prefixed
  // collections and must not be reachable through the API.
  const forbidReservedBoxId = (boxId) => {
    if (boxId[0] === '_') {
      throwError(
        "'_' char is forbidden for first letter of a box id parameter",
        400
      );
    }
  };

  // Resource list
  router.get(
    `/${prefix}/:boxId/`,
    errorGuard(async (req, res) => {
      const { boxId } = req.params;
      const { siteId, authenticatedUser } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      const { query, allow = false } = await applyHooks('before', req, {
        store: wrappedBackend,
      });

      if (!allow && !(await wrappedBackend.checkSecurity(boxId, null))) {
        throwError('You need read access for this box', 403);
      }

      const {
        limit = '50',
        sort = '_createdOn',
        skip = '0',
        q,
        fields,
      } = query;

      const onlyFields = fields ? fields.split(',') : [];

      const parsedLimit = parseInt(limit, 10);
      const parsedSkip = parseInt(skip, 10);

      let sortProperty = sort;
      let asc = true;

      // If prefixed with '-' inverse order
      if (sort[0] === '-') {
        sortProperty = sort.substring(1);
        asc = false;
      }

      const response = await wrappedBackend.list(boxId, {
        sort: sortProperty,
        asc,
        limit: parsedLimit,
        skip: parsedSkip,
        onlyFields: onlyFields,
        q,
      });

      const { response: hookedResponse } = await applyHooks(
        'after',
        req,
        {
          query,
          store: wrappedBackend,
        },
        { response }
      );

      res.json(hookedResponse);
    })
  );

  // One object
  router.get(
    `/${prefix}/:boxId/:id`,
    errorGuard(async (req, res) => {
      const { boxId, id } = req.params;

      const { siteId, authenticatedUser } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      forbidReservedBoxId(boxId);

      const { allow = false } = await applyHooks('before', req, {
        store: wrappedBackend,
      });

      if (!allow && !(await wrappedBackend.checkSecurity(boxId, id))) {
        throwError('You need read access for this box', 403);
      }

      const response = await wrappedBackend.get(boxId, id);

      const { response: hookedResponse } = await applyHooks(
        'after',
        req,
        {
          store: wrappedBackend,
        },
        { response }
      );

      res.json(hookedResponse);
    })
  );

  // Create / replace object
  router.post(
    `/${prefix}/:boxId/:id?`,
    errorGuard(async (req, res) => {
      const {
        params: { boxId, id },
        siteId,
        authenticatedUser,
      } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      forbidReservedBoxId(boxId);

      const { body, allow = false } = await applyHooks('before', req, {
        store: wrappedBackend,
      });

      if (!allow && !(await wrappedBackend.checkSecurity(boxId, id, true))) {
        throwError('You need write access for this box', 403);
      }

      const response = await wrappedBackend.save(boxId, id, body);

      const { response: hookedResponse } = await applyHooks('after', req, {
        response,
        store: wrappedBackend,
      });

      return res.json(hookedResponse);
    })
  );

  // Update existing object
  router.put(
    `/${prefix}/:boxId/:id`,
    errorGuard(async (req, res) => {
      const { boxId, id } = req.params;

      const { siteId, authenticatedUser } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      forbidReservedBoxId(boxId);

      const { body, allow = false } = await applyHooks('before', req, {
        store: wrappedBackend,
      });

      if (!allow && !(await wrappedBackend.checkSecurity(boxId, id, true))) {
        throwError('You need write access for this resource', 403);
      }

      const response = await wrappedBackend.update(boxId, id, body);

      const { response: hookedResponse } = await applyHooks('after', req, {
        response,
        store: wrappedBackend,
      });

      return res.json(hookedResponse);
    })
  );

  // Delete object
  router.delete(
    `/${prefix}/:boxId/:id`,
    errorGuard(async (req, res) => {
      const { boxId, id } = req.params;

      const { siteId, authenticatedUser } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      forbidReservedBoxId(boxId);

      const { allow = false } = await applyHooks('before', req, {
        store: wrappedBackend,
      });

      if (!allow && !(await wrappedBackend.checkSecurity(boxId, id, true))) {
        throwError('You need write access for this resource', 403);
      }

      const result = await wrappedBackend.delete(boxId, id);

      await applyHooks('after', req, {
        store: wrappedBackend,
      });

      if (result === 1) {
        res.json({ message: 'Deleted' });
        return;
      }

      throwError('Box or resource not found', 404);
    })
  );

  // File sub-router: check access first, delegate to the file store,
  // then run the afterFile hooks.
  router.use(
    `/${prefix}/:boxId/:id/file`,
    errorGuard(async (req, _, next) => {
      const { boxId, id } = req.params;

      const { siteId, authenticatedUser } = req;

      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      const { allow = false } = await applyHooks('beforeFile', req, {
        store: wrappedBackend,
      });

      // Write access is required for any non-safe HTTP method.
      if (
        !allow &&
        !(await wrappedBackend.checkSecurity(
          boxId,
          id,
          !SAFE_METHOD.includes(req.method)
        ))
      ) {
        throwError('You need write access for this resource', 403);
      }

      req.boxId = boxId;
      req.resourceId = id;
      next();
    }),
    fileStore(fileBackend, { prefix }),
    errorGuard(async (req, _, next) => {
      const { siteId, authenticatedUser } = req;
      const wrappedBackend = wrapBackend(backend, siteId, authenticatedUser);

      await applyHooks('afterFile', req, {
        store: wrappedBackend,
      });

      next();
    })
  );

  router.use(errorMiddleware);

  return router;
};

export default store;
|
133
src/test.http
Normal file
133
src/test.http
Normal file
|
@ -0,0 +1,133 @@
|
|||
@remote_url = http://localhost:9000
|
||||
@api_url = http://192.168.0.11:4000/store
|
||||
@exec_url = http://192.168.0.11:4000/execute
|
||||
|
||||
@auth_url = http://192.168.0.11:4000/auth
|
||||
|
||||
### LIST
|
||||
|
||||
GET {{api_url}}/boxId/ HTTP/1.1
|
||||
x-spc-host: {{remote_url}}
|
||||
|
||||
### CREATE FOO
|
||||
|
||||
# @name createElem
|
||||
POST {{api_url}}/boxId/ HTTP/1.1
|
||||
content-type: application/json
|
||||
|
||||
{
|
||||
"name": "foo"
|
||||
}
|
||||
|
||||
### CREATE BAR
|
||||
|
||||
POST {{api_url}}/boxId/ HTTP/1.1
|
||||
content-type: application/json
|
||||
|
||||
{
|
||||
"name": "bar"
|
||||
}
|
||||
|
||||
### LIST with limit, skip and sort
|
||||
|
||||
GET {{api_url}}/boxId/?limit=2&sort=-name&skip=1&fields=name,_id HTTP/1.1
|
||||
|
||||
### GET
|
||||
|
||||
@elemId = {{createElem.response.body.$._id}}
|
||||
|
||||
GET {{api_url}}/boxId/{{elemId}} HTTP/1.1
|
||||
|
||||
### UPDATE
|
||||
|
||||
@elemId = {{createElem.response.body.$._id}}
|
||||
|
||||
PUT {{api_url}}/boxId/{{elemId}} HTTP/1.1
|
||||
content-type: application/json
|
||||
|
||||
{
|
||||
"other": "baz"
|
||||
}
|
||||
|
||||
### DELETE
|
||||
|
||||
@elemId = {{createElem.response.body.$._id}}
|
||||
|
||||
DELETE {{api_url}}/boxId/{{elemId}} HTTP/1.1
|
||||
|
||||
# With authentication ########################
|
||||
|
||||
### CREATE TIP
|
||||
|
||||
# @name createElemKey
|
||||
|
||||
POST {{api_url}}/boxId/?key=testkey HTTP/1.1
|
||||
content-type: application/json
|
||||
|
||||
{
|
||||
"name": "tip"
|
||||
}
|
||||
|
||||
### UPDATE with good key
|
||||
|
||||
@elemIdWithKey = {{createElemKey.response.body.$._id}}
|
||||
|
||||
PUT {{api_url}}/boxId/{{elemIdWithKey}}?key=testkey HTTP/1.1
|
||||
content-type: application/json
|
||||
|
||||
{
|
||||
"other": "baz"
|
||||
}
|
||||
|
||||
|
||||
### UPDATE with bad key
|
||||
|
||||
@elemIdWithKey = {{createElemKey.response.body.$._id}}
|
||||
|
||||
PUT {{api_url}}/boxId/{{elemIdWithKey}}?key=badkey HTTP/1.1
|
||||
content-type: application/json
|
||||
|
||||
{
|
||||
"other": "baz"
|
||||
}
|
||||
|
||||
### DELETE with bad key
|
||||
|
||||
@elemIdWithKey = {{createElemKey.response.body.$._id}}
|
||||
|
||||
DELETE {{api_url}}/boxId/{{elemIdWithKey}}?key=badkey HTTP/1.1
|
||||
|
||||
### DELETE with good key
|
||||
|
||||
@elemIdWithKey = {{createElemKey.response.body.$._id}}
|
||||
|
||||
DELETE {{api_url}}/boxId/{{elemIdWithKey}}?key=testkey HTTP/1.1
|
||||
|
||||
|
||||
# Execution module
|
||||
|
||||
### just get test
|
||||
|
||||
GET {{exec_url}}/test/?param1=toto HTTP/1.1
|
||||
x-spc-host: http://localhost:9000
|
||||
|
||||
# Auth module
|
||||
|
||||
### Get token
|
||||
|
||||
POST {{auth_url}}/ HTTP/1.1
|
||||
|
||||
{
|
||||
"userId": "test@yopmail.com"
|
||||
}
|
||||
|
||||
|
||||
### LIST GAME
|
||||
|
||||
GET {{api_url}}/game/ HTTP/1.1
|
||||
x-spc-host: {{remote_url}}
|
||||
|
||||
### just get test
|
||||
|
||||
GET {{exec_url}}/test/?param1=toto HTTP/1.1
|
||||
x-spc-host: http://localhost:9000/
|
12
src/uid.js
Normal file
12
src/uid.js
Normal file
|
@ -0,0 +1,12 @@
|
|||
import { customAlphabet } from 'nanoid';
|
||||
|
||||
// Alphabet used for generated ids: visually ambiguous characters
// (0/O/o, 1/I/l/i) are left out.
const alpha = '23456789ABCDEFGHJKMNPQRSTUVWXYZabcdefghjkmnpqrstuvwxyz';

// Build a nanoid generator of the given length over the custom alphabet.
const makeGenerator = (size) => customAlphabet(alpha, size);

// 40-character uid generator.
export const longUid = makeGenerator(40);

// 15-character uid generator.
export const uid = makeGenerator(15);

// 5-character uid generator.
export const smallUid = makeGenerator(5);
|
7
src/utils.js
Normal file
7
src/utils.js
Normal file
|
@ -0,0 +1,7 @@
|
|||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
// Compute the directory of an ES module from its `import.meta.url` —
// the ESM replacement for CommonJS `__dirname`.
export const getDirname = (url) => path.dirname(fileURLToPath(url));
|
36
src/whitelist.js
Normal file
36
src/whitelist.js
Normal file
|
@ -0,0 +1,36 @@
|
|||
import fs from 'fs';
|
||||
import * as readline from 'node:readline';
|
||||
|
||||
import log from './log.js';
|
||||
|
||||
// Check whether `email` appears (one address per line) in the whitelist
// file at `filename`.
// Returns true when no whitelist file is configured (registration open),
// false when `email` is empty.
// Previous implementation returned false on the FIRST non-matching line,
// so only the first whitelist entry was ever honored.
export const isInWhiteList = async (filename, email) => {
  if (!email) {
    log.warn("isInWhiteList: empty email.");
    return false;
  }

  if (!filename) {
    log.debug('isInWhiteList: no whitelist file defined.');
    return true;
  }

  const fileStream = fs.createReadStream(filename);

  const rl = readline.createInterface({
    input: fileStream,
    crlfDelay: Infinity
  });
  // Note: we use the crlfDelay option to recognize all instances of CR LF
  // ('\r\n') in input as a single line break.

  try {
    const wanted = email.trim();
    for await (const line of rl) {
      // Trim stored entries too so stray whitespace in the file does not
      // hide a match.
      if (line.trim() === wanted) {
        return true;
      }
    }
    return false;
  } finally {
    // Release the readline interface and file handle even when a match
    // ends the loop early.
    rl.close();
    fileStream.close();
  }
}
|
Loading…
Reference in a new issue