Spaces:
Sleeping
Sleeping
Merge pull request #8 from hiett/sh/response-encoding
Browse filesResponse encoding, and automated testing with @upstash/redis
- .github/workflows/test.yml +37 -0
- .gitignore +1 -3
- HOW_TO_BUILD.md +7 -0
- README.md +117 -74
- config/test.exs +1 -0
- example/.gitignore +2 -0
- example/package.json +19 -0
- example/src/index.ts +39 -0
- example/srh-config.json +7 -0
- example/tsconfig.json +109 -0
- example/yarn.lock +162 -0
- lib/srh/auth/token_resolver.ex +28 -3
- lib/srh/http/base_router.ex +31 -9
- lib/srh/http/request_validator.ex +15 -0
- lib/srh/http/result_encoder.ex +66 -0
.github/workflows/test.yml
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Test @upstash/redis compatibility
|
2 |
+
on:
|
3 |
+
workflow_dispatch:
|
4 |
+
push:
|
5 |
+
paths:
|
6 |
+
- 'lib/**'
|
7 |
+
schedule:
|
8 |
+
- cron: '0 12 * * *'
|
9 |
+
|
10 |
+
env:
|
11 |
+
SRH_TOKEN: example_token
|
12 |
+
|
13 |
+
jobs:
|
14 |
+
container-job:
|
15 |
+
runs-on: ubuntu-latest
|
16 |
+
container: denoland/deno
|
17 |
+
services:
|
18 |
+
redis:
|
19 |
+
image: redis/redis-stack-server:6.2.6-v6 # 6.2 is the Upstash compatible Redis version
|
20 |
+
srh:
|
21 |
+
image: hiett/serverless-redis-http:0.0.5-alpha
|
22 |
+
env:
|
23 |
+
SRH_MODE: env
|
24 |
+
SRH_TOKEN: ${{ env.SRH_TOKEN }}
|
25 |
+
SRH_CONNECTION_STRING: redis://redis:6379
|
26 |
+
|
27 |
+
steps:
|
28 |
+
- name: Checkout code
|
29 |
+
uses: actions/checkout@v3
|
30 |
+
with:
|
31 |
+
repository: upstash/upstash-redis
|
32 |
+
|
33 |
+
- name: Run @upstash/redis Test Suite
|
34 |
+
run: deno test -A ./pkg
|
35 |
+
env:
|
36 |
+
UPSTASH_REDIS_REST_URL: http://srh:80
|
37 |
+
UPSTASH_REDIS_REST_TOKEN: ${{ env.SRH_TOKEN }}
|
.gitignore
CHANGED
@@ -29,6 +29,4 @@ srh-*.tar
|
|
29 |
|
30 |
*.iml
|
31 |
|
32 |
-
srh-config/
|
33 |
-
|
34 |
-
example/
|
|
|
29 |
|
30 |
*.iml
|
31 |
|
32 |
+
srh-config/
|
|
|
|
HOW_TO_BUILD.md
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
## Building the Docker image
|
2 |
+
|
3 |
+
To build both an amd64 image and an arm64 image, on an M1 Mac:
|
4 |
+
|
5 |
+
```
|
6 |
+
docker buildx build --platform linux/amd64,linux/arm64 --push -t hiett/serverless-redis-http:0.0.5-alpha
|
7 |
+
```
|
README.md
CHANGED
@@ -1,102 +1,145 @@
|
|
1 |
-
#
|
2 |
|
3 |
-
|
|
|
4 |
|
5 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
6 |
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
|
|
23 |
import {Redis} from '@upstash/redis';
|
24 |
|
25 |
export const redis = new Redis({
|
26 |
url: "http://localhost:8079",
|
27 |
token: "example_token",
|
28 |
-
responseEncoding: false, // IMPORTANT: Upstash has recently added response encoding, but SRH does not support it yet.
|
29 |
});
|
30 |
```
|
31 |
-
---
|
32 |
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
-
|
41 |
-
-
|
42 |
-
-
|
43 |
-
-
|
44 |
-
-
|
|
|
45 |
|
46 |
-
##
|
47 |
-
|
48 |
-
|
49 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
50 |
|
51 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
52 |
|
53 |
-
|
54 |
-
You have two options to run this:
|
55 |
-
- Via docker: `docker pull hiett/serverless-redis-http:latest` [Docker Hub link](https://hub.docker.com/r/hiett/serverless-redis-http)
|
56 |
-
- Via elixir: `(clone this repo)` -> `mix deps.get` -> `iex -S mix`
|
57 |
|
58 |
-
|
59 |
-
An example of a run command is the following:
|
60 |
|
61 |
-
|
62 |
|
63 |
-
|
64 |
|
65 |
-
|
66 |
-
Create a file: `srh-config/tokens.json`
|
67 |
```json
|
68 |
{
|
69 |
"example_token": {
|
70 |
"srh_id": "some_unique_identifier",
|
71 |
"connection_string": "redis://localhost:6379",
|
72 |
"max_connections": 3
|
73 |
-
}
|
74 |
}
|
75 |
```
|
|
|
76 |
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
docker-compose.yaml
|
83 |
-
```yaml
|
84 |
-
version: '3'
|
85 |
-
services:
|
86 |
-
redis:
|
87 |
-
image: redis
|
88 |
-
ports:
|
89 |
-
- '6379:6379'
|
90 |
-
serverless-redis-http:
|
91 |
-
ports:
|
92 |
-
- '8079:80'
|
93 |
-
image: hiett/serverless-redis-http:latest
|
94 |
-
volumes:
|
95 |
-
- ./path/to/tokens.json:/app/srh-config/tokens.json
|
96 |
-
```
|
97 |
|
98 |
-
Notes
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
|
|
|
1 |
+
# Serverless Redis HTTP (SRH)
|
2 |
|
3 |
+
A Redis proxy and connection pooler that uses HTTP rather than the Redis binary protocol.\
|
4 |
+
The aim of this project is to be entirely compatible with Upstash, and work with any Upstash supported Redis version.
|
5 |
|
6 |
+
Use cases for SRH:
|
7 |
+
- For usage in your CI pipelines, creating Upstash databases is tedious, or you have lots of parallel runs.
|
8 |
+
- See [Using in GitHub Actions](#in-github-actions) on how to quickly get SRH setup for this context.
|
9 |
+
- For usage inside of Kubernetes, or any network whereby the Redis server is not exposed to the internet.
|
10 |
+
- See [Using in Docker Compose](#via-docker-compose) for the various setup options directly using the Docker Container.
|
11 |
+
- For local development environments, where you have a local Redis server running, or require offline access.
|
12 |
+
- See [Using the Docker Command](#via-docker-command), or [Using Docker Compose](#via-docker-compose).
|
13 |
|
14 |
+
## Differences between Upstash and Redis to note
|
15 |
+
SRH tests are run nightly against the `@upstash/redis` JavaScript package. However, there are some minor differences between Upstash's implementation of Redis and the official Redis code.
|
16 |
+
|
17 |
+
- The `UNLINK` command will not throw an error when 0 keys are given to it. In Redis, and as such SRH, an error will be thrown.
|
18 |
+
- In the `ZRANGE` command, in Upstash you are not required to provide `BYSCORE` or `BYLEX` in order to use the `LIMIT` argument. With Redis/SRH, this will throw an error if not provided.
|
19 |
+
- The Upstash implementation of `RedisJSON` contains a number of subtle differences in what is returned in responses. For this reason, **it is not advisable to use SRH with Redis Stack if you are testing your Upstash implementation that uses JSON commands**. If you don't use any JSON commands, then all is good :)
|
20 |
+
- **SRH does not implement commands via paths, or accepting the token via a query param**. Only the body method is implemented, which the `@upstash/redis` SDK uses.
|
21 |
+
|
22 |
+
### Similarities to note:
|
23 |
+
|
24 |
+
Pipelines and Transaction endpoints are also implemented, also using the body data only. You can read more about the RestAPI here: [Upstash Docs on the Rest API](https://docs.upstash.com/redis/features/restapi)
|
25 |
+
|
26 |
+
Response encoding is also fully implemented. This is enabled by default by the `@upstash/redis` SDK. You can read more about that here: [Upstash Docs on Hashed Responses](https://docs.upstash.com/redis/sdks/javascriptsdk/troubleshooting#hashed-response)
|
27 |
+
|
28 |
+
## How to use with the `@upstash/redis` SDK
|
29 |
+
Simply set the REST URL and token to where the SRH instance is running. For example:
|
30 |
+
```ts
|
31 |
import {Redis} from '@upstash/redis';
|
32 |
|
33 |
export const redis = new Redis({
|
34 |
url: "http://localhost:8079",
|
35 |
token: "example_token",
|
|
|
36 |
});
|
37 |
```
|
|
|
38 |
|
39 |
+
# Setting up SRH
|
40 |
+
## Via Docker command
|
41 |
+
If you have a locally running Redis server, you can simply start an SRH container that connects to it.
|
42 |
+
In this example, SRH will be running on port `8080`.
|
43 |
+
|
44 |
+
```bash
|
45 |
+
docker run \
|
46 |
+
-it -d -p 8080:80 --name srh \
|
47 |
+
-e SRH_MODE=env \
|
48 |
+
-e SRH_TOKEN=your_token_here \
|
49 |
+
-e SRH_CONNECTION_STRING="redis://your_server_here:6379" \
|
50 |
+
hiett/serverless-redis-http:latest
|
51 |
+
```
|
52 |
|
53 |
+
## Via Docker Compose
|
54 |
+
If you wish to run in Kubernetes, this should contain all the basics you would need to set that up. However, be sure to read the Configuration Options, because you can create a setup whereby multiple Redis servers are proxied.
|
55 |
+
```yml
|
56 |
+
version: '3'
|
57 |
+
services:
|
58 |
+
redis:
|
59 |
+
image: redis
|
60 |
+
ports:
|
61 |
+
- '6379:6379'
|
62 |
+
serverless-redis-http:
|
63 |
+
ports:
|
64 |
+
- '8079:80'
|
65 |
+
image: hiett/serverless-redis-http:latest
|
66 |
+
environment:
|
67 |
+
SRH_MODE: env
|
68 |
+
SRH_TOKEN: example_token
|
69 |
+
SRH_CONNECTION_STRING: 'redis://redis:6379' # Using `redis` hostname since they're in the same Docker network.
|
70 |
+
```
|
71 |
|
72 |
+
## In GitHub Actions
|
73 |
+
|
74 |
+
SRH works nicely in GitHub Actions because you can run it as a container in a job's services. Simply start a Redis server, and then
|
75 |
+
SRH alongside it. You don't need to worry about a race condition of the Redis instance not being ready, because SRH doesn't create a Redis connection until the first command comes in.
|
76 |
+
|
77 |
+
```yml
|
78 |
+
name: Test @upstash/redis compatibility
|
79 |
+
on:
|
80 |
+
push:
|
81 |
+
workflow_dispatch:
|
82 |
+
|
83 |
+
env:
|
84 |
+
SRH_TOKEN: example_token
|
85 |
+
|
86 |
+
jobs:
|
87 |
+
container-job:
|
88 |
+
runs-on: ubuntu-latest
|
89 |
+
container: denoland/deno
|
90 |
+
services:
|
91 |
+
redis:
|
92 |
+
image: redis/redis-stack-server:6.2.6-v6 # 6.2 is the Upstash compatible Redis version
|
93 |
+
srh:
|
94 |
+
image: hiett/serverless-redis-http:latest
|
95 |
+
env:
|
96 |
+
SRH_MODE: env # We are using env mode because we are only connecting to one server.
|
97 |
+
SRH_TOKEN: ${{ env.SRH_TOKEN }}
|
98 |
+
SRH_CONNECTION_STRING: redis://redis:6379
|
99 |
+
|
100 |
+
steps:
|
101 |
+
# You can place your normal testing steps here. In this example, we are running SRH against the upstash/upstash-redis test suite.
|
102 |
+
- name: Checkout code
|
103 |
+
uses: actions/checkout@v3
|
104 |
+
with:
|
105 |
+
repository: upstash/upstash-redis
|
106 |
+
|
107 |
+
- name: Run @upstash/redis Test Suite
|
108 |
+
run: deno test -A ./pkg
|
109 |
+
env:
|
110 |
+
UPSTASH_REDIS_REST_URL: http://srh:80
|
111 |
+
UPSTASH_REDIS_REST_TOKEN: ${{ env.SRH_TOKEN }}
|
112 |
+
```
|
113 |
|
114 |
+
# Configuration Options
|
|
|
|
|
|
|
115 |
|
116 |
+
SRH works with multiple Redis servers, and can pool however many connections you wish it to. It will shut down un-used pools after 15 minutes of inactivity. Upon the next command, it will re-build the pool.
|
|
|
117 |
|
118 |
+
## Connecting to multiple Redis servers at the same time
|
119 |
|
120 |
+
The examples above use environment variables in order to tell SRH which Redis server to connect to. However, you can also use a configuration JSON file, which lets you create as many connections as you wish. The token provided in each request will decide which pool is used.
|
121 |
|
122 |
+
Create a JSON file, in this example called `tokens.json`:
|
|
|
123 |
```json
|
124 |
{
|
125 |
"example_token": {
|
126 |
"srh_id": "some_unique_identifier",
|
127 |
"connection_string": "redis://localhost:6379",
|
128 |
"max_connections": 3
|
129 |
+
}
|
130 |
}
|
131 |
```
|
132 |
+
You can provide as many entries to the base object as you wish, and configure the number of max connections per pool. The `srh_id` is used internally to keep track of instances. It can be anything you want.
|
133 |
|
134 |
+
Once you have created this, mount it to the docker container to the `/app/srh-config/tokens.json` file. Here is an example docker command:
|
135 |
+
|
136 |
+
`docker run -it -d -p 8079:80 --name srh --mount type=bind,source=$(pwd)/tokens.json,target=/app/srh-config/tokens.json hiett/serverless-redis-http:latest`
|
137 |
+
|
138 |
+
## Environment Variables
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
139 |
|
140 |
+
| Name | Default Value | Notes |
|
141 |
+
| ---- | ------------- | ----- |
|
142 |
+
| SRH_MODE | `file` | Can be `env` or `file`. If `file`, see [Connecting to multiple Redis servers](#connecting-to-multiple-redis-servers-at-the-same-time). If set to `env`, you are required to provide the following environment variables: |
|
143 |
+
| SRH_TOKEN | `<required if SRH_MODE = env>` | Set the token that the Rest API will require |
|
144 |
+
| SRH_CONNECTION_STRING | `<required if SRH_MODE = env>` | Sets the connection string to the Redis server. |
|
145 |
+
| SRH_MAX_CONNECTIONS | `3` | Only used if `SRH_MODE=env`.
|
config/test.exs
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
import Config
|
example/.gitignore
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
node_modules/
|
2 |
+
dist/
|
example/package.json
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"name": "srh-example",
|
3 |
+
"version": "1.0.0",
|
4 |
+
"main": "index.js",
|
5 |
+
"author": "Scott Hiett",
|
6 |
+
"license": "MIT",
|
7 |
+
"private": false,
|
8 |
+
"scripts": {
|
9 |
+
"start": "ts-node src/index.ts"
|
10 |
+
},
|
11 |
+
"dependencies": {
|
12 |
+
"@upstash/redis": "^1.20.2"
|
13 |
+
},
|
14 |
+
"devDependencies": {
|
15 |
+
"@types/node": "^18.15.11",
|
16 |
+
"ts-node": "^10.9.1",
|
17 |
+
"typescript": "^5.0.2"
|
18 |
+
}
|
19 |
+
}
|
example/src/index.ts
ADDED
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {Redis} from "@upstash/redis";
|
2 |
+
|
3 |
+
const redis = new Redis({
|
4 |
+
// The URL of the SRH instance
|
5 |
+
url: "http://127.0.0.1:8080",
|
6 |
+
|
7 |
+
// The token you defined in tokens.json
|
8 |
+
token: "example_token",
|
9 |
+
|
10 |
+
// Response encoding is supported (this is enabled by default)
|
11 |
+
responseEncoding: true,
|
12 |
+
});
|
13 |
+
|
14 |
+
(async () => {
|
15 |
+
await redis.set("foo", "bar");
|
16 |
+
const value = await redis.get("foo");
|
17 |
+
console.log(value);
|
18 |
+
|
19 |
+
// Run a pipeline operation
|
20 |
+
const pipelineResponse = await redis.pipeline()
|
21 |
+
.set("amazing-key", "bar")
|
22 |
+
.get("amazing-key")
|
23 |
+
.del("amazing-other-key")
|
24 |
+
.del("random-key-that-doesnt-exist")
|
25 |
+
.srandmember("random-key-that-doesnt-exist")
|
26 |
+
.sadd("amazing-set", "item1", "item2", "item3", "bar", "foo", "example")
|
27 |
+
.smembers("amazing-set")
|
28 |
+
.get("foo")
|
29 |
+
.exec();
|
30 |
+
|
31 |
+
console.log(pipelineResponse);
|
32 |
+
|
33 |
+
const multiExecResponse = await redis.multi()
|
34 |
+
.set("example", "value")
|
35 |
+
.get("example")
|
36 |
+
.exec();
|
37 |
+
|
38 |
+
console.log(multiExecResponse);
|
39 |
+
})();
|
example/srh-config.json
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"example_token": {
|
3 |
+
"srh_id": "some_unique_identifier",
|
4 |
+
"connection_string": "redis://redis:6379",
|
5 |
+
"max_connections": 3
|
6 |
+
}
|
7 |
+
}
|
example/tsconfig.json
ADDED
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"compilerOptions": {
|
3 |
+
/* Visit https://aka.ms/tsconfig to read more about this file */
|
4 |
+
|
5 |
+
/* Projects */
|
6 |
+
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
|
7 |
+
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
|
8 |
+
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
|
9 |
+
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
|
10 |
+
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
|
11 |
+
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
|
12 |
+
|
13 |
+
/* Language and Environment */
|
14 |
+
"target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
|
15 |
+
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
|
16 |
+
// "jsx": "preserve", /* Specify what JSX code is generated. */
|
17 |
+
// "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
|
18 |
+
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
|
19 |
+
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
|
20 |
+
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
|
21 |
+
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
|
22 |
+
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
|
23 |
+
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
|
24 |
+
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
|
25 |
+
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
|
26 |
+
|
27 |
+
/* Modules */
|
28 |
+
"module": "commonjs", /* Specify what module code is generated. */
|
29 |
+
"rootDir": "./src", /* Specify the root folder within your source files. */
|
30 |
+
// "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
|
31 |
+
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
|
32 |
+
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
|
33 |
+
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
|
34 |
+
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
|
35 |
+
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
|
36 |
+
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
37 |
+
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
|
38 |
+
// "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
|
39 |
+
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
|
40 |
+
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
|
41 |
+
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
|
42 |
+
// "resolveJsonModule": true, /* Enable importing .json files. */
|
43 |
+
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
|
44 |
+
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
|
45 |
+
|
46 |
+
/* JavaScript Support */
|
47 |
+
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
|
48 |
+
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
|
49 |
+
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
|
50 |
+
|
51 |
+
/* Emit */
|
52 |
+
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
|
53 |
+
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
|
54 |
+
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
|
55 |
+
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
|
56 |
+
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
|
57 |
+
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
|
58 |
+
"outDir": "./dist", /* Specify an output folder for all emitted files. */
|
59 |
+
// "removeComments": true, /* Disable emitting comments. */
|
60 |
+
// "noEmit": true, /* Disable emitting files from a compilation. */
|
61 |
+
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
|
62 |
+
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
|
63 |
+
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
|
64 |
+
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
|
65 |
+
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
66 |
+
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
|
67 |
+
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
|
68 |
+
// "newLine": "crlf", /* Set the newline character for emitting files. */
|
69 |
+
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
|
70 |
+
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
|
71 |
+
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
|
72 |
+
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
|
73 |
+
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
|
74 |
+
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
|
75 |
+
|
76 |
+
/* Interop Constraints */
|
77 |
+
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
|
78 |
+
// "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
|
79 |
+
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
|
80 |
+
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
|
81 |
+
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
|
82 |
+
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
|
83 |
+
|
84 |
+
/* Type Checking */
|
85 |
+
"strict": true, /* Enable all strict type-checking options. */
|
86 |
+
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
|
87 |
+
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
|
88 |
+
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
|
89 |
+
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
|
90 |
+
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
|
91 |
+
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
|
92 |
+
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
|
93 |
+
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
|
94 |
+
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
|
95 |
+
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
|
96 |
+
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
|
97 |
+
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
|
98 |
+
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
|
99 |
+
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
|
100 |
+
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
|
101 |
+
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
|
102 |
+
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
|
103 |
+
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
|
104 |
+
|
105 |
+
/* Completeness */
|
106 |
+
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
|
107 |
+
"skipLibCheck": true /* Skip type checking all .d.ts files. */
|
108 |
+
}
|
109 |
+
}
|
example/yarn.lock
ADDED
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
2 |
+
# yarn lockfile v1
|
3 |
+
|
4 |
+
|
5 |
+
"@cspotcode/source-map-support@^0.8.0":
|
6 |
+
version "0.8.1"
|
7 |
+
resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1"
|
8 |
+
integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==
|
9 |
+
dependencies:
|
10 |
+
"@jridgewell/trace-mapping" "0.3.9"
|
11 |
+
|
12 |
+
"@jridgewell/resolve-uri@^3.0.3":
|
13 |
+
version "3.1.0"
|
14 |
+
resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78"
|
15 |
+
integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==
|
16 |
+
|
17 |
+
"@jridgewell/sourcemap-codec@^1.4.10":
|
18 |
+
version "1.4.14"
|
19 |
+
resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24"
|
20 |
+
integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==
|
21 |
+
|
22 |
+
"@jridgewell/[email protected]":
|
23 |
+
version "0.3.9"
|
24 |
+
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9"
|
25 |
+
integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==
|
26 |
+
dependencies:
|
27 |
+
"@jridgewell/resolve-uri" "^3.0.3"
|
28 |
+
"@jridgewell/sourcemap-codec" "^1.4.10"
|
29 |
+
|
30 |
+
"@tsconfig/node10@^1.0.7":
|
31 |
+
version "1.0.9"
|
32 |
+
resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2"
|
33 |
+
integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==
|
34 |
+
|
35 |
+
"@tsconfig/node12@^1.0.7":
|
36 |
+
version "1.0.11"
|
37 |
+
resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d"
|
38 |
+
integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==
|
39 |
+
|
40 |
+
"@tsconfig/node14@^1.0.0":
|
41 |
+
version "1.0.3"
|
42 |
+
resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1"
|
43 |
+
integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==
|
44 |
+
|
45 |
+
"@tsconfig/node16@^1.0.2":
|
46 |
+
version "1.0.3"
|
47 |
+
resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e"
|
48 |
+
integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==
|
49 |
+
|
50 |
+
"@types/node@^18.15.11":
|
51 |
+
version "18.15.11"
|
52 |
+
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.11.tgz#b3b790f09cb1696cffcec605de025b088fa4225f"
|
53 |
+
integrity sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==
|
54 |
+
|
55 |
+
"@upstash/redis@^1.20.2":
|
56 |
+
version "1.20.2"
|
57 |
+
resolved "https://registry.yarnpkg.com/@upstash/redis/-/redis-1.20.2.tgz#f797915c90054764b26d2289f5da5dd4b68d8480"
|
58 |
+
integrity sha512-9QS/SypDxeeh672H7dEEmuYOX5TtPYnaDLlhxWJEPd8LzcEQ6hohwDJuojpqGkvvvrK58mlWOkN1GrMxbXPTeQ==
|
59 |
+
dependencies:
|
60 |
+
isomorphic-fetch "^3.0.0"
|
61 |
+
|
62 |
+
acorn-walk@^8.1.1:
|
63 |
+
version "8.2.0"
|
64 |
+
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1"
|
65 |
+
integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==
|
66 |
+
|
67 |
+
acorn@^8.4.1:
|
68 |
+
version "8.8.2"
|
69 |
+
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a"
|
70 |
+
integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==
|
71 |
+
|
72 |
+
arg@^4.1.0:
|
73 |
+
version "4.1.3"
|
74 |
+
resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
|
75 |
+
integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
|
76 |
+
|
77 |
+
create-require@^1.1.0:
|
78 |
+
version "1.1.1"
|
79 |
+
resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333"
|
80 |
+
integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==
|
81 |
+
|
82 |
+
diff@^4.0.1:
|
83 |
+
version "4.0.2"
|
84 |
+
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
|
85 |
+
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
|
86 |
+
|
87 |
+
isomorphic-fetch@^3.0.0:
|
88 |
+
version "3.0.0"
|
89 |
+
resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4"
|
90 |
+
integrity sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==
|
91 |
+
dependencies:
|
92 |
+
node-fetch "^2.6.1"
|
93 |
+
whatwg-fetch "^3.4.1"
|
94 |
+
|
95 |
+
make-error@^1.1.1:
|
96 |
+
version "1.3.6"
|
97 |
+
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
|
98 |
+
integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
|
99 |
+
|
100 |
+
node-fetch@^2.6.1:
|
101 |
+
version "2.6.9"
|
102 |
+
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6"
|
103 |
+
integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==
|
104 |
+
dependencies:
|
105 |
+
whatwg-url "^5.0.0"
|
106 |
+
|
107 |
+
tr46@~0.0.3:
|
108 |
+
version "0.0.3"
|
109 |
+
resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
|
110 |
+
integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==
|
111 |
+
|
112 |
+
ts-node@^10.9.1:
|
113 |
+
version "10.9.1"
|
114 |
+
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b"
|
115 |
+
integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==
|
116 |
+
dependencies:
|
117 |
+
"@cspotcode/source-map-support" "^0.8.0"
|
118 |
+
"@tsconfig/node10" "^1.0.7"
|
119 |
+
"@tsconfig/node12" "^1.0.7"
|
120 |
+
"@tsconfig/node14" "^1.0.0"
|
121 |
+
"@tsconfig/node16" "^1.0.2"
|
122 |
+
acorn "^8.4.1"
|
123 |
+
acorn-walk "^8.1.1"
|
124 |
+
arg "^4.1.0"
|
125 |
+
create-require "^1.1.0"
|
126 |
+
diff "^4.0.1"
|
127 |
+
make-error "^1.1.1"
|
128 |
+
v8-compile-cache-lib "^3.0.1"
|
129 |
+
yn "3.1.1"
|
130 |
+
|
131 |
+
typescript@^5.0.2:
|
132 |
+
version "5.0.2"
|
133 |
+
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.0.2.tgz#891e1a90c5189d8506af64b9ef929fca99ba1ee5"
|
134 |
+
integrity sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw==
|
135 |
+
|
136 |
+
v8-compile-cache-lib@^3.0.1:
|
137 |
+
version "3.0.1"
|
138 |
+
resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
|
139 |
+
integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==
|
140 |
+
|
141 |
+
webidl-conversions@^3.0.0:
|
142 |
+
version "3.0.1"
|
143 |
+
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
|
144 |
+
integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
|
145 |
+
|
146 |
+
whatwg-fetch@^3.4.1:
|
147 |
+
version "3.6.2"
|
148 |
+
resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c"
|
149 |
+
integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA==
|
150 |
+
|
151 |
+
whatwg-url@^5.0.0:
|
152 |
+
version "5.0.0"
|
153 |
+
resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"
|
154 |
+
integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==
|
155 |
+
dependencies:
|
156 |
+
tr46 "~0.0.3"
|
157 |
+
webidl-conversions "^3.0.0"
|
158 |
+
|
159 | |
160 |
+
version "3.1.1"
|
161 |
+
resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
|
162 |
+
integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==
|
lib/srh/auth/token_resolver.ex
CHANGED
@@ -1,7 +1,6 @@
|
|
1 |
defmodule Srh.Auth.TokenResolver do
|
2 |
use GenServer
|
3 |
|
4 |
-
@mode Application.fetch_env!(:srh, :mode)
|
5 |
@file_path Application.fetch_env!(:srh, :file_path)
|
6 |
|
7 |
@ets_table_name :srh_token_resolver
|
@@ -25,7 +24,7 @@ defmodule Srh.Auth.TokenResolver do
|
|
25 |
table = :ets.new(@ets_table_name, [:named_table, read_concurrency: true])
|
26 |
|
27 |
# Populate the ETS table with data from storage
|
28 |
-
do_init_load(
|
29 |
|
30 |
{
|
31 |
:ok,
|
@@ -36,7 +35,7 @@ defmodule Srh.Auth.TokenResolver do
|
|
36 |
end
|
37 |
|
38 |
def resolve(token) do
|
39 |
-
do_resolve(
|
40 |
end
|
41 |
|
42 |
# Server methods
|
@@ -49,6 +48,10 @@ defmodule Srh.Auth.TokenResolver do
|
|
49 |
end
|
50 |
|
51 |
# Internal server
|
|
|
|
|
|
|
|
|
52 |
defp do_init_load("file") do
|
53 |
config_file_data = Jason.decode!(File.read!(@file_path))
|
54 |
IO.puts("Loaded config file from disk. #{map_size(config_file_data)} entries.")
|
@@ -59,6 +62,25 @@ defmodule Srh.Auth.TokenResolver do
|
|
59 |
)
|
60 |
end
|
61 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
62 |
defp do_init_load(_), do: :ok
|
63 |
|
64 |
# Internal, but client side, methods. These are client side to prevent GenServer lockup
|
@@ -73,6 +95,9 @@ defmodule Srh.Auth.TokenResolver do
|
|
73 |
end
|
74 |
end
|
75 |
|
|
|
|
|
|
|
76 |
defp do_resolve("redis", _token) do
|
77 |
{
|
78 |
:ok,
|
|
|
1 |
defmodule Srh.Auth.TokenResolver do
|
2 |
use GenServer
|
3 |
|
|
|
4 |
@file_path Application.fetch_env!(:srh, :file_path)
|
5 |
|
6 |
@ets_table_name :srh_token_resolver
|
|
|
24 |
table = :ets.new(@ets_table_name, [:named_table, read_concurrency: true])
|
25 |
|
26 |
# Populate the ETS table with data from storage
|
27 |
+
do_init_load(get_token_loader_mode())
|
28 |
|
29 |
{
|
30 |
:ok,
|
|
|
35 |
end
|
36 |
|
37 |
def resolve(token) do
|
38 |
+
do_resolve(get_token_loader_mode(), token)
|
39 |
end
|
40 |
|
41 |
# Server methods
|
|
|
48 |
end
|
49 |
|
50 |
# Internal server
|
51 |
+
defp get_token_loader_mode() do
|
52 |
+
System.get_env("SRH_MODE", "file")
|
53 |
+
end
|
54 |
+
|
55 |
defp do_init_load("file") do
|
56 |
config_file_data = Jason.decode!(File.read!(@file_path))
|
57 |
IO.puts("Loaded config file from disk. #{map_size(config_file_data)} entries.")
|
|
|
62 |
)
|
63 |
end
|
64 |
|
65 |
+
defp do_init_load("env") do
|
66 |
+
srh_token = System.get_env("SRH_TOKEN")
|
67 |
+
srh_connection_string = System.get_env("SRH_CONNECTION_STRING")
|
68 |
+
|
69 |
+
# Returns an error if fails, first tuple value is the number
|
70 |
+
{srh_max_connections, ""} = Integer.parse(System.get_env("SRH_MAX_CONNECTIONS", "3"))
|
71 |
+
|
72 |
+
# Create a config-file-like structure that the ETS layout expects, with just one entry
|
73 |
+
config_file_data = Map.put(%{}, srh_token, %{
|
74 |
+
"srh_id" => "env_config_connection", # Jason.parse! expects these keys to be strings, not atoms, so we need to replicate that setup
|
75 |
+
"connection_string" => srh_connection_string,
|
76 |
+
"max_connections" => srh_max_connections
|
77 |
+
})
|
78 |
+
|
79 |
+
IO.puts("Loaded config from env. #{map_size(config_file_data)} entries.")
|
80 |
+
# Load this into ETS
|
81 |
+
Enum.each(config_file_data, &:ets.insert(@ets_table_name, &1))
|
82 |
+
end
|
83 |
+
|
84 |
defp do_init_load(_), do: :ok
|
85 |
|
86 |
# Internal, but client side, methods. These are client side to prevent GenServer lockup
|
|
|
95 |
end
|
96 |
end
|
97 |
|
98 |
+
# The env strategy uses the same ETS table as the file strategy, so we can fall back on that
|
99 |
+
defp do_resolve("env", token), do: do_resolve("file", token)
|
100 |
+
|
101 |
defp do_resolve("redis", _token) do
|
102 |
{
|
103 |
:ok,
|
lib/srh/http/base_router.ex
CHANGED
@@ -2,6 +2,7 @@ defmodule Srh.Http.BaseRouter do
|
|
2 |
use Plug.Router
|
3 |
alias Srh.Http.RequestValidator
|
4 |
alias Srh.Http.CommandHandler
|
|
|
5 |
|
6 |
plug(:match)
|
7 |
plug(Plug.Parsers, parsers: [:json], pass: ["application/json"], json_decoder: Jason)
|
@@ -12,27 +13,30 @@ defmodule Srh.Http.BaseRouter do
|
|
12 |
end
|
13 |
|
14 |
post "/" do
|
15 |
-
conn
|
16 |
-
|> handle_extract_auth(&CommandHandler.handle_command(conn, &1))
|
17 |
-
|> handle_response(conn)
|
18 |
end
|
19 |
|
20 |
post "/pipeline" do
|
21 |
-
conn
|
22 |
-
|> handle_extract_auth(&CommandHandler.handle_command_array(conn, &1))
|
23 |
-
|> handle_response(conn)
|
24 |
end
|
25 |
|
26 |
post "/multi-exec" do
|
27 |
-
conn
|
28 |
-
|> handle_extract_auth(&CommandHandler.handle_command_transaction_array(conn, &1))
|
29 |
-
|> handle_response(conn)
|
30 |
end
|
31 |
|
32 |
match _ do
|
33 |
send_resp(conn, 404, "Endpoint not found")
|
34 |
end
|
35 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
36 |
defp handle_extract_auth(conn, success_lambda) do
|
37 |
case conn
|
38 |
|> get_req_header("authorization")
|
@@ -45,6 +49,24 @@ defmodule Srh.Http.BaseRouter do
|
|
45 |
end
|
46 |
end
|
47 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
48 |
defp handle_response(response, conn) do
|
49 |
%{code: code, message: message, json: json} =
|
50 |
case response do
|
|
|
2 |
use Plug.Router
|
3 |
alias Srh.Http.RequestValidator
|
4 |
alias Srh.Http.CommandHandler
|
5 |
+
alias Srh.Http.ResultEncoder
|
6 |
|
7 |
plug(:match)
|
8 |
plug(Plug.Parsers, parsers: [:json], pass: ["application/json"], json_decoder: Jason)
|
|
|
13 |
end
|
14 |
|
15 |
post "/" do
|
16 |
+
do_command_request(conn, &CommandHandler.handle_command(&1, &2))
|
|
|
|
|
17 |
end
|
18 |
|
19 |
post "/pipeline" do
|
20 |
+
do_command_request(conn, &CommandHandler.handle_command_array(&1, &2))
|
|
|
|
|
21 |
end
|
22 |
|
23 |
post "/multi-exec" do
|
24 |
+
do_command_request(conn, &CommandHandler.handle_command_transaction_array(&1, &2))
|
|
|
|
|
25 |
end
|
26 |
|
27 |
match _ do
|
28 |
send_resp(conn, 404, "Endpoint not found")
|
29 |
end
|
30 |
|
31 |
+
defp do_command_request(conn, success_lambda) do
|
32 |
+
encoding_enabled = handle_extract_encoding?(conn)
|
33 |
+
|
34 |
+
conn
|
35 |
+
|> handle_extract_auth(&success_lambda.(conn, &1))
|
36 |
+
|> handle_encoding_step(encoding_enabled)
|
37 |
+
|> handle_response(conn)
|
38 |
+
end
|
39 |
+
|
40 |
defp handle_extract_auth(conn, success_lambda) do
|
41 |
case conn
|
42 |
|> get_req_header("authorization")
|
|
|
49 |
end
|
50 |
end
|
51 |
|
52 |
+
defp handle_extract_encoding?(conn) do
|
53 |
+
case conn
|
54 |
+
|> get_req_header("upstash-encoding")
|
55 |
+
|> RequestValidator.validate_encoding_header() do
|
56 |
+
{:ok, _encoding_enabled} -> true
|
57 |
+
{:error, _} -> false # it's not required to be present
|
58 |
+
end
|
59 |
+
end
|
60 |
+
|
61 |
+
defp handle_encoding_step(response, encoding_enabled) do
|
62 |
+
case encoding_enabled do
|
63 |
+
true ->
|
64 |
+
# We need to use the encoder to
|
65 |
+
ResultEncoder.encode_response(response)
|
66 |
+
false -> response
|
67 |
+
end
|
68 |
+
end
|
69 |
+
|
70 |
defp handle_response(response, conn) do
|
71 |
%{code: code, message: message, json: json} =
|
72 |
case response do
|
lib/srh/http/request_validator.ex
CHANGED
@@ -26,6 +26,21 @@ defmodule Srh.Http.RequestValidator do
|
|
26 |
|
27 |
defp do_validate_pipeline_item(_), do: :error
|
28 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
29 |
def validate_bearer_header(header_value_array) when is_list(header_value_array) do
|
30 |
do_validate_bearer_header(header_value_array)
|
31 |
end
|
|
|
26 |
|
27 |
defp do_validate_pipeline_item(_), do: :error
|
28 |
|
29 |
+
def validate_encoding_header(header_value_array) when is_list(header_value_array) do
|
30 |
+
do_validate_encoding_header(header_value_array)
|
31 |
+
end
|
32 |
+
|
33 |
+
# This has been broken up like this to future-proof different encoding modes in the future
|
34 |
+
defp do_validate_encoding_header([first_item | rest]) do
|
35 |
+
case first_item do
|
36 |
+
"base64" -> {:ok, true}
|
37 |
+
|
38 |
+
_ -> do_validate_encoding_header(rest)
|
39 |
+
end
|
40 |
+
end
|
41 |
+
|
42 |
+
defp do_validate_encoding_header([]), do: {:error, :not_found}
|
43 |
+
|
44 |
def validate_bearer_header(header_value_array) when is_list(header_value_array) do
|
45 |
do_validate_bearer_header(header_value_array)
|
46 |
end
|
lib/srh/http/result_encoder.ex
ADDED
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
defmodule Srh.Http.ResultEncoder do
|
2 |
+
|
3 |
+
# Errors don't get encoded, we need to skip over those
|
4 |
+
def encode_response({:redis_error, error_result_map}) do
|
5 |
+
{:redis_error, error_result_map}
|
6 |
+
end
|
7 |
+
|
8 |
+
# List-based responses, they will contain multiple entries
|
9 |
+
# It's important to note that this is DIFFERENT from a list of values,
|
10 |
+
# as it's a list of separate command responses. Each is a map that either
|
11 |
+
# Contains a result or an error
|
12 |
+
def encode_response({:ok, result_list}) when is_list(result_list) do
|
13 |
+
# Each one of these entries needs to be encoded
|
14 |
+
{:ok, encode_response_list(result_list, [])}
|
15 |
+
end
|
16 |
+
|
17 |
+
# Single item response
|
18 |
+
def encode_response({:ok, %{result: result_value}}) do
|
19 |
+
{:ok, %{result: encode_result_value(result_value)}}
|
20 |
+
end
|
21 |
+
|
22 |
+
## RESULT LIST ENCODING ##
|
23 |
+
|
24 |
+
defp encode_response_list([current | rest], encoded_responses) do
|
25 |
+
encoded_current_entry = case current do
|
26 |
+
%{result: value} ->
|
27 |
+
%{result: encode_result_value(value)} # Encode the value
|
28 |
+
%{error: error_message} ->
|
29 |
+
%{error: error_message} # We don't encode errors
|
30 |
+
end
|
31 |
+
|
32 |
+
encode_response_list(rest, [encoded_current_entry | encoded_responses])
|
33 |
+
end
|
34 |
+
|
35 |
+
defp encode_response_list([], encoded_responses) do
|
36 |
+
Enum.reverse(encoded_responses)
|
37 |
+
end
|
38 |
+
|
39 |
+
## RESULT VALUE ENCODING ##
|
40 |
+
|
41 |
+
# Numbers are ignored
|
42 |
+
defp encode_result_value(value) when is_number(value), do: value
|
43 |
+
|
44 |
+
# Null/nil is ignored
|
45 |
+
defp encode_result_value(value) when is_nil(value), do: value
|
46 |
+
|
47 |
+
# Strings / blobs (any binary data) is encoded to Base64
|
48 |
+
defp encode_result_value(value) when is_binary(value), do: Base.encode64(value)
|
49 |
+
|
50 |
+
defp encode_result_value(arr) when is_list(arr) do
|
51 |
+
encode_result_value_list(arr, [])
|
52 |
+
end
|
53 |
+
|
54 |
+
## RESULT VALUE LIST ENCODING ##
|
55 |
+
|
56 |
+
# Arrays can have values that are encoded, or aren't, based on whats laid out above
|
57 |
+
defp encode_result_value_list([current | rest], encoded_responses) do
|
58 |
+
encoded_value = encode_result_value(current)
|
59 |
+
encode_result_value_list(rest, [encoded_value | encoded_responses])
|
60 |
+
end
|
61 |
+
|
62 |
+
defp encode_result_value_list([], encoded_responses) do
|
63 |
+
# There are no responses left, and since we add them backwards, we need to flip the list
|
64 |
+
Enum.reverse(encoded_responses)
|
65 |
+
end
|
66 |
+
end
|