diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 25a73841..4faef9bc 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -20,17 +20,17 @@ jobs:
contents: read
environment: ${{ github.ref == 'refs/heads/main' && 'production' || 'staging' }}
steps:
- - name: Set CONFIG_ENV from Branch Name
+ - name: Set DEPLOY_ENV from Branch Name
run: |
if [[ $BRANCH == 'refs/heads/main' ]]; then
- echo "CONFIG_ENV=production" >> $GITHUB_ENV
+ echo "DEPLOY_ENV=production" >> $GITHUB_ENV
else
- echo "CONFIG_ENV=$(echo $BRANCH | awk -F/ '{print $NF}')" >> $GITHUB_ENV
+ echo "DEPLOY_ENV=$(echo $BRANCH | awk -F/ '{print $NF}')" >> $GITHUB_ENV
fi
env:
BRANCH: ${{ github.ref }}
- name: Confirm deploy environment
- run: echo "Deploying to '$CONFIG_ENV' environment"
+ run: echo "Deploying to '$DEPLOY_ENV' environment"
- name: Set GitHub Deploy Key
uses: webfactory/ssh-agent@v0.5.3
with:
@@ -49,16 +49,13 @@ jobs:
with:
role-to-assume: arn:aws:iam::${{ secrets.AwsAccount }}:role/github-actions-role
aws-region: us-east-1
- - run: ln -s .tfvars/dc-api/samconfig.toml .
- - run: ln -s .tfvars/dc-api/$CONFIG_ENV.parameters .
+ - run: ln -s .tfvars/dc-api/samconfig.${DEPLOY_ENV}.yaml .
- run: make build
- run: |
sam deploy \
--no-confirm-changeset \
--no-fail-on-empty-changeset \
- --config-env $CONFIG_ENV \
- --config-file ./samconfig.toml \
- --parameter-overrides $(while IFS='=' read -r key value; do params+=" $key=$value"; done < ./$CONFIG_ENV.parameters && echo "$params HoneybadgerRevision=$HONEYBADGER_REVISION") \
+ --config-file ./samconfig.${DEPLOY_ENV}.yaml \
| sed 's/\(Parameter overrides\s*\): .*/\1: ***** REDACTED *****/'
exit ${PIPESTATUS[0]}
env:
diff --git a/.github/workflows/test-node.yml b/.github/workflows/test-node.yml
index 81694682..33e6a0c6 100644
--- a/.github/workflows/test-node.yml
+++ b/.github/workflows/test-node.yml
@@ -3,11 +3,11 @@ on:
push:
paths:
- ".github/workflows/test-node.yml"
- - "node/**"
+ - "api/**"
workflow_dispatch:
defaults:
run:
- working-directory: ./node
+ working-directory: ./api
jobs:
test:
runs-on: ubuntu-latest
@@ -20,7 +20,7 @@ jobs:
with:
node-version: 20.x
cache: "npm"
- cache-dependency-path: 'node/package-lock.json'
+ cache-dependency-path: 'api/package-lock.json'
- run: npm ci
- name: Check code style
run: npm run lint && npm run prettier
diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml
index 88c13697..0bd476dc 100644
--- a/.github/workflows/test-python.yml
+++ b/.github/workflows/test-python.yml
@@ -14,7 +14,6 @@ jobs:
env:
AWS_ACCESS_KEY_ID: ci
AWS_SECRET_ACCESS_KEY: ci
- SKIP_LLM_REQUEST: 'True'
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
@@ -27,5 +26,7 @@ jobs:
run: ruff check .
- name: Run tests
run: |
- coverage run --include='src/**/*' -m unittest
+ coverage run --include='src/**/*' -m pytest -m ""
coverage report
+ env:
+ AWS_REGION: us-east-1
diff --git a/.github/workflows/validate-template.yml b/.github/workflows/validate-template.yml
index d02d6604..75cc6329 100644
--- a/.github/workflows/validate-template.yml
+++ b/.github/workflows/validate-template.yml
@@ -16,13 +16,15 @@ jobs:
- uses: actions/setup-python@v4
with:
python-version: '3.12'
+ - name: Install cfn-lint
+ run: pip install cfn-lint
- uses: aws-actions/setup-sam@v1
- - name: sam fix https://github.com/aws/aws-sam-cli/issues/4527
- run: $(dirname $(readlink $(which sam)))/pip install --force-reinstall "cryptography==38.0.4"
+ # - name: sam fix https://github.com/aws/aws-sam-cli/issues/4527
+ # run: $(dirname $(readlink $(which sam)))/pip install --force-reinstall "cryptography==38.0.4"
- uses: aws-actions/configure-aws-credentials@master
with:
role-to-assume: arn:aws:iam::${{ secrets.AwsAccount }}:role/github-actions-role
aws-region: us-east-1
- uses: actions/checkout@v3
- name: Validate template
- run: sam build && sam validate
\ No newline at end of file
+ run: make build && make validate
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index d4f76c46..7e9247e8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -231,3 +231,5 @@ env.*.json
*.parameters
/schemas
.sam-pids
+
+av-download/layers/
diff --git a/.husky/pre-commit b/.husky/pre-commit
index 01456dba..452caa6f 100755
--- a/.husky/pre-commit
+++ b/.husky/pre-commit
@@ -1,4 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
-cd node && npm run lint && npm run prettier && cd -
+cd api && npm run lint && npm run prettier && cd -
cd chat/src && ruff check . && cd -
diff --git a/.tool-versions b/.tool-versions
index 1caec649..9b9f9e2e 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -1,4 +1,4 @@
nodejs 20.15.0
java corretto-19.0.1.10.1
-aws-sam-cli 1.107.0
+aws-sam-cli 1.135.0
python 3.12.2
diff --git a/Makefile b/Makefile
index 7febc29a..dc9c4333 100644
--- a/Makefile
+++ b/Makefile
@@ -5,92 +5,138 @@ ENV=dev
SHELL := /bin/bash
help:
- echo "make build | build the SAM project"
- echo "make serve | alias for serve-https"
- echo "make clean | remove all installed dependencies and build artifacts"
- echo "make deps | install all dependencies"
- echo "make link | create hard links to allow for hot reloading of a built project"
- echo "make secrets | symlink secrets files from ../tfvars"
- echo "make start-with-step | run the SAM server locally with step function & download lambdas"
- echo "make style | run all style checks"
- echo "make test | run all tests"
- echo "make cover | run all tests with coverage"
- echo "make env ENV=[env] | activate env.\$$ENV.json file (default: dev)"
- echo "make deps-node | install node dependencies"
- echo "make deps-python | install python dependencies"
- echo "make serve-http | run the SAM server locally (HTTP on port 3000)"
- echo "make serve-https | run the SAM server locally (HTTPS on port 3002)"
- echo "make style-node | run node code style check"
- echo "make style-python | run python code style check"
- echo "make test-node | run node tests"
- echo "make test-python | run python tests"
- echo "make cover-node | run node tests with coverage"
- echo "make cover-python | run python tests with coverage"
-.aws-sam/build.toml: ./template.yaml node/package-lock.json node/src/package-lock.json chat/dependencies/requirements.txt chat/src/requirements.txt
- sed -Ei.orig 's/^(\s+)#\*\s/\1/' template.yaml
- sed -Ei.orig 's/^(\s+)#\*\s/\1/' chat/template.yaml
- sam build --cached --parallel
- mv template.yaml.orig template.yaml
- mv chat/template.yaml.orig chat/template.yaml
+ echo "make build | build the SAM project"
+ echo "make serve | alias for serve-https"
+ echo "make clean | remove all installed dependencies and build artifacts"
+ echo "make deps | install all dependencies"
+ echo "make env.json | create an env.json file for the current user's environment"
+ echo "make link | create hard links to allow for hot reloading of a built project"
+ echo "make secrets | symlink secrets files from ../tfvars"
+ echo "make start-with-step | run the SAM server locally with step function & download lambdas"
+ echo "make style | run all style checks"
+ echo "make test | run all tests"
+ echo "make cover | run all tests with coverage"
+ echo "make env ENV=[env] | activate env.\$$ENV.json file (default: dev)"
+ echo "make deps-node | install node dependencies"
+ echo "make deps-python | install python dependencies"
+	echo "make samconfig.NAME.yaml | create a user samconfig file for the specified username"
+ echo "make build | build the SAM project for deploying"
+ echo "make deploy | deploy the SAM project to AWS"
+ echo "make sync | sync the SAM project to AWS for quick development"
+ echo "make sync-code | sync the SAM project to AWS (code changes only)"
+ echo "make serve-http | run the SAM server locally (HTTP on port 3000)"
+ echo "make serve-https | run the SAM server locally (HTTPS on port 3002)"
+ echo "make style-node | run node code style check"
+ echo "make style-python | run python code style check"
+ echo "make test-node | run node tests"
+ echo "make test-python | run python tests"
+ echo "make cover-node | run node tests with coverage"
+ echo "make cover-python | run python tests with coverage"
+
+.aws-sam/build.toml: ./template.yaml api/package-lock.json api/src/package-lock.json chat/dependencies/requirements.txt chat/src/requirements.txt
+ sed -Ei.orig 's/"dependencies"/"devDependencies"/' api/src/package.json
+ cp api/src/package-lock.json api/src/package-lock.json.orig
+ cd api/src && npm i --package-lock-only && cd -
+ for d in . api av-download chat docs ; do \
+ sed -Ei.orig 's/^(\s+)#\*\s/\1/' $$d/template.yaml; \
+ done
+
+ -sam build --cached --parallel
+
+ for d in . api av-download chat docs ; do \
+ mv $$d/template.yaml.orig $$d/template.yaml; \
+ done
+ mv api/src/package.json.orig api/src/package.json
+ mv api/src/package-lock.json.orig api/src/package-lock.json
layers/ffmpeg/bin/ffmpeg:
- mkdir -p layers/ffmpeg/bin ;\
+ mkdir -p av-download/layers/ffmpeg/bin ;\
curl -L https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz | \
- tar -C layers/ffmpeg/bin -xJ --strip-components=1 --wildcards '*/ffmpeg' '*/ffprobe'
-deps-node:
- cd node/src ;\
+ tar -C av-download/layers/ffmpeg/bin -xJ --strip-components=1 --wildcards '*/ffmpeg' '*/ffprobe'
+deps-api:
+ cd api/src ;\
npm list >/dev/null 2>&1 ;\
src_deps=$$? ;\
cd .. ;\
npm list >/dev/null 2>&1 ;\
dev_deps=$$? ;\
test $$src_deps -eq 0 -a $$dev_deps -eq 0 || npm ci
-
- cd lambdas ;\
+deps-av-download:
+ cd av-download/lambdas ;\
npm list >/dev/null 2>&1 || npm ci
+deps-node: deps-api deps-av-download
cover-node: deps-node
- cd node && npm run test:coverage
+ cd api && npm run test:coverage
style-node: deps-node
- cd node && npm run prettier
+ cd api && npm run prettier
test-node: deps-node
- cd node && npm run test
+ cd api && npm run test
deps-python:
cd chat/src && pip install -r requirements.txt && pip install -r requirements-dev.txt
cover-python: deps-python
- cd chat && export SKIP_LLM_REQUEST=True && coverage run --source=src -m unittest -v && coverage report --skip-empty
+ cd chat && coverage run --source=src -m pytest -v && coverage report --skip-empty
cover-html-python: deps-python
- cd chat && export SKIP_LLM_REQUEST=True && coverage run --source=src -m unittest -v && coverage html --skip-empty
+ cd chat && coverage run --source=src -m pytest -v && coverage html --skip-empty
style-python: deps-python
cd chat && ruff check .
style-python-fix: deps-python
cd chat && ruff check --fix .
test-python: deps-python
- cd chat && __SKIP_SECRETS__=true SKIP_LLM_REQUEST=True PYTHONPATH=src:test python -m unittest discover -v
+ cd chat && pytest
python-version:
cd chat && python --version
build: layers/ffmpeg/bin/ffmpeg .aws-sam/build.toml
+validate:
+ cfn-lint template.yaml **/template.yaml --ignore-checks E3510 W1028 W8001
serve-http: deps-node
@printf '\033[0;31mWARNING: Serving only the local HTTP API. The chat websocket API is not available in local mode.\033[0m\n'
rm -rf .aws-sam
- sam local start-api --host 0.0.0.0 --log-file dc-api.log ${SERVE_PARAMS}
+ sam local start-api -t api/template.yaml --env-vars $$PWD/env.json --host 0.0.0.0 --log-file dc-api.log ${SERVE_PARAMS}
serve-https: SERVE_PARAMS = --port 3002 --ssl-cert-file $$HOME/.dev_cert/dev.rdc.cert.pem --ssl-key-file $$HOME/.dev_cert/dev.rdc.key.pem
serve-https: serve-http
serve: serve-https
-start-with-step: deps-node
- sam local start-lambda --host 0.0.0.0 --port 3005 --env-vars env.json --log-file lambda.log & \
+start-with-step: deps-node env.json
+ export AWS_DEFAULT_REGION=us-east-1 ;\
+ sam local start-lambda --warm-containers=LAZY -t av-download/template.yaml --host 0.0.0.0 --port 3005 --env-vars $$PWD/env.json --log-file lambda.log & \
echo $$! > .sam-pids ;\
- sam local start-api --host 0.0.0.0 --port 3002 --log-file dc-api.log \
+ sg open all 3005 ;\
+ sam local start-api --warm-containers=LAZY -t api/template.yaml --env-vars $$PWD/env.json --host 0.0.0.0 --port 3002 --log-file dc-api.log \
--ssl-cert-file $$HOME/.dev_cert/dev.rdc.cert.pem --ssl-key-file $$HOME/.dev_cert/dev.rdc.key.pem & \
echo $$! >> .sam-pids ;\
docker run --rm -p 8083:8083 -e LAMBDA_ENDPOINT=http://172.17.0.1:3005/ amazon/aws-stepfunctions-local ;\
+ echo -n "Shutting down..." ;\
+ sg close all 3005 ;\
kill $$(cat .sam-pids) ;\
- rm -f .sam-pids
+ rm -f .sam-pids ;\
+ echo ""
+state-machine:
+ export TEMPLATE_DIR=$$(mktemp -d); \
+ yq -o=json '.Resources.avDownloadStateMachine.Properties.Definition' av-download/template.yaml > $$TEMPLATE_DIR/av_download.json; \
+ aws stepfunctions create-state-machine --endpoint http://localhost:8083 --definition file://$$TEMPLATE_DIR/av_download.json --name "hlsStitcherStepFunction" --role-arn arn:aws:iam::012345678901:role/DummyRole --no-cli-pager
deps: deps-node deps-python
style: style-node style-python
test: test-node test-python
cover: cover-node cover-python
-env:
- ln -fs ./env.${ENV}.json ./env.json
+env.json:
+ ./bin/make_env.sh
+samconfig.%.yaml:
+ DEV_PREFIX=$* ./bin/make_deploy_config.sh
+deploy: build samconfig.$(DEV_PREFIX).yaml
+ if ! aws sts get-caller-identity --query 'Arn' --output text | grep AWSReservedSSO_AWSAdministratorAccess > /dev/null; then \
+ echo "You must be logged in as an admin to deploy"; \
+ exit 1; \
+ fi
+ sam deploy --config-file samconfig.$(DEV_PREFIX).yaml --stack-name dc-api-$(DEV_PREFIX)
+sync: samconfig.$(DEV_PREFIX).yaml
+ if ! aws sts get-caller-identity --query 'Arn' --output text | grep AWSReservedSSO_AWSAdministratorAccess > /dev/null; then \
+ echo "You must be logged in as an admin to sync"; \
+ exit 1; \
+ fi
+ sam sync --config-file samconfig.$(DEV_PREFIX).yaml --stack-name dc-api-$(DEV_PREFIX) --watch $(ARGS)
+sync-code: ARGS=--code
+sync-code: sync
secrets:
- ln -s ../tfvars/dc-api/* .
+ ln -s ../tfvars/dc-api/*.yaml .
clean:
- rm -rf .aws-sam node/node_modules node/src/node_modules python/**/__pycache__ python/.coverage python/.ruff_cache layers/ffmpeg
\ No newline at end of file
+ rm -rf .aws-sam api/.aws-sam chat/.aws-sam av-download/.aws-sam api/node_modules api/src/node_modules chat/**/__pycache__ chat/.coverage chat/.ruff_cache
+reset:
+ for f in $$(find . -maxdepth 2 -name '*.orig'); do mv $$f $${f%%.orig}; done
diff --git a/README.md b/README.md
index 08108883..00084bf3 100644
--- a/README.md
+++ b/README.md
@@ -2,35 +2,17 @@
[](https://github.com/nulib/dc-api-v2/actions/workflows/test-node.yml) [](https://github.com/nulib/dc-api-v2/actions/workflows/test-python.yml)
-## Chat Websocket API development
-
-See the [chat API's README](chat/README.md).
-
## Local development setup
-### `samconfig.toml`
-
-The configuration file that tells the `sam` command how to run and deploy the API is called `samconfig.toml`. There are two ways to get that file. From the local project root:
-
-1. For local development only: `ln -s dev/samconfig.toml .`
-2. If you need to be able to deploy: `ln -s /path/to/tfvars/dc-api/samconfig.toml .` (You will need to have a local working copy of the private `tfvars` repo first.)
-
-Whichever you choose, the resulting file will be ignored by `git`.
-
### `env.json`
-The `env.json` file contains environment variable values for the lambda functions defined in the API for use in local development. An initial (empty) version with all the names of the necessary variables is in `dev/env.json`. Copy (**do not symlink**) this file into the project root and customize it for your own environment. It will also be ignored by `git`.
+The `env.json` file contains environment variable values for the lambda functions defined in the API for use in local development. You can create an `env.json` file containing the values to run the API against your dev data by running:
-Some of the values can be found as follows:
+```shell
+make env.json
+```
-- `API_TOKEN_SECRET` - already defined; value has to exist but doesn't matter in dev mode
-- `OPENSEARCH_ENDPOINT` - run the following command:
- ```
- aws secretsmanager get-secret-value \
- --secret-id dev-environment/config/meadow --query SecretString \
- --output text | jq -r '.index.index_endpoint | sub("^https?://";"")'
- ```
-- `ENV_PREFIX` - The username and environment indicating which index to use. Usually your dev environment user prefix followed by `-dev` (e.g., `mbk-dev`), but if you want to use your test index or someone else's index, adjust the value accordingly.
+If the file already exists, it will not be overwritten unless you include `-B` in the make command.
## Running the API locally
@@ -56,7 +38,7 @@ The API will be available at:
- `https://USER_PREFIX.dev.rdc.library.northwestern.edu:3002/search`
- `https://USER_PREFIX.dev.rdc.library.northwestern.edu:3002/collections`
-[View supported endpoints](https://api.dc.library.northwestern.edu/docs/v2/spec/openapi.html) Questions? [View the production API documention](https://api.dc.library.northwestern.edu/)
+[View supported endpoints](https://api.dc.library.northwestern.edu/docs/v2/spec/openapi.html) Questions? [View the production API documentation](https://api.dc.library.northwestern.edu/)
## Example workflows
@@ -97,55 +79,53 @@ Access the app in a browser at: https://USER_PREFIX.dev.rdc.library.northwestern
# From the repo root
cd dc-api-v2
-# Make sure you've done an `npm install` recently to update any packages in the `lambdas` directory
-npm install
-
-# Open port 3005 (if needed)
-sg open all 3005
-
-# Login as the staging-admin user
-export AWS_PROFILE=staging-admin
-aws sso login
-
# Start the API + step function and associated lambdas
make start-with-step
# Open a second terminal and create the state machine
-aws stepfunctions create-state-machine --endpoint http://localhost:8083 --definition file://state_machines/av_download.json --name "hlsStitcherStepFunction" --role-arn arn:aws:iam::012345678901:role/DummyRole
+make state-machine
```
## Deploying a development branch
+There are two ways to deploy a development branch: `make deploy` and `make sync`. The differences are:
+
+- **Changes:** `deploy` deploys a static stack, and requires another `deploy` to update it. `sync` watches for
+ changes in realtime.
+- **Dependencies:** `deploy` uses the `apiDependencies` resource defined in the template for dependencies, while
+ `sync` uses the AWS SAM CLI's built-in development dependency logic.
+
+Either way, the resulting stack will be accessible at `https://dcapi-USER_PREFIX.rdc-staging.library.northwestern.edu`.
+
+An existing `sync` stack can be reused by running `make sync` again, or by running `make sync-code` to only
+sync code changes (no infrastructure/template changes).
+
+### `samconfig.*.yaml`
+
+Both methods involve a `samconfig.USER_PREFIX.yaml` file. This file, with default values, can be created by
+running (for example):
+
+```shell
+make samconfig.mbk.yaml
```
-# sam sync --watch will do hot deploys as you make changes. If you don't want this, switch below command to sam sync or deploy
-export STACK_NAME=dc-api-yourdevprefix
-export CONFIG_ENV=staging
+This will create a configuration to stand up the default stacks in both `deploy` mode (API, AV Download, and Chat) and
+`sync` mode (Chat only). To deploy a different combination of features, specify them using the `WITH` option:
-sam sync --watch --stack-name $STACK_NAME \
- --config-env $CONFIG_ENV \
- --config-file ./samconfig.toml \
- --parameter-overrides $(while IFS='=' read -r key value; do params+=" $key=$value"; done < ./$CONFIG_ENV.parameters && echo "$params CustomDomainHost=$STACK_NAME")
+```shell
+make samconfig.mbk.yaml WITH=API,DOCS
```
-This will give you API routes like: `https://dc-api-yourdevprefix.rdc-staging.library.northwestern.edu/chat-endpoint`
+Available features are: `API`, `AV_DOWNLOAD`, `CHAT`, and `DOCS`.
-## Deploying the API manually
+⚠️ Be **very** careful including the API in `sync` mode as every change within `/api` will take a long time to deploy.
-- Symlink the `*.parameters` file you need from `tfvars/dc-api/` to the application root
-- Set your `CONFIG_ENV` and `HONEYBADGER_REVISION` environment variables
-- Run `sam deploy`
+As with the `env.json` file, `make` will not overwrite an existing file unless you include `-B`.
-```sh
-# staging environment example:
+### Tearing down a development stack
-ln -s ~/environment/tfvars/dc-api/staging.parameters .
-CONFIG_ENV=staging
-HONEYBADGER_REVISION=$(git rev-parse HEAD)
-sam deploy \
- --config-env $CONFIG_ENV \
- --config-file ./samconfig.toml \
- --parameter-overrides $(while IFS='=' read -r key value; do params+=" $key=$value"; done < ./$CONFIG_ENV.parameters && echo "$params HoneybadgerRevision=$HONEYBADGER_REVISION")
+```shell
+sam delete --stack-name dc-api-USER_PREFIX
```
## Writing Documentation
@@ -168,11 +148,11 @@ In a nutshell:
sg open all 8000
mkdocs serve -a 0.0.0.0:8000
```
- Docs will be accessible at http://[DEV_PREFIX].dev.rdc.library.northwestern.edu:8000/
+ Docs will be accessible at http://USER_PREFIX.dev.rdc.library.northwestern.edu:8000/
### OpenAPI/Swagger Docs
-We also maintain an OpenAPI Specification under the docs directory in [`spec/openapi.yaml`](docs/docs/spec/openapi.yaml). When `mkdocs` is running, the Swagger UI can be found at http://[DEV_PREFIX].dev.rdc.library.northwestern.edu:8000/spec/openapi.html. Like the rest of the documentation, changes to the YAML will be immediately visible in the browser.
+We also maintain an OpenAPI Specification under the docs directory in [`spec/openapi.yaml`](docs/docs/spec/openapi.yaml). When `mkdocs` is running, the Swagger UI can be found at http://USER_PREFIX.dev.rdc.library.northwestern.edu:8000/spec/openapi.html. Like the rest of the documentation, changes to the YAML will be immediately visible in the browser.
The existing spec files ([`openapi.yaml`](docs/docs/spec/openapi.yaml) and [`types.yaml`](docs/docs/spec/types.yaml)) are the best reference for understanding and updating the spec. It's especially important to understand how `openapi.yaml` uses the [`$ref` keyword](https://swagger.io/docs/specification/using-ref/) to refer to reusable elements defined in `types.yaml`.
diff --git a/node/.mocharc.js b/api/.mocharc.js
similarity index 100%
rename from node/.mocharc.js
rename to api/.mocharc.js
diff --git a/node/.npmignore b/api/.npmignore
similarity index 100%
rename from node/.npmignore
rename to api/.npmignore
diff --git a/layers/api_dependencies/package-lock.json b/api/dependencies/package-lock.json
similarity index 100%
rename from layers/api_dependencies/package-lock.json
rename to api/dependencies/package-lock.json
diff --git a/layers/api_dependencies/package.json b/api/dependencies/package.json
similarity index 100%
rename from layers/api_dependencies/package.json
rename to api/dependencies/package.json
diff --git a/node/nyc.config.js b/api/nyc.config.js
similarity index 100%
rename from node/nyc.config.js
rename to api/nyc.config.js
diff --git a/node/package-lock.json b/api/package-lock.json
similarity index 100%
rename from node/package-lock.json
rename to api/package-lock.json
diff --git a/node/package.json b/api/package.json
similarity index 93%
rename from node/package.json
rename to api/package.json
index 31c78b97..b37f7fd6 100644
--- a/node/package.json
+++ b/api/package.json
@@ -10,7 +10,7 @@
},
"scripts": {
"lint": "eslint src/**/*.js test/**/*.js",
- "preinstall": "cd src && npm i && cd - && cd ../lambdas && npm i && cd -",
+ "preinstall": "cd src && npm i && cd - && cd ../av-download/lambdas && npm i && cd -",
"prettier": "prettier -c src test",
"prettier:fix": "prettier -cw src test",
"test": "mocha",
diff --git a/node/src/api/api-token.js b/api/src/api/api-token.js
similarity index 100%
rename from node/src/api/api-token.js
rename to api/src/api/api-token.js
diff --git a/node/src/api/opensearch.js b/api/src/api/opensearch.js
similarity index 100%
rename from node/src/api/opensearch.js
rename to api/src/api/opensearch.js
diff --git a/node/src/api/pagination.js b/api/src/api/pagination.js
similarity index 100%
rename from node/src/api/pagination.js
rename to api/src/api/pagination.js
diff --git a/node/src/api/request/models.js b/api/src/api/request/models.js
similarity index 100%
rename from node/src/api/request/models.js
rename to api/src/api/request/models.js
diff --git a/node/src/api/request/pipeline.js b/api/src/api/request/pipeline.js
similarity index 100%
rename from node/src/api/request/pipeline.js
rename to api/src/api/request/pipeline.js
diff --git a/node/src/api/response/error.js b/api/src/api/response/error.js
similarity index 100%
rename from node/src/api/response/error.js
rename to api/src/api/response/error.js
diff --git a/node/src/api/response/iiif/collection.js b/api/src/api/response/iiif/collection.js
similarity index 100%
rename from node/src/api/response/iiif/collection.js
rename to api/src/api/response/iiif/collection.js
diff --git a/node/src/api/response/iiif/manifest.js b/api/src/api/response/iiif/manifest.js
similarity index 100%
rename from node/src/api/response/iiif/manifest.js
rename to api/src/api/response/iiif/manifest.js
diff --git a/node/src/api/response/iiif/presentation-api/items.js b/api/src/api/response/iiif/presentation-api/items.js
similarity index 100%
rename from node/src/api/response/iiif/presentation-api/items.js
rename to api/src/api/response/iiif/presentation-api/items.js
diff --git a/node/src/api/response/iiif/presentation-api/metadata.js b/api/src/api/response/iiif/presentation-api/metadata.js
similarity index 100%
rename from node/src/api/response/iiif/presentation-api/metadata.js
rename to api/src/api/response/iiif/presentation-api/metadata.js
diff --git a/node/src/api/response/iiif/presentation-api/placeholder-canvas.js b/api/src/api/response/iiif/presentation-api/placeholder-canvas.js
similarity index 100%
rename from node/src/api/response/iiif/presentation-api/placeholder-canvas.js
rename to api/src/api/response/iiif/presentation-api/placeholder-canvas.js
diff --git a/node/src/api/response/iiif/presentation-api/provider.js b/api/src/api/response/iiif/presentation-api/provider.js
similarity index 100%
rename from node/src/api/response/iiif/presentation-api/provider.js
rename to api/src/api/response/iiif/presentation-api/provider.js
diff --git a/node/src/api/response/opensearch/index.js b/api/src/api/response/opensearch/index.js
similarity index 100%
rename from node/src/api/response/opensearch/index.js
rename to api/src/api/response/opensearch/index.js
diff --git a/node/src/api/response/transformer.js b/api/src/api/response/transformer.js
similarity index 100%
rename from node/src/api/response/transformer.js
rename to api/src/api/response/transformer.js
diff --git a/node/src/aws/fetch.js b/api/src/aws/fetch.js
similarity index 100%
rename from node/src/aws/fetch.js
rename to api/src/aws/fetch.js
diff --git a/node/src/environment.js b/api/src/environment.js
similarity index 67%
rename from node/src/environment.js
rename to api/src/environment.js
index 3ea17f29..29b8fa84 100644
--- a/node/src/environment.js
+++ b/api/src/environment.js
@@ -1,37 +1,10 @@
const fs = require("fs");
const jwt = require("jsonwebtoken");
const path = require("path");
-const {
- GetSecretValueCommand,
- SecretsManagerClient,
-} = require("@aws-sdk/client-secrets-manager");
const PackageInfo = JSON.parse(
fs.readFileSync(path.join(__dirname, "package.json"))
);
-const { SECRETS_PATH } = process.env;
-const SecretIds = {
- index: `${SECRETS_PATH}/infrastructure/index`,
- meadow: "config/meadow",
-};
-let Initialized = false;
-let Secrets = {};
-
-async function initialize() {
- if (Initialized) return;
-
- const client = new SecretsManagerClient();
- for (const source in SecretIds) {
- const SecretId = SecretIds[source];
- console.debug("loading", SecretId, "from", source);
- const cmd = new GetSecretValueCommand({ SecretId });
- const { SecretString } = await client.send(cmd);
- Secrets[source] = JSON.parse(SecretString);
- }
- Initialized = true;
- return Secrets;
-}
-
function apiToken() {
const token = {
displayName: ["Digital Collection API v2"],
@@ -97,7 +70,6 @@ module.exports = {
dcUrl,
defaultSearchSize,
devTeamNetIds,
- initialize,
openSearchEndpoint,
prefix,
region,
diff --git a/node/src/handlers/authorize-document.js b/api/src/handlers/authorize-document.js
similarity index 100%
rename from node/src/handlers/authorize-document.js
rename to api/src/handlers/authorize-document.js
diff --git a/node/src/handlers/get-auth-callback.js b/api/src/handlers/get-auth-callback.js
similarity index 100%
rename from node/src/handlers/get-auth-callback.js
rename to api/src/handlers/get-auth-callback.js
diff --git a/node/src/handlers/get-auth-login.js b/api/src/handlers/get-auth-login.js
similarity index 100%
rename from node/src/handlers/get-auth-login.js
rename to api/src/handlers/get-auth-login.js
diff --git a/node/src/handlers/get-auth-logout.js b/api/src/handlers/get-auth-logout.js
similarity index 100%
rename from node/src/handlers/get-auth-logout.js
rename to api/src/handlers/get-auth-logout.js
diff --git a/node/src/handlers/get-auth-token.js b/api/src/handlers/get-auth-token.js
similarity index 100%
rename from node/src/handlers/get-auth-token.js
rename to api/src/handlers/get-auth-token.js
diff --git a/node/src/handlers/get-auth-whoami.js b/api/src/handlers/get-auth-whoami.js
similarity index 100%
rename from node/src/handlers/get-auth-whoami.js
rename to api/src/handlers/get-auth-whoami.js
diff --git a/node/src/handlers/get-chat-endpoint.js b/api/src/handlers/get-chat-endpoint.js
similarity index 100%
rename from node/src/handlers/get-chat-endpoint.js
rename to api/src/handlers/get-chat-endpoint.js
diff --git a/node/src/handlers/get-collection-by-id.js b/api/src/handlers/get-collection-by-id.js
similarity index 100%
rename from node/src/handlers/get-collection-by-id.js
rename to api/src/handlers/get-collection-by-id.js
diff --git a/node/src/handlers/get-collections.js b/api/src/handlers/get-collections.js
similarity index 100%
rename from node/src/handlers/get-collections.js
rename to api/src/handlers/get-collections.js
diff --git a/node/src/handlers/get-file-set-auth.js b/api/src/handlers/get-file-set-auth.js
similarity index 100%
rename from node/src/handlers/get-file-set-auth.js
rename to api/src/handlers/get-file-set-auth.js
diff --git a/node/src/handlers/get-file-set-by-id.js b/api/src/handlers/get-file-set-by-id.js
similarity index 100%
rename from node/src/handlers/get-file-set-by-id.js
rename to api/src/handlers/get-file-set-by-id.js
diff --git a/node/src/handlers/get-file-set-download.js b/api/src/handlers/get-file-set-download.js
similarity index 78%
rename from node/src/handlers/get-file-set-download.js
rename to api/src/handlers/get-file-set-download.js
index 2b321117..6215ef23 100644
--- a/node/src/handlers/get-file-set-download.js
+++ b/api/src/handlers/get-file-set-download.js
@@ -1,3 +1,7 @@
+const {
+ GetSecretValueCommand,
+ SecretsManagerClient,
+} = require("@aws-sdk/client-secrets-manager");
const { SFNClient, StartExecutionCommand } = require("@aws-sdk/client-sfn");
const { wrap } = require("./middleware");
const { getFileSet } = require("../api/opensearch");
@@ -12,10 +16,20 @@ const mime = require("mime-types");
const opensearchResponse = require("../api/response/opensearch");
const path = require("path");
+let Secrets;
+
+const getSecret = (key) => {
+ return process.env[key.toUpperCase()] || Secrets[key];
+};
+
/**
* Handler for download file set endpoint
*/
-exports.handler = wrap(async (event) => {
+exports.handler = wrap(async (event, context) => {
+ const secretsManagerClient =
+ context?.injections?.secretsManagerClient || new SecretsManagerClient({});
+ await loadSecrets(secretsManagerClient);
+
const id = event.pathParameters.id;
const email = event.queryStringParameters?.email;
const referer = event.headers?.referer;
@@ -57,6 +71,24 @@ exports.handler = wrap(async (event) => {
}
});
+async function loadSecrets(client) {
+ if (Secrets) return Secrets;
+
+ const SECRETS_PATH =
+ process.env?.API_CONFIG_PREFIX || process.env.SECRETS_PATH;
+ const SecretId = `${SECRETS_PATH}/config/av-download`;
+ try {
+ const cmd = new GetSecretValueCommand({ SecretId });
+ const secretsResponse = await client.send(cmd);
+ if (secretsResponse.SecretString) {
+ Secrets = JSON.parse(secretsResponse.SecretString);
+ }
+ } catch (err) {
+ console.warn("Error loading secrets from", SecretId);
+ }
+ return Secrets;
+}
+
function isAltFileDownload(doc) {
const acceptedTypes = [
"application/pdf",
@@ -109,7 +141,7 @@ function derivativeKey(doc) {
async function getDownloadLink(doc) {
const clientParams = {};
- const bucket = process.env.PYRAMID_BUCKET;
+ const bucket = getSecret("pyramid_bucket");
const key = derivativeKey(doc);
const getObjectParams = {
@@ -173,8 +205,8 @@ const IIIFImageRequest = async (doc) => {
};
async function processAVDownload(doc, email, referer) {
- const stepFunctionConfig = process.env.STEP_FUNCTION_ENDPOINT
- ? { endpoint: process.env.STEP_FUNCTION_ENDPOINT }
+ const stepFunctionConfig = getSecret("step_function_endpoint")
+ ? { endpoint: getSecret("step_function_endpoint") }
: {};
const client = new SFNClient(stepFunctionConfig);
@@ -182,7 +214,7 @@ async function processAVDownload(doc, email, referer) {
const url = new URL(fileSet.streaming_url);
const sourceLocation = s3Location(fileSet.streaming_url);
- const destinationBucket = process.env.MEDIA_CONVERT_DESTINATION_BUCKET;
+ const destinationBucket = getSecret("media_convert_destination_bucket");
const fileSetId = path.parse(url.pathname).name;
const fileSetLabel = fileSet.label;
const workId = fileSet.work_id;
@@ -197,14 +229,16 @@ async function processAVDownload(doc, email, referer) {
const filename = isAudio(doc) ? `${fileSetId}.mp3` : `${fileSetId}.mp4`;
var params = {
- stateMachineArn: process.env.AV_DOWNLOAD_STATE_MACHINE_ARN,
+ stateMachineArn: getSecret("av_download_state_machine_arn"),
input: JSON.stringify({
configuration: {
- startAudioTranscodeFunction: process.env.START_AUDIO_TRANSCODE_FUNCTION,
- startTranscodeFunction: process.env.START_TRANSCODE_FUNCTION,
- transcodeStatusFunction: process.env.TRANSCODE_STATUS_FUNCTION,
- getDownloadLinkFunction: process.env.GET_DOWNLOAD_LINK_FUNCTION,
- sendTemplatedEmailFunction: process.env.SEND_TEMPLATED_EMAIL_FUNCTION,
+ startAudioTranscodeFunction: getSecret(
+ "start_audio_transcode_function"
+ ),
+ startTranscodeFunction: getSecret("start_transcode_function"),
+ transcodeStatusFunction: getSecret("transcode_status_function"),
+ getDownloadLinkFunction: getSecret("get_download_link_function"),
+ sendTemplatedEmailFunction: getSecret("send_templated_email_function"),
},
transcodeInput: {
settings: settings,
@@ -221,8 +255,8 @@ async function processAVDownload(doc, email, referer) {
},
sendEmailInput: {
to: email,
- template: process.env.AV_DOWNLOAD_EMAIL_TEMPLATE,
- from: process.env.REPOSITORY_EMAIL,
+ template: getSecret("av_download_email_template"),
+ from: getSecret("repository_email"),
params: {
downloadLink: "",
fileSetId,
@@ -253,7 +287,7 @@ async function processAVDownload(doc, email, referer) {
function s3Location(streaming_url) {
const url = new URL(streaming_url);
- return `s3://${process.env.STREAMING_BUCKET}${url.pathname}`;
+ return `s3://${getSecret("streaming_bucket")}${url.pathname}`;
}
function invalidRequest(code, message) {
diff --git a/node/src/handlers/get-shared-link-by-id.js b/api/src/handlers/get-shared-link-by-id.js
similarity index 100%
rename from node/src/handlers/get-shared-link-by-id.js
rename to api/src/handlers/get-shared-link-by-id.js
diff --git a/node/src/handlers/get-similar.js b/api/src/handlers/get-similar.js
similarity index 100%
rename from node/src/handlers/get-similar.js
rename to api/src/handlers/get-similar.js
diff --git a/node/src/handlers/get-thumbnail.js b/api/src/handlers/get-thumbnail.js
similarity index 100%
rename from node/src/handlers/get-thumbnail.js
rename to api/src/handlers/get-thumbnail.js
diff --git a/node/src/handlers/get-work-auth.js b/api/src/handlers/get-work-auth.js
similarity index 100%
rename from node/src/handlers/get-work-auth.js
rename to api/src/handlers/get-work-auth.js
diff --git a/node/src/handlers/get-work-by-id.js b/api/src/handlers/get-work-by-id.js
similarity index 100%
rename from node/src/handlers/get-work-by-id.js
rename to api/src/handlers/get-work-by-id.js
diff --git a/node/src/handlers/middleware.js b/api/src/handlers/middleware.js
similarity index 94%
rename from node/src/handlers/middleware.js
rename to api/src/handlers/middleware.js
index 107ca712..c2984695 100644
--- a/node/src/handlers/middleware.js
+++ b/api/src/handlers/middleware.js
@@ -19,14 +19,16 @@ const debug = require("debug")("api.middleware");
const Honeybadger = require("../honeybadger-setup");
const { StatusCodes } = require("http-status-codes");
const { SECRETS_PATH } = process.env;
+const API_CONFIG_PREFIX = process.env?.API_CONFIG_PREFIX || SECRETS_PATH;
const SecretPaths = [
- `${SECRETS_PATH}/config/dcapi`,
+ `${API_CONFIG_PREFIX}/config/dcapi`,
`${SECRETS_PATH}/infrastructure/index`,
`${SECRETS_PATH}/infrastructure/nusso`,
];
const wrap = function (handler) {
return async (event, context) => {
+ console.log("getSearch event", JSON.stringify(event));
await _initializeEnvironment();
let response;
diff --git a/node/src/handlers/oai.js b/api/src/handlers/oai.js
similarity index 100%
rename from node/src/handlers/oai.js
rename to api/src/handlers/oai.js
diff --git a/node/src/handlers/oai/search.js b/api/src/handlers/oai/search.js
similarity index 100%
rename from node/src/handlers/oai/search.js
rename to api/src/handlers/oai/search.js
diff --git a/node/src/handlers/oai/verbs.js b/api/src/handlers/oai/verbs.js
similarity index 100%
rename from node/src/handlers/oai/verbs.js
rename to api/src/handlers/oai/verbs.js
diff --git a/node/src/handlers/oai/xml-transformer.js b/api/src/handlers/oai/xml-transformer.js
similarity index 100%
rename from node/src/handlers/oai/xml-transformer.js
rename to api/src/handlers/oai/xml-transformer.js
diff --git a/node/src/handlers/options-request.js b/api/src/handlers/options-request.js
similarity index 100%
rename from node/src/handlers/options-request.js
rename to api/src/handlers/options-request.js
diff --git a/node/src/handlers/post-chat-feedback.js b/api/src/handlers/post-chat-feedback.js
similarity index 61%
rename from node/src/handlers/post-chat-feedback.js
rename to api/src/handlers/post-chat-feedback.js
index 4ab8a6c4..5939e1ca 100644
--- a/node/src/handlers/post-chat-feedback.js
+++ b/api/src/handlers/post-chat-feedback.js
@@ -8,18 +8,63 @@ const feedbackSchema = {
type: "object",
properties: {
sentiment: { enum: ["positive", "negative"] },
+ timestamp: { type: "string" },
+ ref: { type: "string" },
+ refIndex: { type: "number" },
context: {
type: "object",
properties: {
ref: { type: "string" },
- question: { type: "string" },
- answer: { type: "string" },
- source_documents: {
+ initialQuestion: { type: "string" },
+ turns: {
type: "array",
- items: { type: "string" },
+ items: {
+ type: "object",
+ properties: {
+ question: { type: "string" },
+ answer: { type: "string" },
+ works: {
+ type: "array",
+ items: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ id: { type: "string" },
+ title: { type: "string" },
+ visibility: { type: "string" },
+ work_type: { type: "string" },
+ thumbnail: { type: "string" },
+ },
+ },
+ },
+ },
+ aggregations: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ doc_count_error_upper_bound: { type: "number" },
+ sum_other_doc_count: { type: "number" },
+ buckets: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ key: { type: "string" },
+ doc_count: { type: "number" },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ required: ["question", "answer", "works", "aggregations"],
+ },
},
},
- required: ["ref", "question", "answer", "source_documents"],
+ required: ["ref", "initialQuestion", "turns"],
additionalProperties: false,
},
feedback: {
@@ -33,7 +78,14 @@ const feedbackSchema = {
additionalProperties: false,
},
},
- required: ["sentiment", "context", "feedback"],
+ required: [
+ "sentiment",
+ "timestamp",
+ "ref",
+ "refIndex",
+ "context",
+ "feedback",
+ ],
additionalProperties: false,
};
@@ -66,12 +118,12 @@ const handler = wrap(async (event, context) => {
return {
statusCode: 400,
headers: { "Content-Type": "text/plain" },
- body: JSON.stringify(errors.join(", ")),
+ body: errors.join(", "),
};
}
await uploadToS3(
s3Client,
- `${content.sentiment}/${content.context.ref}.json`,
+ `${content.sentiment}/${content.ref}_${content.refIndex}.json`,
content
);
diff --git a/node/src/handlers/search-runner.js b/api/src/handlers/search-runner.js
similarity index 100%
rename from node/src/handlers/search-runner.js
rename to api/src/handlers/search-runner.js
diff --git a/node/src/handlers/search.js b/api/src/handlers/search.js
similarity index 100%
rename from node/src/handlers/search.js
rename to api/src/handlers/search.js
diff --git a/node/src/handlers/transcode-templates.js b/api/src/handlers/transcode-templates.js
similarity index 100%
rename from node/src/handlers/transcode-templates.js
rename to api/src/handlers/transcode-templates.js
diff --git a/node/src/helpers.js b/api/src/helpers.js
similarity index 100%
rename from node/src/helpers.js
rename to api/src/helpers.js
diff --git a/node/src/honeybadger-setup.js b/api/src/honeybadger-setup.js
similarity index 100%
rename from node/src/honeybadger-setup.js
rename to api/src/honeybadger-setup.js
diff --git a/node/src/package-lock.json b/api/src/package-lock.json
similarity index 93%
rename from node/src/package-lock.json
rename to api/src/package-lock.json
index 081860f4..4ef994d8 100644
--- a/node/src/package-lock.json
+++ b/api/src/package-lock.json
@@ -8,7 +8,7 @@
"name": "dc-api",
"version": "2.7.0",
"license": "Apache-2.0",
- "devDependencies": {
+ "dependencies": {
"@aws-crypto/sha256-browser": "^2.0.1",
"@aws-sdk/client-s3": "^3.565.0",
"@aws-sdk/client-secrets-manager": "^3.563.0",
@@ -38,7 +38,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-5.2.0.tgz",
"integrity": "sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -52,7 +51,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -63,7 +61,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -75,7 +72,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -88,7 +84,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -100,14 +95,12 @@
"node_modules/@aws-crypto/crc32/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-crypto/crc32c": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/crc32c/-/crc32c-5.2.0.tgz",
"integrity": "sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -118,7 +111,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -129,7 +121,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -141,7 +132,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -154,7 +144,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -166,14 +155,12 @@
"node_modules/@aws-crypto/crc32c/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-crypto/ie11-detection": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-2.0.2.tgz",
"integrity": "sha512-5XDMQY98gMAf/WRTic5G++jfmS/VLM0rwpiOpaainKi4L0nqWMSB1SzsrEG5rjFZGYN6ZAefO+/Yta2dFM0kMw==",
- "dev": true,
"dependencies": {
"tslib": "^1.11.1"
}
@@ -182,7 +169,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha1-browser/-/sha1-browser-5.2.0.tgz",
"integrity": "sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==",
- "dev": true,
"dependencies": {
"@aws-crypto/supports-web-crypto": "^5.2.0",
"@aws-crypto/util": "^5.2.0",
@@ -196,7 +182,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -205,7 +190,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -216,7 +200,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -228,7 +211,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -241,7 +223,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -253,14 +234,12 @@
"node_modules/@aws-crypto/sha1-browser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-crypto/sha256-browser": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-2.0.2.tgz",
"integrity": "sha512-V7nEV6nKYHqiWVksjQ/BnIppDHrvALDrLoL9lsxvhn/iVo77L7zGLjR+/+nFFvqg/EUz/AJr7YnVGimf1e9X7Q==",
- "dev": true,
"dependencies": {
"@aws-crypto/ie11-detection": "^2.0.2",
"@aws-crypto/sha256-js": "^2.0.2",
@@ -276,7 +255,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-2.0.2.tgz",
"integrity": "sha512-iXLdKH19qPmIC73fVCrHWCSYjN/sxaAvZ3jNNyw6FclmHyjLKg0f69WlC9KTnyElxCR5MO9SKaG00VwlJwyAkQ==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^2.0.2",
"@aws-sdk/types": "^3.110.0",
@@ -287,7 +265,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-2.0.2.tgz",
"integrity": "sha512-6mbSsLHwZ99CTOOswvCRP3C+VCWnzBf+1SnbWxzzJ9lR0mA0JnY2JEAhp8rqmTE0GPFy88rrM27ffgp62oErMQ==",
- "dev": true,
"dependencies": {
"tslib": "^1.11.1"
}
@@ -296,7 +273,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-2.0.2.tgz",
"integrity": "sha512-Lgu5v/0e/BcrZ5m/IWqzPUf3UYFTy/PpeED+uc9SWUR1iZQL8XXbGQg10UfllwwBryO3hFF5dizK+78aoXC1eA==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.110.0",
"@aws-sdk/util-utf8-browser": "^3.0.0",
@@ -307,7 +283,6 @@
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.677.0.tgz",
"integrity": "sha512-TrfT7hyjs0FRh5MosduFZsskHhjvBKL451Wo5f80IcnxvNL8YZouh6kblUSiHKUxuc32WSiAKbXJJaPWdXs75Q==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha1-browser": "5.2.0",
"@aws-crypto/sha256-browser": "5.2.0",
@@ -376,7 +351,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz",
"integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/supports-web-crypto": "^5.2.0",
@@ -391,7 +365,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -404,7 +377,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz",
"integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -418,7 +390,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -427,7 +398,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -438,7 +408,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -451,7 +420,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -464,7 +432,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -476,7 +443,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -492,7 +458,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -505,7 +470,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -517,14 +481,12 @@
"node_modules/@aws-sdk/client-s3/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/client-secrets-manager": {
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-secrets-manager/-/client-secrets-manager-3.686.0.tgz",
"integrity": "sha512-cGp4ZWS1X8p4ZmmG5GXc9nup1LsmbcBLNjZqAm4OXUINSTymrL9WKSQC/Yv48WnVlloZnQvwbK7/biCPrAS06Q==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -578,7 +540,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz",
"integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/supports-web-crypto": "^5.2.0",
@@ -593,7 +554,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -606,7 +566,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz",
"integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -620,7 +579,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -629,7 +587,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -640,7 +597,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -653,7 +609,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.686.0.tgz",
"integrity": "sha512-D8huL2BSHNP9QdQrqPcx4DCJXcG/vrPimNbymgCBgnYyS1HNs11Hu27ZPrbWCZFC8n/bvfXGXOhm8WAHOi4Vtw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -702,7 +657,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.686.0.tgz",
"integrity": "sha512-bV8yw1tpEj9WOVEnIJTcHPmTqikGccvh9RCg9ohc5DVKLajt/pUF4b+8dDyqNrEijUqlpDDwpSnh1GFhfe298A==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -755,7 +709,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.686.0.tgz",
"integrity": "sha512-WVyOYdK3w7RhK6UrA2MY8KPIbcZ88BGIoKmRhcOXdIUC8CLL1UIECgdRthFXOU+MBqDPFS+VeF+COk0CpRhE8Q==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -806,7 +759,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.686.0.tgz",
"integrity": "sha512-Xt3DV4DnAT3v2WURwzTxWQK34Ew+iiLzoUoguvLaZrVMFOqMMrwVjP+sizqIaHp1j7rGmFcN5I8saXnsDLuQLA==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.686.0",
"@smithy/core": "^2.5.1",
@@ -828,7 +780,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.686.0.tgz",
"integrity": "sha512-osD7lPO8OREkgxPiTWmA1i6XEmOth1uW9HWWj/+A2YGCj1G/t2sHu931w4Qj9NWHYZtbTTXQYVRg+TErALV7nQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.686.0",
"@aws-sdk/types": "3.686.0",
@@ -844,7 +795,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.686.0.tgz",
"integrity": "sha512-xyGAD/f3vR/wssUiZrNFWQWXZvI4zRm2wpHhoHA1cC2fbRMNFYtFn365yw6dU7l00ZLcdFB1H119AYIUZS7xbw==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.686.0",
"@aws-sdk/types": "3.686.0",
@@ -865,7 +815,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.686.0.tgz",
"integrity": "sha512-90yr47QsduNiuVizMaJ2GctXZfp/z6s9eSk8ryMxMEJ2zJtaQHmJXIxaNnXj5Kh7V+HhCK7rYu58eyhZvz2Seg==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.686.0",
"@aws-sdk/credential-provider-env": "3.686.0",
@@ -891,7 +840,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.686.0.tgz",
"integrity": "sha512-d5etJJD5rE3ALxrZag80EuFYI+tmJrS4E4dvFNRCosVFKvIC89VVpVY0W+OaA0J+D4FD3OzBwxan31BQAW3IyA==",
- "dev": true,
"dependencies": {
"@aws-sdk/credential-provider-env": "3.686.0",
"@aws-sdk/credential-provider-http": "3.686.0",
@@ -914,7 +862,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.686.0.tgz",
"integrity": "sha512-sXqaAgyzMOc+dm4CnzAR5Q6S9OWVHyZjLfW6IQkmGjqeQXmZl24c4E82+w64C+CTkJrFLzH1VNOYp1Hy5gE6Qw==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.686.0",
"@aws-sdk/types": "3.686.0",
@@ -931,7 +878,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.686.0.tgz",
"integrity": "sha512-bGDFRcqpGUe2YBL5gmRZTLcxGwbtFd916JsdqmNgJwhhlOXPF6nqjGil5ZYruS3AMPy0BMntnG0Mvn/ZbusT/A==",
- "dev": true,
"dependencies": {
"@aws-sdk/client-sso": "3.686.0",
"@aws-sdk/core": "3.686.0",
@@ -950,7 +896,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.686.0.tgz",
"integrity": "sha512-40UqCpPxyHCXDP7CGd9JIOZDgDZf+u1OyLaGBpjQJlz1HYuEsIWnnbTe29Yg3Ah/Zc3g4NBWcUdlGVotlnpnDg==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.686.0",
"@aws-sdk/types": "3.686.0",
@@ -969,7 +914,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.686.0.tgz",
"integrity": "sha512-+Yc6rO02z+yhFbHmRZGvEw1vmzf/ifS9a4aBjJGeVVU+ZxaUvnk+IUZWrj4YQopUQ+bSujmMUzJLXSkbDq7yuw==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.686.0",
"@smithy/protocol-http": "^4.1.5",
@@ -984,7 +928,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.686.0.tgz",
"integrity": "sha512-cX43ODfA2+SPdX7VRxu6gXk4t4bdVJ9pkktbfnkE5t27OlwNfvSGGhnHrQL8xTOFeyQ+3T+oowf26gf1OI+vIg==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.686.0",
"@smithy/types": "^3.6.0",
@@ -998,7 +941,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.686.0.tgz",
"integrity": "sha512-jF9hQ162xLgp9zZ/3w5RUNhmwVnXDBlABEUX8jCgzaFpaa742qR/KKtjjZQ6jMbQnP+8fOCSXFAVNMU+s6v81w==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.686.0",
"@smithy/protocol-http": "^4.1.5",
@@ -1013,7 +955,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.686.0.tgz",
"integrity": "sha512-/GRU68H5J66OD2a/RtX5s2ECtXTlMq6NneLlzcx0mIWnZ2VRMS2vFW2j2jrBEPJ5Y5us1/lK/fbun6gNo3qh7Q==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.686.0",
"@aws-sdk/types": "3.686.0",
@@ -1031,7 +972,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.686.0.tgz",
"integrity": "sha512-6zXD3bSD8tcsMAVVwO1gO7rI1uy2fCD3czgawuPGPopeLiPpo6/3FoUWCQzk2nvEhj7p9Z4BbjwZGSlRkVrXTw==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.686.0",
"@smithy/node-config-provider": "^3.1.9",
@@ -1048,7 +988,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.686.0.tgz",
"integrity": "sha512-9oL4kTCSePFmyKPskibeiOXV6qavPZ63/kXM9Wh9V6dTSvBtLeNnMxqGvENGKJcTdIgtoqyqA6ET9u0PJ5IRIg==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.686.0",
"@smithy/property-provider": "^3.1.7",
@@ -1067,7 +1006,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.686.0.tgz",
"integrity": "sha512-xFnrb3wxOoJcW2Xrh63ZgFo5buIu9DF7bOHnwoUxHdNpUXicUh0AHw85TjXxyxIAd0d1psY/DU7QHoNI3OswgQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1080,7 +1018,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.686.0.tgz",
"integrity": "sha512-7msZE2oYl+6QYeeRBjlDgxQUhq/XRky3cXE0FqLFs2muLS7XSuQEXkpOXB3R782ygAP6JX0kmBxPTLurRTikZg==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.686.0",
"@smithy/types": "^3.6.0",
@@ -1095,7 +1032,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.686.0.tgz",
"integrity": "sha512-YiQXeGYZegF1b7B2GOR61orhgv79qmI0z7+Agm3NXLO6hGfVV3kFUJbXnjtH1BgWo5hbZYW7HQ2omGb3dnb6Lg==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.686.0",
"@smithy/types": "^3.6.0",
@@ -1107,7 +1043,6 @@
"version": "3.686.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.686.0.tgz",
"integrity": "sha512-XXUhZPeacJt5BmWc0qNXA4/yyQGXPmFcTOFe5aqXuZbhtTCNVJ0fPQHFip37iGSHCg8eAFykiBn9W8hD4swolQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/middleware-user-agent": "3.686.0",
"@aws-sdk/types": "3.686.0",
@@ -1131,7 +1066,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1144,7 +1078,6 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-4.0.0.tgz",
"integrity": "sha512-MLb1f5tbBO2X6K4lMEKJvxeLooyg7guq48C2zKr4qM7F2Gpkz4dc+hdSgu77pCJ76jVqFBjZczHYAs6dp15N+g==",
- "dev": true,
"dependencies": {
"@smithy/protocol-http": "^4.1.5",
"@smithy/querystring-builder": "^3.0.8",
@@ -1157,7 +1090,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -1173,7 +1105,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1186,7 +1117,6 @@
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-4.2.1.tgz",
"integrity": "sha512-NsV1jF4EvmO5wqmaSzlnTVetemBS3FZHdyc5CExbDljcyJCEEkJr8ANu2JvtNbVg/9MvKAWV44kTrGS+Pi4INg==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^3.0.0",
"@smithy/protocol-http": "^4.1.5",
@@ -1205,7 +1135,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -1218,7 +1147,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -1229,14 +1157,12 @@
"node_modules/@aws-sdk/client-secrets-manager/node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
- "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "dev": true
+ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
},
"node_modules/@aws-sdk/client-sfn": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sfn/-/client-sfn-3.677.0.tgz",
"integrity": "sha512-mEmJwlHCOf2khp3w1cnImSZCfYrOrbpHAi/ucxX9eYvnWvFhhORVeY+g6clmw1EZWZO+BKMLIAGzAK9ATl9Thw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -1290,7 +1216,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz",
"integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/supports-web-crypto": "^5.2.0",
@@ -1305,7 +1230,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -1318,7 +1242,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz",
"integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -1332,7 +1255,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -1341,7 +1263,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -1352,7 +1273,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -1365,7 +1285,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1378,7 +1297,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -1390,7 +1308,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -1406,7 +1323,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1419,7 +1335,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -1431,14 +1346,12 @@
"node_modules/@aws-sdk/client-sfn/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/client-sns": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sns/-/client-sns-3.677.0.tgz",
"integrity": "sha512-jK9gqYH5mSH8rsg4d/pXQMJYx6a4Xd8xKnfg49g4qWbhNdQMfYpLryRsNkkBEPn3EylACu+fDbP+Pz/6GeOSgg==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -1490,7 +1403,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz",
"integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/supports-web-crypto": "^5.2.0",
@@ -1505,7 +1417,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -1518,7 +1429,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz",
"integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -1532,7 +1442,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -1541,7 +1450,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -1552,7 +1460,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -1565,7 +1472,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1578,7 +1484,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -1590,7 +1495,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -1606,7 +1510,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1619,7 +1522,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -1631,14 +1533,12 @@
"node_modules/@aws-sdk/client-sns/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/client-sso": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.677.0.tgz",
"integrity": "sha512-/y6EskFhOa2w9VwXaXoyOrGeBjnOj/72wsxDOslS908qH+nf7m40pBK6e/iBelg04vlx0gqhlbfK8hLbaT6KHA==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -1687,7 +1587,6 @@
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.677.0.tgz",
"integrity": "sha512-2zgZkRIU7DsnUVOy+9bjfJ0IYMzi9ONWXQt/WqMa7HOnj4RfenfpipyhHYxGZR5kmehgv53EI79yvUu+SAfGNg==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -1740,7 +1639,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz",
"integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/supports-web-crypto": "^5.2.0",
@@ -1755,7 +1653,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -1768,7 +1665,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz",
"integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -1782,7 +1678,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -1791,7 +1686,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -1802,7 +1696,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -1815,7 +1708,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1828,7 +1720,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -1840,7 +1731,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -1856,7 +1746,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1869,7 +1758,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -1881,14 +1769,12 @@
"node_modules/@aws-sdk/client-sso-oidc/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/client-sso/node_modules/@aws-crypto/sha256-browser": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz",
"integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/supports-web-crypto": "^5.2.0",
@@ -1903,7 +1789,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -1916,7 +1801,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz",
"integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -1930,7 +1814,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -1939,7 +1822,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -1950,7 +1832,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -1963,7 +1844,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -1976,7 +1856,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -1988,7 +1867,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -2004,7 +1882,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2017,7 +1894,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -2029,14 +1905,12 @@
"node_modules/@aws-sdk/client-sso/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/client-sts": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.677.0.tgz",
"integrity": "sha512-N5fs1GLSthnwrs44b4IJI//dcShuIT42g4pM8FCUJZwbrWn9Sp9F876R1mvb8A9TAy2S4qCXi7TkHS0REnuicQ==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
@@ -2087,7 +1961,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz",
"integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==",
- "dev": true,
"dependencies": {
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/supports-web-crypto": "^5.2.0",
@@ -2102,7 +1975,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -2115,7 +1987,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz",
"integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==",
- "dev": true,
"dependencies": {
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
@@ -2129,7 +2000,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -2138,7 +2008,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
@@ -2149,7 +2018,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -2162,7 +2030,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2175,7 +2042,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -2187,7 +2053,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -2203,7 +2068,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2216,7 +2080,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -2228,14 +2091,12 @@
"node_modules/@aws-sdk/client-sts/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/core": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.677.0.tgz",
"integrity": "sha512-5auvc1wmXmd7u9Y9nM95Ia+VX7J2FiZLuADitHqE4mHPH9riDgOY+uK/yM+UKr+lfq4zKiZQG7i8cfabZlCY8g==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/core": "^2.4.8",
@@ -2257,7 +2118,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2270,7 +2130,6 @@
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-4.2.1.tgz",
"integrity": "sha512-NsV1jF4EvmO5wqmaSzlnTVetemBS3FZHdyc5CExbDljcyJCEEkJr8ANu2JvtNbVg/9MvKAWV44kTrGS+Pi4INg==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^3.0.0",
"@smithy/protocol-http": "^4.1.5",
@@ -2288,14 +2147,12 @@
"node_modules/@aws-sdk/core/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/credential-provider-env": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.677.0.tgz",
"integrity": "sha512-0ctcqKzclr9TiNIkB8I+YRogjWH/4mLWQGv/bgb8ElHqph+rPy4pOubj1Ax01sbs7XdwDaImjBYV5xXE+BEsYw==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -2310,14 +2167,12 @@
"node_modules/@aws-sdk/credential-provider-env/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/credential-provider-http": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.677.0.tgz",
"integrity": "sha512-c4TnShdzk37dhL1HGGzZ2PDKIIEmo1IbT/4y5hSRdNc8Z8fu6spE5GoeVsv6p/HdSGPS7XTy6aOFCMCk4AeIzQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -2338,7 +2193,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2351,7 +2205,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -2367,7 +2220,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2379,14 +2231,12 @@
"node_modules/@aws-sdk/credential-provider-http/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/credential-provider-ini": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.677.0.tgz",
"integrity": "sha512-hW+oHj5zplPLzTk74LG+gZVOKQnmBPyRIbwg3uZWr23xfOxh/Osu9Wq8qwgu2+UyFHr+6/DRFjZJ6avNA2jpKw==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.677.0",
"@aws-sdk/credential-provider-env": "3.677.0",
@@ -2411,14 +2261,12 @@
"node_modules/@aws-sdk/credential-provider-ini/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/credential-provider-node": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.677.0.tgz",
"integrity": "sha512-DwFriiDx2SSdj7VhRv/0fm8UIK7isy+WZAlqUdZ9xDsX4x1AD5KwMv9AwGhJrMuTjnPSxRSwjt23S7ZXwUfhdw==",
- "dev": true,
"dependencies": {
"@aws-sdk/credential-provider-env": "3.677.0",
"@aws-sdk/credential-provider-http": "3.677.0",
@@ -2440,14 +2288,12 @@
"node_modules/@aws-sdk/credential-provider-node/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/credential-provider-process": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.677.0.tgz",
"integrity": "sha512-pBqHjIFvHBJb2NOsVqdIHWcOzXDoNXBokxTvMggb3WYML6ixwrH7kpd1CAzegeQlvZD4SCcRoy3ahv5rbuR+og==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -2463,14 +2309,12 @@
"node_modules/@aws-sdk/credential-provider-process/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/credential-provider-sso": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.677.0.tgz",
"integrity": "sha512-OkRP3z8yI22t9LS9At5fYr6RN7zKSDiGgeyjEnrqiGHOWGPMJN2GKa8IAFC4dgXt4Nm/EfmEW7UweiqzEKJKOA==",
- "dev": true,
"dependencies": {
"@aws-sdk/client-sso": "3.677.0",
"@aws-sdk/core": "3.677.0",
@@ -2488,14 +2332,12 @@
"node_modules/@aws-sdk/credential-provider-sso/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/credential-provider-web-identity": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.677.0.tgz",
"integrity": "sha512-yjuI6hSt1rLFqBQiNKx/nF75Ao72xR8ybqKztzebtFNCrYl8oXVkRiigg5XKNCDmelsx1lcU9IcSiuPHzlGtUQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -2513,14 +2355,12 @@
"node_modules/@aws-sdk/credential-provider-web-identity/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-bucket-endpoint": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.667.0.tgz",
"integrity": "sha512-XGz4jMAkDoTyFdtLz7ZF+C05IAhCTC1PllpvTBaj821z/L0ilhbqVhrT/f2Buw8Id/K5A390csGXgusXyrFFjA==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@aws-sdk/util-arn-parser": "3.568.0",
@@ -2538,7 +2378,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2550,14 +2389,12 @@
"node_modules/@aws-sdk/middleware-bucket-endpoint/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-expect-continue": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.667.0.tgz",
"integrity": "sha512-0TiSL9S5DSG95NHGIz6qTMuV7GDKVn8tvvGSrSSZu/wXO3JaYSH0AElVpYfc4PtPRqVpEyNA7nnc7W56mMCLWQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/protocol-http": "^4.1.4",
@@ -2572,7 +2409,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2584,14 +2420,12 @@
"node_modules/@aws-sdk/middleware-expect-continue/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-flexible-checksums": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.677.0.tgz",
"integrity": "sha512-mTv3zRH+3/hW8hY0K855UrBmzW4pLb7n8MjlyL2dR8+wfXXw6DsSxGmEb4Jq13OjLTwSxyZs00JdpKtzgiIieA==",
- "dev": true,
"dependencies": {
"@aws-crypto/crc32": "5.2.0",
"@aws-crypto/crc32c": "5.2.0",
@@ -2613,7 +2447,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2625,14 +2458,12 @@
"node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-host-header": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.667.0.tgz",
"integrity": "sha512-Z7fIAMQnPegs7JjAQvlOeWXwpMRfegh5eCoIP6VLJIeR6DLfYKbP35JBtt98R6DXslrN2RsbTogjbxPEDQfw1w==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/protocol-http": "^4.1.4",
@@ -2647,7 +2478,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2659,14 +2489,12 @@
"node_modules/@aws-sdk/middleware-host-header/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-location-constraint": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.667.0.tgz",
"integrity": "sha512-ob85H3HhT3/u5O+x0o557xGZ78vSNeSSwMaSitxdsfs2hOuoUl1uk+OeLpi1hkuJnL41FPpokV7TVII2XrFfmg==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/types": "^3.5.0",
@@ -2679,14 +2507,12 @@
"node_modules/@aws-sdk/middleware-location-constraint/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-logger": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.667.0.tgz",
"integrity": "sha512-PtTRNpNm/5c746jRgZCNg4X9xEJIwggkGJrF0GP9AB1ANg4pc/sF2Fvn1NtqPe9wtQ2stunJprnm5WkCHN7QiA==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/types": "^3.5.0",
@@ -2699,14 +2525,12 @@
"node_modules/@aws-sdk/middleware-logger/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-recursion-detection": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.667.0.tgz",
"integrity": "sha512-U5glWD3ehFohzpUpopLtmqAlDurGWo2wRGPNgi4SwhWU7UDt6LS7E/UvJjqC0CUrjlzOw+my2A+Ncf+fisMhxQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/protocol-http": "^4.1.4",
@@ -2721,7 +2545,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2733,14 +2556,12 @@
"node_modules/@aws-sdk/middleware-recursion-detection/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-sdk-s3": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.677.0.tgz",
"integrity": "sha512-3U8FHgWuxhAl97HMdaFs/SJlhpb5+i//FHv0JWYm2oAPZflIRFeJn1bgVtD7ka1NY2iJjpnqX8hHJPS547MnFQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -2765,7 +2586,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2778,7 +2598,6 @@
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-4.2.1.tgz",
"integrity": "sha512-NsV1jF4EvmO5wqmaSzlnTVetemBS3FZHdyc5CExbDljcyJCEEkJr8ANu2JvtNbVg/9MvKAWV44kTrGS+Pi4INg==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^3.0.0",
"@smithy/protocol-http": "^4.1.5",
@@ -2796,14 +2615,12 @@
"node_modules/@aws-sdk/middleware-sdk-s3/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-ssec": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.667.0.tgz",
"integrity": "sha512-1wuAUZIkmZIvOmGg5qNQU821CGFHhkuKioxXgNh0DpUxZ9+AeiV7yorJr+bqkb2KBFv1i1TnzGRecvKf/KvZIQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/types": "^3.5.0",
@@ -2816,14 +2633,12 @@
"node_modules/@aws-sdk/middleware-ssec/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/middleware-user-agent": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.677.0.tgz",
"integrity": "sha512-A3gzUsTsvyv/JCmD0p2fkbiOyp+tpAiAADDwzi+eYeyzH4xzqnrzSkGk5KSb58uUQo27eeBzRXHd46d0u+sMrQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/core": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -2841,7 +2656,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2853,14 +2667,12 @@
"node_modules/@aws-sdk/middleware-user-agent/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/region-config-resolver": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz",
"integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/node-config-provider": "^3.1.8",
@@ -2876,14 +2688,12 @@
"node_modules/@aws-sdk/region-config-resolver/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/s3-request-presigner": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/s3-request-presigner/-/s3-request-presigner-3.677.0.tgz",
"integrity": "sha512-oudJgBX8FPFWiDt/jQ1fW+sJ+XHRMsQ2XXV1aAMCBNsopTBIfn4N8tAymLI3AVJl4UAsxToUITJb5k4NAx2UTw==",
- "dev": true,
"dependencies": {
"@aws-sdk/signature-v4-multi-region": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -2902,7 +2712,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2914,14 +2723,12 @@
"node_modules/@aws-sdk/s3-request-presigner/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/signature-v4-multi-region": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.677.0.tgz",
"integrity": "sha512-VJvYwPnyPMBbvKDAO58t90/y2AtdRp4epax6QR0XScZEBuS777gQ3wJb1JyHLeEAEolKj/dd6jV5Iq+/lsZIIQ==",
- "dev": true,
"dependencies": {
"@aws-sdk/middleware-sdk-s3": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -2938,7 +2745,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -2951,7 +2757,6 @@
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-4.2.1.tgz",
"integrity": "sha512-NsV1jF4EvmO5wqmaSzlnTVetemBS3FZHdyc5CExbDljcyJCEEkJr8ANu2JvtNbVg/9MvKAWV44kTrGS+Pi4INg==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^3.0.0",
"@smithy/protocol-http": "^4.1.5",
@@ -2969,14 +2774,12 @@
"node_modules/@aws-sdk/signature-v4-multi-region/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/token-providers": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.667.0.tgz",
"integrity": "sha512-ZecJlG8p6D4UTYlBHwOWX6nknVtw/OBJ3yPXTSajBjhUlj9lE2xvejI8gl4rqkyLXk7z3bki+KR4tATbMaM9yg==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/property-provider": "^3.1.7",
@@ -2994,14 +2797,12 @@
"node_modules/@aws-sdk/token-providers/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/types": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz",
"integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.5.0",
"tslib": "^2.6.2"
@@ -3013,14 +2814,12 @@
"node_modules/@aws-sdk/types/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/util-arn-parser": {
"version": "3.568.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.568.0.tgz",
"integrity": "sha512-XUKJWWo+KOB7fbnPP0+g/o5Ulku/X53t7i/h+sPHr5xxYTJJ9CYnbToo95mzxe7xWvkLrsNtJ8L+MnNn9INs2w==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -3031,14 +2830,12 @@
"node_modules/@aws-sdk/util-arn-parser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/util-endpoints": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.667.0.tgz",
"integrity": "sha512-X22SYDAuQJWnkF1/q17pkX3nGw5XMD9YEUbmt87vUnRq7iyJ3JOpl6UKOBeUBaL838wA5yzdbinmCITJ/VZ1QA==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/types": "^3.5.0",
@@ -3052,14 +2849,12 @@
"node_modules/@aws-sdk/util-endpoints/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/util-format-url": {
"version": "3.667.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-format-url/-/util-format-url-3.667.0.tgz",
"integrity": "sha512-S0D731SnEPnTfbJ/Dldw5dDrOc8uipK6NLXHDs2xIq0t61iwZLMEiN8yWCs2wAZVVJKpldUM1THLaaufU9SSSA==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/querystring-builder": "^3.0.7",
@@ -3073,14 +2868,12 @@
"node_modules/@aws-sdk/util-format-url/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/util-locate-window": {
"version": "3.568.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.568.0.tgz",
"integrity": "sha512-3nh4TINkXYr+H41QaPelCceEB2FXP3fxp93YZXB/kqJvX0U9j0N0Uk45gvsjmEPzG8XxkPEeLIfT2I1M7A6Lig==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -3091,14 +2884,12 @@
"node_modules/@aws-sdk/util-locate-window/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/util-user-agent-browser": {
"version": "3.675.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.675.0.tgz",
"integrity": "sha512-HW4vGfRiX54RLcsYjLuAhcBBJ6lRVEZd7njfGpAwBB9s7BH8t48vrpYbyA5XbbqbTvXfYBnugQCUw9HWjEa1ww==",
- "dev": true,
"dependencies": {
"@aws-sdk/types": "3.667.0",
"@smithy/types": "^3.5.0",
@@ -3109,14 +2900,12 @@
"node_modules/@aws-sdk/util-user-agent-browser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/util-user-agent-node": {
"version": "3.677.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.677.0.tgz",
"integrity": "sha512-gFhL0zVY/um0Eu2aWil82pjWaZL4yBmOnjz0+RDz18okFBHaz1Om8o/H+1Vvj+xsnuDYV4ezVMyAaXVtTcYOnw==",
- "dev": true,
"dependencies": {
"@aws-sdk/middleware-user-agent": "3.677.0",
"@aws-sdk/types": "3.667.0",
@@ -3139,14 +2928,12 @@
"node_modules/@aws-sdk/util-user-agent-node/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/util-utf8-browser": {
"version": "3.259.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz",
"integrity": "sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==",
- "dev": true,
"dependencies": {
"tslib": "^2.3.1"
}
@@ -3154,14 +2941,12 @@
"node_modules/@aws-sdk/util-utf8-browser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@aws-sdk/xml-builder": {
"version": "3.662.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.662.0.tgz",
"integrity": "sha512-ikLkXn0igUpnJu2mCZjklvmcDGWT9OaLRv3JyC/cRkTaaSrblPjPM7KKsltxdMTLQ+v7fjCN0TsJpxphMfaOPA==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.5.0",
"tslib": "^2.6.2"
@@ -3173,14 +2958,12 @@
"node_modules/@aws-sdk/xml-builder/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@babel/runtime": {
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.9.tgz",
"integrity": "sha512-4zpTHZ9Cm6L9L+uIqghQX8ZXg8HKFcjYO3qHoO8zTmRm6HQUJ8SSJ+KRvbMBZn0EGVlT4DRYeQ/6hjlyXBh+Kg==",
- "dev": true,
"dependencies": {
"regenerator-runtime": "^0.14.0"
},
@@ -3192,7 +2975,6 @@
"version": "4.9.2",
"resolved": "https://registry.npmjs.org/@honeybadger-io/core/-/core-4.9.2.tgz",
"integrity": "sha512-l1dabSseq+qDAspHhUg4N9UjEhbu1W7ESJRXyyrWxuUxcPXDrk24ssSgAVowG/NLlCfUMLQ1iWYKIYY6f+25sw==",
- "dev": true,
"dependencies": {
"stacktrace-parser": "^0.1.10"
}
@@ -3201,7 +2983,6 @@
"version": "4.9.3",
"resolved": "https://registry.npmjs.org/@honeybadger-io/js/-/js-4.9.3.tgz",
"integrity": "sha512-pk76zGxEWLj0I3h/ujzrWAkJJ5rq3YMJwjk4Bs+5ToJMD5Lwpb7B5dW56ivVCfffDPkFXDpXvhVJ8BcycrwRcA==",
- "dev": true,
"dependencies": {
"@honeybadger-io/core": "^4.9.2",
"@types/aws-lambda": "^8.10.89",
@@ -3212,7 +2993,6 @@
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@iiif/parser/-/parser-1.1.2.tgz",
"integrity": "sha512-yjbhSWBB+cWHjAgeWlMYgNydMxDGU1BO3JnmgxCclMcfi59JDsKHMXpgZpCNw+svcirBtIMD2u70KPFinr2pUA==",
- "dev": true,
"dependencies": {
"@iiif/presentation-2": "^1.0.4",
"@iiif/presentation-3": "^1.1.3",
@@ -3223,7 +3003,6 @@
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@iiif/presentation-2/-/presentation-2-1.0.4.tgz",
"integrity": "sha512-hJakpq62VBajesLJrYPtFm6hcn6c/HkKP7CmKZ5atuzu40m0nifWYsqigR1l9sZGvhhHb/DRshPmiW/0GNrJoA==",
- "dev": true,
"peerDependencies": {
"@iiif/presentation-3": "*"
}
@@ -3232,7 +3011,6 @@
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/@iiif/presentation-3/-/presentation-3-1.1.3.tgz",
"integrity": "sha512-Ek+25nkQouo0pXAqCsWYbAeS4jLDEBQA7iul2jzgnvoJrucxDQN2lXyNLgOUDRqpTdSqJ69iz5lm6DLaxil+Nw==",
- "dev": true,
"dependencies": {
"@types/geojson": "^7946.0.7"
}
@@ -3241,7 +3019,6 @@
"version": "0.9.22",
"resolved": "https://registry.npmjs.org/@iiif/vault/-/vault-0.9.22.tgz",
"integrity": "sha512-HaFX1u9TSZha0i/esZR5sZzydZgjZgITeO0JrT1qXm+qSaB1Oc0PRNzatXW48Xa0q3PPYbBB71zCL1/D1i1i1A==",
- "dev": true,
"dependencies": {
"@iiif/parser": "^1.1.2",
"@iiif/presentation-2": "1.*",
@@ -3257,7 +3034,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-2.2.0.tgz",
"integrity": "sha512-wRlta7GuLWpTqtFfGo+nZyOO1vEvewdNR1R4rTxpC8XU6vG/NDyrFBhwLZsqg1NUoR1noVaXJPC/7ZK47QCySw==",
- "dev": true,
"dependencies": {
"@smithy/types": "^2.12.0",
"tslib": "^2.6.2"
@@ -3270,7 +3046,6 @@
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz",
"integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -3281,14 +3056,12 @@
"node_modules/@smithy/abort-controller/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/chunked-blob-reader": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@smithy/chunked-blob-reader/-/chunked-blob-reader-4.0.0.tgz",
"integrity": "sha512-jSqRnZvkT4egkq/7b6/QRCNXmmYVcHwnJldqJ3IhVpQE2atObVJ137xmGeuGFhjFUr8gCEVAOKwSY79OvpbDaQ==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -3297,7 +3070,6 @@
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-3.0.1.tgz",
"integrity": "sha512-VEYtPvh5rs/xlyqpm5NRnfYLZn+q0SRPELbvBV+C/G7IQ+ouTuo+NKKa3ShG5OaFR8NYVMXls9hPYLTvIKKDrQ==",
- "dev": true,
"dependencies": {
"@smithy/util-base64": "^3.0.0",
"tslib": "^2.6.2"
@@ -3306,20 +3078,17 @@
"node_modules/@smithy/chunked-blob-reader-native/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/chunked-blob-reader/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/config-resolver": {
"version": "3.0.10",
"resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-3.0.10.tgz",
"integrity": "sha512-Uh0Sz9gdUuz538nvkPiyv1DZRX9+D15EKDtnQP5rYVAzM/dnYk3P8cg73jcxyOitPgT3mE3OVj7ky7sibzHWkw==",
- "dev": true,
"dependencies": {
"@smithy/node-config-provider": "^3.1.9",
"@smithy/types": "^3.6.0",
@@ -3334,14 +3103,12 @@
"node_modules/@smithy/config-resolver/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/core": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@smithy/core/-/core-2.5.1.tgz",
"integrity": "sha512-DujtuDA7BGEKExJ05W5OdxCoyekcKT3Rhg1ZGeiUWaz2BJIWXjZmsG/DIP4W48GHno7AQwRsaCb8NcBgH3QZpg==",
- "dev": true,
"dependencies": {
"@smithy/middleware-serde": "^3.0.8",
"@smithy/protocol-http": "^4.1.5",
@@ -3360,7 +3127,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3372,14 +3138,12 @@
"node_modules/@smithy/core/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/credential-provider-imds": {
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-3.2.5.tgz",
"integrity": "sha512-4FTQGAsuwqTzVMmiRVTn0RR9GrbRfkP0wfu/tXWVHd2LgNpTY0uglQpIScXK4NaEyXbB3JmZt8gfVqO50lP8wg==",
- "dev": true,
"dependencies": {
"@smithy/node-config-provider": "^3.1.9",
"@smithy/property-provider": "^3.1.8",
@@ -3394,14 +3158,12 @@
"node_modules/@smithy/credential-provider-imds/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/eventstream-codec": {
"version": "3.1.7",
"resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-3.1.7.tgz",
"integrity": "sha512-kVSXScIiRN7q+s1x7BrQtZ1Aa9hvvP9FeCqCdBxv37GimIHgBCOnZ5Ip80HLt0DhnAKpiobFdGqTFgbaJNrazA==",
- "dev": true,
"dependencies": {
"@aws-crypto/crc32": "5.2.0",
"@smithy/types": "^3.6.0",
@@ -3412,14 +3174,12 @@
"node_modules/@smithy/eventstream-codec/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/eventstream-serde-browser": {
"version": "3.0.11",
"resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-3.0.11.tgz",
"integrity": "sha512-Pd1Wnq3CQ/v2SxRifDUihvpXzirJYbbtXfEnnLV/z0OGCTx/btVX74P86IgrZkjOydOASBGXdPpupYQI+iO/6A==",
- "dev": true,
"dependencies": {
"@smithy/eventstream-serde-universal": "^3.0.10",
"@smithy/types": "^3.6.0",
@@ -3432,14 +3192,12 @@
"node_modules/@smithy/eventstream-serde-browser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/eventstream-serde-config-resolver": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.0.8.tgz",
"integrity": "sha512-zkFIG2i1BLbfoGQnf1qEeMqX0h5qAznzaZmMVNnvPZz9J5AWBPkOMckZWPedGUPcVITacwIdQXoPcdIQq5FRcg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3451,14 +3209,12 @@
"node_modules/@smithy/eventstream-serde-config-resolver/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/eventstream-serde-node": {
"version": "3.0.10",
"resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-3.0.10.tgz",
"integrity": "sha512-hjpU1tIsJ9qpcoZq9zGHBJPBOeBGYt+n8vfhDwnITPhEre6APrvqq/y3XMDEGUT2cWQ4ramNqBPRbx3qn55rhw==",
- "dev": true,
"dependencies": {
"@smithy/eventstream-serde-universal": "^3.0.10",
"@smithy/types": "^3.6.0",
@@ -3471,14 +3227,12 @@
"node_modules/@smithy/eventstream-serde-node/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/eventstream-serde-universal": {
"version": "3.0.10",
"resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-3.0.10.tgz",
"integrity": "sha512-ewG1GHbbqsFZ4asaq40KmxCmXO+AFSM1b+DcO2C03dyJj/ZH71CiTg853FSE/3SHK9q3jiYQIFjlGSwfxQ9kww==",
- "dev": true,
"dependencies": {
"@smithy/eventstream-codec": "^3.1.7",
"@smithy/types": "^3.6.0",
@@ -3491,14 +3245,12 @@
"node_modules/@smithy/eventstream-serde-universal/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/fetch-http-handler": {
"version": "3.2.9",
"resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-3.2.9.tgz",
"integrity": "sha512-hYNVQOqhFQ6vOpenifFME546f0GfJn2OiQ3M0FDmuUu8V/Uiwy2wej7ZXxFBNqdx0R5DZAqWM1l6VRhGz8oE6A==",
- "dev": true,
"dependencies": {
"@smithy/protocol-http": "^4.1.4",
"@smithy/querystring-builder": "^3.0.7",
@@ -3511,7 +3263,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3523,14 +3274,12 @@
"node_modules/@smithy/fetch-http-handler/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/hash-blob-browser": {
"version": "3.1.7",
"resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-3.1.7.tgz",
"integrity": "sha512-4yNlxVNJifPM5ThaA5HKnHkn7JhctFUHvcaz6YXxHlYOSIrzI6VKQPTN8Gs1iN5nqq9iFcwIR9THqchUCouIfg==",
- "dev": true,
"dependencies": {
"@smithy/chunked-blob-reader": "^4.0.0",
"@smithy/chunked-blob-reader-native": "^3.0.1",
@@ -3541,14 +3290,12 @@
"node_modules/@smithy/hash-blob-browser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/hash-node": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-3.0.8.tgz",
"integrity": "sha512-tlNQYbfpWXHimHqrvgo14DrMAgUBua/cNoz9fMYcDmYej7MAmUcjav/QKQbFc3NrcPxeJ7QClER4tWZmfwoPng==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"@smithy/util-buffer-from": "^3.0.0",
@@ -3562,14 +3309,12 @@
"node_modules/@smithy/hash-node/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/hash-stream-node": {
"version": "3.1.7",
"resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-3.1.7.tgz",
"integrity": "sha512-xMAsvJ3hLG63lsBVi1Hl6BBSfhd8/Qnp8fC06kjOpJvyyCEXdwHITa5Kvdsk6gaAXLhbZMhQMIGvgUbfnJDP6Q==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"@smithy/util-utf8": "^3.0.0",
@@ -3582,14 +3327,12 @@
"node_modules/@smithy/hash-stream-node/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/invalid-dependency": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-3.0.8.tgz",
"integrity": "sha512-7Qynk6NWtTQhnGTTZwks++nJhQ1O54Mzi7fz4PqZOiYXb4Z1Flpb2yRvdALoggTS8xjtohWUM+RygOtB30YL3Q==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3598,14 +3341,12 @@
"node_modules/@smithy/invalid-dependency/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/is-array-buffer": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-3.0.0.tgz",
"integrity": "sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -3616,14 +3357,12 @@
"node_modules/@smithy/is-array-buffer/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/md5-js": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-3.0.8.tgz",
"integrity": "sha512-LwApfTK0OJ/tCyNUXqnWCKoE2b4rDSr4BJlDAVCkiWYeHESr+y+d5zlAanuLW6fnitVJRD/7d9/kN/ZM9Su4mA==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"@smithy/util-utf8": "^3.0.0",
@@ -3633,14 +3372,12 @@
"node_modules/@smithy/md5-js/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/middleware-content-length": {
"version": "3.0.10",
"resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-3.0.10.tgz",
"integrity": "sha512-T4dIdCs1d/+/qMpwhJ1DzOhxCZjZHbHazEPJWdB4GDi2HjIZllVzeBEcdJUN0fomV8DURsgOyrbEUzg3vzTaOg==",
- "dev": true,
"dependencies": {
"@smithy/protocol-http": "^4.1.5",
"@smithy/types": "^3.6.0",
@@ -3654,7 +3391,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3666,14 +3402,12 @@
"node_modules/@smithy/middleware-content-length/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/middleware-endpoint": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-3.2.1.tgz",
"integrity": "sha512-wWO3xYmFm6WRW8VsEJ5oU6h7aosFXfszlz3Dj176pTij6o21oZnzkCLzShfmRaaCHDkBXWBdO0c4sQAvLFP6zA==",
- "dev": true,
"dependencies": {
"@smithy/core": "^2.5.1",
"@smithy/middleware-serde": "^3.0.8",
@@ -3691,14 +3425,12 @@
"node_modules/@smithy/middleware-endpoint/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/middleware-retry": {
"version": "3.0.25",
"resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-3.0.25.tgz",
"integrity": "sha512-m1F70cPaMBML4HiTgCw5I+jFNtjgz5z5UdGnUbG37vw6kh4UvizFYjqJGHvicfgKMkDL6mXwyPp5mhZg02g5sg==",
- "dev": true,
"dependencies": {
"@smithy/node-config-provider": "^3.1.9",
"@smithy/protocol-http": "^4.1.5",
@@ -3718,7 +3450,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3730,14 +3461,12 @@
"node_modules/@smithy/middleware-retry/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/middleware-serde": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-3.0.8.tgz",
"integrity": "sha512-Xg2jK9Wc/1g/MBMP/EUn2DLspN8LNt+GMe7cgF+Ty3vl+Zvu+VeZU5nmhveU+H8pxyTsjrAkci8NqY6OuvZnjA==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3749,14 +3478,12 @@
"node_modules/@smithy/middleware-serde/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/middleware-stack": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-3.0.8.tgz",
"integrity": "sha512-d7ZuwvYgp1+3682Nx0MD3D/HtkmZd49N3JUndYWQXfRZrYEnCWYc8BHcNmVsPAp9gKvlurdg/mubE6b/rPS9MA==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3768,14 +3495,12 @@
"node_modules/@smithy/middleware-stack/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/node-config-provider": {
"version": "3.1.9",
"resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz",
"integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==",
- "dev": true,
"dependencies": {
"@smithy/property-provider": "^3.1.8",
"@smithy/shared-ini-file-loader": "^3.1.9",
@@ -3789,14 +3514,12 @@
"node_modules/@smithy/node-config-provider/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/node-http-handler": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-2.5.0.tgz",
"integrity": "sha512-mVGyPBzkkGQsPoxQUbxlEfRjrj6FPyA3u3u2VXGr9hT8wilsoQdZdvKpMBFMB8Crfhv5dNkKHIW0Yyuc7eABqA==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^2.2.0",
"@smithy/protocol-http": "^3.3.0",
@@ -3812,7 +3535,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-2.2.0.tgz",
"integrity": "sha512-L1kSeviUWL+emq3CUVSgdogoM/D9QMFaqxL/dd0X7PCNWmPXqt+ExtrBjqT0V7HLN03Vs9SuiLrG3zy3JGnE5A==",
- "dev": true,
"dependencies": {
"@smithy/types": "^2.12.0",
"@smithy/util-uri-escape": "^2.2.0",
@@ -3826,7 +3548,6 @@
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz",
"integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -3838,7 +3559,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-2.2.0.tgz",
"integrity": "sha512-jtmJMyt1xMD/d8OtbVJ2gFZOSKc+ueYJZPW20ULW1GOp/q/YIM0wNh+u8ZFao9UaIGz4WoPW8hC64qlWLIfoDA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -3849,14 +3569,12 @@
"node_modules/@smithy/node-http-handler/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/property-provider": {
"version": "3.1.8",
"resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-3.1.8.tgz",
"integrity": "sha512-ukNUyo6rHmusG64lmkjFeXemwYuKge1BJ8CtpVKmrxQxc6rhUX0vebcptFA9MmrGsnLhwnnqeH83VTU9hwOpjA==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3868,14 +3586,12 @@
"node_modules/@smithy/property-provider/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/protocol-http": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-3.3.0.tgz",
"integrity": "sha512-Xy5XK1AFWW2nlY/biWZXu6/krgbaf2dg0q492D8M5qthsnU2H+UgFeZLbM76FnH7s6RO/xhQRkj+T6KBO3JzgQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^2.12.0",
"tslib": "^2.6.2"
@@ -3888,7 +3604,6 @@
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz",
"integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -3899,14 +3614,12 @@
"node_modules/@smithy/protocol-http/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/querystring-builder": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-3.0.8.tgz",
"integrity": "sha512-btYxGVqFUARbUrN6VhL9c3dnSviIwBYD9Rz1jHuN1hgh28Fpv2xjU1HeCeDJX68xctz7r4l1PBnFhGg1WBBPuA==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"@smithy/util-uri-escape": "^3.0.0",
@@ -3919,14 +3632,12 @@
"node_modules/@smithy/querystring-builder/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/querystring-parser": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-3.0.8.tgz",
"integrity": "sha512-BtEk3FG7Ks64GAbt+JnKqwuobJNX8VmFLBsKIwWr1D60T426fGrV2L3YS5siOcUhhp6/Y6yhBw1PSPxA5p7qGg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3938,14 +3649,12 @@
"node_modules/@smithy/querystring-parser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/service-error-classification": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-3.0.8.tgz",
"integrity": "sha512-uEC/kCCFto83bz5ZzapcrgGqHOh/0r69sZ2ZuHlgoD5kYgXJEThCoTuw/y1Ub3cE7aaKdznb+jD9xRPIfIwD7g==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0"
},
@@ -3957,7 +3666,6 @@
"version": "3.1.9",
"resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz",
"integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -3969,14 +3677,12 @@
"node_modules/@smithy/shared-ini-file-loader/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/signature-v4": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-2.3.0.tgz",
"integrity": "sha512-ui/NlpILU+6HAQBfJX8BBsDXuKSNrjTSuOYArRblcrErwKFutjrCNb/OExfVRyj9+26F9J+ZmfWT+fKWuDrH3Q==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"@smithy/types": "^2.12.0",
@@ -3994,7 +3700,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4006,7 +3711,6 @@
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz",
"integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4018,7 +3722,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
@@ -4031,7 +3734,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-2.2.0.tgz",
"integrity": "sha512-7iKXR+/4TpLK194pVjKiasIyqMtTYJsgKgM242Y9uzt5dhHnUDvMNb+3xIhRJ9QhvqGii/5cRUt4fJn3dtXNHQ==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4043,7 +3745,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-2.2.0.tgz",
"integrity": "sha512-L1qpleXf9QD6LwLCJ5jddGkgWyuSvWBkJwWAZ6kFkdifdso+sk3L3O1HdmPvCdnCK3IS4qWyPxev01QMnfHSBw==",
- "dev": true,
"dependencies": {
"@smithy/types": "^2.12.0",
"tslib": "^2.6.2"
@@ -4056,7 +3757,6 @@
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-2.2.0.tgz",
"integrity": "sha512-jtmJMyt1xMD/d8OtbVJ2gFZOSKc+ueYJZPW20ULW1GOp/q/YIM0wNh+u8ZFao9UaIGz4WoPW8hC64qlWLIfoDA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4068,7 +3768,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
@@ -4080,14 +3779,12 @@
"node_modules/@smithy/signature-v4/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/smithy-client": {
"version": "3.4.2",
"resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-3.4.2.tgz",
"integrity": "sha512-dxw1BDxJiY9/zI3cBqfVrInij6ShjpV4fmGHesGZZUiP9OSE/EVfdwdRz0PgvkEvrZHpsj2htRaHJfftE8giBA==",
- "dev": true,
"dependencies": {
"@smithy/core": "^2.5.1",
"@smithy/middleware-endpoint": "^3.2.1",
@@ -4105,7 +3802,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -4117,14 +3813,12 @@
"node_modules/@smithy/smithy-client/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/types": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.6.0.tgz",
"integrity": "sha512-8VXK/KzOHefoC65yRgCn5vG1cysPJjHnOVt9d0ybFQSmJgQj152vMn4EkYhGuaOmnnZvCPav/KnYyE6/KsNZ2w==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4135,14 +3829,12 @@
"node_modules/@smithy/types/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/url-parser": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-3.0.8.tgz",
"integrity": "sha512-4FdOhwpTW7jtSFWm7SpfLGKIBC9ZaTKG5nBF0wK24aoQKQyDIKUw3+KFWCQ9maMzrgTJIuOvOnsV2lLGW5XjTg==",
- "dev": true,
"dependencies": {
"@smithy/querystring-parser": "^3.0.8",
"@smithy/types": "^3.6.0",
@@ -4152,14 +3844,12 @@
"node_modules/@smithy/url-parser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-base64": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-3.0.0.tgz",
"integrity": "sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^3.0.0",
"@smithy/util-utf8": "^3.0.0",
@@ -4172,14 +3862,12 @@
"node_modules/@smithy/util-base64/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-body-length-browser": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-3.0.0.tgz",
"integrity": "sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
}
@@ -4187,14 +3875,12 @@
"node_modules/@smithy/util-body-length-browser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-body-length-node": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-3.0.0.tgz",
"integrity": "sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4205,14 +3891,12 @@
"node_modules/@smithy/util-body-length-node/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-buffer-from": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-3.0.0.tgz",
"integrity": "sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA==",
- "dev": true,
"dependencies": {
"@smithy/is-array-buffer": "^3.0.0",
"tslib": "^2.6.2"
@@ -4224,14 +3908,12 @@
"node_modules/@smithy/util-buffer-from/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-config-provider": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-3.0.0.tgz",
"integrity": "sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4242,14 +3924,12 @@
"node_modules/@smithy/util-config-provider/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-defaults-mode-browser": {
"version": "3.0.25",
"resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-3.0.25.tgz",
"integrity": "sha512-fRw7zymjIDt6XxIsLwfJfYUfbGoO9CmCJk6rjJ/X5cd20+d2Is7xjU5Kt/AiDt6hX8DAf5dztmfP5O82gR9emA==",
- "dev": true,
"dependencies": {
"@smithy/property-provider": "^3.1.8",
"@smithy/smithy-client": "^3.4.2",
@@ -4264,14 +3944,12 @@
"node_modules/@smithy/util-defaults-mode-browser/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-defaults-mode-node": {
"version": "3.0.25",
"resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-3.0.25.tgz",
"integrity": "sha512-H3BSZdBDiVZGzt8TG51Pd2FvFO0PAx/A0mJ0EH8a13KJ6iUCdYnw/Dk/MdC1kTd0eUuUGisDFaxXVXo4HHFL1g==",
- "dev": true,
"dependencies": {
"@smithy/config-resolver": "^3.0.10",
"@smithy/credential-provider-imds": "^3.2.5",
@@ -4288,14 +3966,12 @@
"node_modules/@smithy/util-defaults-mode-node/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-endpoints": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-2.1.4.tgz",
"integrity": "sha512-kPt8j4emm7rdMWQyL0F89o92q10gvCUa6sBkBtDJ7nV2+P7wpXczzOfoDJ49CKXe5CCqb8dc1W+ZdLlrKzSAnQ==",
- "dev": true,
"dependencies": {
"@smithy/node-config-provider": "^3.1.9",
"@smithy/types": "^3.6.0",
@@ -4308,14 +3984,12 @@
"node_modules/@smithy/util-endpoints/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-hex-encoding": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz",
"integrity": "sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4326,14 +4000,12 @@
"node_modules/@smithy/util-hex-encoding/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-middleware": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.8.tgz",
"integrity": "sha512-p7iYAPaQjoeM+AKABpYWeDdtwQNxasr4aXQEA/OmbOaug9V0odRVDy3Wx4ci8soljE/JXQo+abV0qZpW8NX0yA==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -4345,14 +4017,12 @@
"node_modules/@smithy/util-middleware/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-retry": {
"version": "3.0.8",
"resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-3.0.8.tgz",
"integrity": "sha512-TCEhLnY581YJ+g1x0hapPz13JFqzmh/pMWL2KEFASC51qCfw3+Y47MrTmea4bUE5vsdxQ4F6/KFbUeSz22Q1ow==",
- "dev": true,
"dependencies": {
"@smithy/service-error-classification": "^3.0.8",
"@smithy/types": "^3.6.0",
@@ -4365,14 +4035,12 @@
"node_modules/@smithy/util-retry/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-stream": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-3.2.1.tgz",
"integrity": "sha512-R3ufuzJRxSJbE58K9AEnL/uSZyVdHzud9wLS8tIbXclxKzoe09CRohj2xV8wpx5tj7ZbiJaKYcutMm1eYgz/0A==",
- "dev": true,
"dependencies": {
"@smithy/fetch-http-handler": "^4.0.0",
"@smithy/node-http-handler": "^3.2.5",
@@ -4391,7 +4059,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -4404,7 +4071,6 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-4.0.0.tgz",
"integrity": "sha512-MLb1f5tbBO2X6K4lMEKJvxeLooyg7guq48C2zKr4qM7F2Gpkz4dc+hdSgu77pCJ76jVqFBjZczHYAs6dp15N+g==",
- "dev": true,
"dependencies": {
"@smithy/protocol-http": "^4.1.5",
"@smithy/querystring-builder": "^3.0.8",
@@ -4417,7 +4083,6 @@
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz",
"integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/protocol-http": "^4.1.5",
@@ -4433,7 +4098,6 @@
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz",
"integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -4445,14 +4109,12 @@
"node_modules/@smithy/util-stream/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-uri-escape": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz",
"integrity": "sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg==",
- "dev": true,
"dependencies": {
"tslib": "^2.6.2"
},
@@ -4463,14 +4125,12 @@
"node_modules/@smithy/util-uri-escape/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-utf8": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz",
"integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==",
- "dev": true,
"dependencies": {
"@smithy/util-buffer-from": "^3.0.0",
"tslib": "^2.6.2"
@@ -4482,14 +4142,12 @@
"node_modules/@smithy/util-utf8/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@smithy/util-waiter": {
"version": "3.1.7",
"resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-3.1.7.tgz",
"integrity": "sha512-d5yGlQtmN/z5eoTtIYgkvOw27US2Ous4VycnXatyoImIF9tzlcpnKqQ/V7qhvJmb2p6xZne1NopCLakdTnkBBQ==",
- "dev": true,
"dependencies": {
"@smithy/abort-controller": "^3.1.6",
"@smithy/types": "^3.6.0",
@@ -4503,7 +4161,6 @@
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz",
"integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==",
- "dev": true,
"dependencies": {
"@smithy/types": "^3.6.0",
"tslib": "^2.6.2"
@@ -4515,20 +4172,17 @@
"node_modules/@smithy/util-waiter/node_modules/tslib": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz",
- "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==",
- "dev": true
+ "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA=="
},
"node_modules/@types/aws-lambda": {
"version": "8.10.145",
"resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.145.tgz",
- "integrity": "sha512-dtByW6WiFk5W5Jfgz1VM+YPA21xMXTuSFoLYIDY0L44jDLLflVPtZkYuu3/YxpGcvjzKFBZLU+GyKjR0HOYtyw==",
- "dev": true
+ "integrity": "sha512-dtByW6WiFk5W5Jfgz1VM+YPA21xMXTuSFoLYIDY0L44jDLLflVPtZkYuu3/YxpGcvjzKFBZLU+GyKjR0HOYtyw=="
},
"node_modules/@types/body-parser": {
"version": "1.19.5",
"resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz",
"integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==",
- "dev": true,
"dependencies": {
"@types/connect": "*",
"@types/node": "*"
@@ -4538,7 +4192,6 @@
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz",
"integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==",
- "dev": true,
"dependencies": {
"@types/node": "*"
}
@@ -4547,7 +4200,6 @@
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
"integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
- "dev": true,
"dependencies": {
"@types/body-parser": "*",
"@types/express-serve-static-core": "^4.17.33",
@@ -4559,7 +4211,6 @@
"version": "4.19.6",
"resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz",
"integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==",
- "dev": true,
"dependencies": {
"@types/node": "*",
"@types/qs": "*",
@@ -4570,26 +4221,22 @@
"node_modules/@types/geojson": {
"version": "7946.0.14",
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.14.tgz",
- "integrity": "sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg==",
- "dev": true
+ "integrity": "sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg=="
},
"node_modules/@types/http-errors": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz",
- "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==",
- "dev": true
+ "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA=="
},
"node_modules/@types/mime": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
- "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==",
- "dev": true
+ "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w=="
},
"node_modules/@types/node": {
"version": "22.7.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.9.tgz",
"integrity": "sha512-jrTfRC7FM6nChvU7X2KqcrgquofrWLFDeYC1hKfwNWomVvrn7JIksqf344WN2X/y8xrgqBd2dJATZV4GbatBfg==",
- "dev": true,
"dependencies": {
"undici-types": "~6.19.2"
}
@@ -4597,20 +4244,17 @@
"node_modules/@types/qs": {
"version": "6.9.16",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.16.tgz",
- "integrity": "sha512-7i+zxXdPD0T4cKDuxCUXJ4wHcsJLwENa6Z3dCu8cfCK743OGy5Nu1RmAGqDPsoTDINVEcdXKRvR/zre+P2Ku1A==",
- "dev": true
+ "integrity": "sha512-7i+zxXdPD0T4cKDuxCUXJ4wHcsJLwENa6Z3dCu8cfCK743OGy5Nu1RmAGqDPsoTDINVEcdXKRvR/zre+P2Ku1A=="
},
"node_modules/@types/range-parser": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz",
- "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==",
- "dev": true
+ "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ=="
},
"node_modules/@types/send": {
"version": "0.17.4",
"resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz",
"integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==",
- "dev": true,
"dependencies": {
"@types/mime": "^1",
"@types/node": "*"
@@ -4620,7 +4264,6 @@
"version": "1.15.7",
"resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz",
"integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==",
- "dev": true,
"dependencies": {
"@types/http-errors": "*",
"@types/node": "*",
@@ -4630,20 +4273,17 @@
"node_modules/@types/uuid": {
"version": "9.0.8",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz",
- "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==",
- "dev": true
+ "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA=="
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
- "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
- "dev": true
+ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/axios": {
"version": "1.7.7",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz",
"integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==",
- "dev": true,
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
@@ -4653,20 +4293,17 @@
"node_modules/bowser": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz",
- "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==",
- "dev": true
+ "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA=="
},
"node_modules/buffer-equal-constant-time": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
- "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==",
- "dev": true
+ "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
- "dev": true,
"dependencies": {
"delayed-stream": "~1.0.0"
},
@@ -4678,7 +4315,6 @@
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz",
"integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==",
- "dev": true,
"engines": {
"node": ">= 0.6"
}
@@ -4687,7 +4323,6 @@
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz",
"integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==",
- "dev": true,
"engines": {
"node": ">= 12"
}
@@ -4696,7 +4331,6 @@
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
"integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
- "dev": true,
"dependencies": {
"ms": "^2.1.3"
},
@@ -4713,7 +4347,6 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
- "dev": true,
"engines": {
"node": ">=0.4.0"
}
@@ -4722,7 +4355,6 @@
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-5.0.0.tgz",
"integrity": "sha512-rlpvsxUtM0PQvy9iZe640/IWwWYyBsTApREbA1pHOpmOUIl9MkP/U4z7vTtg4Oaojvqhxt7sdufnT0EzGaR31g==",
- "dev": true,
"engines": {
"node": ">=4"
}
@@ -4731,7 +4363,6 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-2.1.0.tgz",
"integrity": "sha512-CwffZFvlJffUg9zZA0uqrjQayUTC8ob94pnr5sFwaVv3IOmkfUHcWH+jXaQK3askE51Cqe8/9Ql/0uXNwqZ8Zg==",
- "dev": true,
"engines": {
"node": ">=0.10.0"
}
@@ -4740,7 +4371,6 @@
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz",
"integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==",
- "dev": true,
"dependencies": {
"safe-buffer": "^5.0.1"
}
@@ -4749,7 +4379,6 @@
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz",
"integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==",
- "dev": true,
"funding": [
{
"type": "github",
@@ -4771,7 +4400,6 @@
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
- "dev": true,
"funding": [
{
"type": "github",
@@ -4794,7 +4422,6 @@
"version": "1.15.9",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz",
"integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
- "dev": true,
"funding": [
{
"type": "individual",
@@ -4814,7 +4441,6 @@
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
- "dev": true,
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
@@ -4828,7 +4454,6 @@
"version": "4.0.10",
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
- "dev": true,
"dependencies": {
"fetch-blob": "^3.1.2"
},
@@ -4839,14 +4464,12 @@
"node_modules/http-status-codes": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/http-status-codes/-/http-status-codes-2.3.0.tgz",
- "integrity": "sha512-RJ8XvFvpPM/Dmc5SV+dC4y5PCeOhT3x1Hq0NU3rjGeg5a/CqlhZ7uudknPwZFz4aeAXDcbAyaeP7GAo9lvngtA==",
- "dev": true
+ "integrity": "sha512-RJ8XvFvpPM/Dmc5SV+dC4y5PCeOhT3x1Hq0NU3rjGeg5a/CqlhZ7uudknPwZFz4aeAXDcbAyaeP7GAo9lvngtA=="
},
"node_modules/iiif-builder": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/iiif-builder/-/iiif-builder-1.0.7.tgz",
"integrity": "sha512-PZl7ayHGA9q+6wgCVlIOT3EnfqpbOD6JVjTkcJdMgO9t9jjbfBrO2jhs+3XRue4bg8d7i2opZARPjS9K1qRXaQ==",
- "dev": true,
"dependencies": {
"@iiif/parser": "1.x",
"@iiif/presentation-3": "1.x",
@@ -4857,7 +4480,6 @@
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/jsonschema/-/jsonschema-1.4.1.tgz",
"integrity": "sha512-S6cATIPVv1z0IlxdN+zUk5EPjkGCdnhN4wVSBlvoUO1tOLJootbo9CquNJmbIh4yikWHiUedhRYrNPn1arpEmQ==",
- "dev": true,
"engines": {
"node": "*"
}
@@ -4866,7 +4488,6 @@
"version": "8.5.1",
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz",
"integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==",
- "dev": true,
"dependencies": {
"jws": "^3.2.2",
"lodash.includes": "^4.3.0",
@@ -4888,7 +4509,6 @@
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz",
"integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==",
- "dev": true,
"dependencies": {
"buffer-equal-constant-time": "1.0.1",
"ecdsa-sig-formatter": "1.0.11",
@@ -4899,7 +4519,6 @@
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz",
"integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==",
- "dev": true,
"dependencies": {
"jwa": "^1.4.1",
"safe-buffer": "^5.0.1"
@@ -4908,56 +4527,47 @@
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
- "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
- "dev": true
+ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/lodash.includes": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
- "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==",
- "dev": true
+ "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="
},
"node_modules/lodash.isboolean": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
- "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==",
- "dev": true
+ "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="
},
"node_modules/lodash.isinteger": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz",
- "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==",
- "dev": true
+ "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA=="
},
"node_modules/lodash.isnumber": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz",
- "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==",
- "dev": true
+ "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw=="
},
"node_modules/lodash.isplainobject": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
- "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
- "dev": true
+ "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="
},
"node_modules/lodash.isstring": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz",
- "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==",
- "dev": true
+ "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="
},
"node_modules/lodash.once": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz",
- "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==",
- "dev": true
+ "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="
},
"node_modules/lz-string": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz",
"integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==",
- "dev": true,
"bin": {
"lz-string": "bin/bin.js"
}
@@ -4966,7 +4576,6 @@
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
- "dev": true,
"engines": {
"node": ">= 0.6"
}
@@ -4975,7 +4584,6 @@
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
- "dev": true,
"dependencies": {
"mime-db": "1.52.0"
},
@@ -4987,7 +4595,6 @@
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
"integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
- "dev": true,
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@@ -4995,20 +4602,17 @@
"node_modules/mitt": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz",
- "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==",
- "dev": true
+ "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw=="
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
- "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
- "dev": true
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"node_modules/node-domexception": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
- "dev": true,
"funding": [
{
"type": "github",
@@ -5027,7 +4631,6 @@
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz",
"integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
- "dev": true,
"dependencies": {
"data-uri-to-buffer": "^4.0.0",
"fetch-blob": "^3.1.4",
@@ -5044,20 +4647,17 @@
"node_modules/parse-http-header": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/parse-http-header/-/parse-http-header-1.0.1.tgz",
- "integrity": "sha512-xOoH7vzokDoenX4e3c+4ik8lf30kq9Pawq20TH5uq3RURsIJquqFTE0gS7OAEE6nvMQzuP5OXxubYuN7YLsTiw==",
- "dev": true
+ "integrity": "sha512-xOoH7vzokDoenX4e3c+4ik8lf30kq9Pawq20TH5uq3RURsIJquqFTE0gS7OAEE6nvMQzuP5OXxubYuN7YLsTiw=="
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
- "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
- "dev": true
+ "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
},
"node_modules/redux": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/redux/-/redux-4.2.1.tgz",
"integrity": "sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w==",
- "dev": true,
"dependencies": {
"@babel/runtime": "^7.9.2"
}
@@ -5065,14 +4665,12 @@
"node_modules/regenerator-runtime": {
"version": "0.14.1",
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz",
- "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==",
- "dev": true
+ "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw=="
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
- "dev": true,
"funding": [
{
"type": "github",
@@ -5091,14 +4689,12 @@
"node_modules/sax": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz",
- "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==",
- "dev": true
+ "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg=="
},
"node_modules/semver": {
"version": "5.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
"integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
- "dev": true,
"bin": {
"semver": "bin/semver"
}
@@ -5107,7 +4703,6 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/sort-json/-/sort-json-2.0.1.tgz",
"integrity": "sha512-s8cs2bcsQCzo/P2T/uoU6Js4dS/jnX8+4xunziNoq9qmSpZNCrRIAIvp4avsz0ST18HycV4z/7myJ7jsHWB2XQ==",
- "dev": true,
"dependencies": {
"detect-indent": "^5.0.0",
"detect-newline": "^2.1.0",
@@ -5121,7 +4716,6 @@
"version": "0.1.10",
"resolved": "https://registry.npmjs.org/stacktrace-parser/-/stacktrace-parser-0.1.10.tgz",
"integrity": "sha512-KJP1OCML99+8fhOHxwwzyWrlUuVX5GQ0ZpJTd1DFXhdkrvg1szxfHhawXUZ3g9TkXORQd4/WG68jMlQZ2p8wlg==",
- "dev": true,
"dependencies": {
"type-fest": "^0.7.1"
},
@@ -5132,26 +4726,22 @@
"node_modules/strnum": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz",
- "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==",
- "dev": true
+ "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA=="
},
"node_modules/tiny-invariant": {
"version": "1.3.3",
"resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz",
- "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==",
- "dev": true
+ "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg=="
},
"node_modules/tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
- "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==",
- "dev": true
+ "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
},
"node_modules/type-fest": {
"version": "0.7.1",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.7.1.tgz",
"integrity": "sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg==",
- "dev": true,
"engines": {
"node": ">=8"
}
@@ -5160,7 +4750,6 @@
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/typesafe-actions/-/typesafe-actions-5.1.0.tgz",
"integrity": "sha512-bna6Yi1pRznoo6Bz1cE6btB/Yy8Xywytyfrzu/wc+NFW3ZF0I+2iCGImhBsoYYCOWuICtRO4yHcnDlzgo1AdNg==",
- "dev": true,
"engines": {
"node": ">= 4"
}
@@ -5168,14 +4757,12 @@
"node_modules/undici-types": {
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
- "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
- "dev": true
+ "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="
},
"node_modules/uuid": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz",
"integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==",
- "dev": true,
"funding": [
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
@@ -5188,7 +4775,6 @@
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
"integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==",
- "dev": true,
"engines": {
"node": ">= 8"
}
@@ -5197,7 +4783,6 @@
"version": "1.6.11",
"resolved": "https://registry.npmjs.org/xml-js/-/xml-js-1.6.11.tgz",
"integrity": "sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==",
- "dev": true,
"dependencies": {
"sax": "^1.2.4"
},
diff --git a/node/src/package.json b/api/src/package.json
similarity index 97%
rename from node/src/package.json
rename to api/src/package.json
index 6618e61a..65c08496 100644
--- a/node/src/package.json
+++ b/api/src/package.json
@@ -5,7 +5,7 @@
"repository": "https://github.com/nulib/dc-api-v2",
"author": "nulib",
"license": "Apache-2.0",
- "devDependencies": {
+ "dependencies": {
"@aws-crypto/sha256-browser": "^2.0.1",
"@aws-sdk/client-s3": "^3.565.0",
"@aws-sdk/client-secrets-manager": "^3.563.0",
diff --git a/api/template.yaml b/api/template.yaml
new file mode 100644
index 00000000..b3cb1c73
--- /dev/null
+++ b/api/template.yaml
@@ -0,0 +1,731 @@
+# Build and Deploy Template for DC API
+#
+# Note: Any comment starting with `#*` will be removed
+# at build time. This allows us to run without the
+# dependency layer in development without removing the
+# layer from the build.
+
+AWSTemplateFormatVersion: "2010-09-09"
+Transform:
+ - AWS::Serverless-2016-10-31
+ - AWS::LanguageExtensions
+Description: dc-api-v2 API
+Globals:
+ Function:
+ CodeUri: ./src
+ Runtime: nodejs20.x
+ Architectures:
+ - x86_64
+ MemorySize: 128
+ Timeout: 10
+ Environment:
+ Variables:
+ API_CONFIG_PREFIX: !Ref ApiConfigPrefix
+ API_TOKEN_NAME: !Ref ApiTokenName
+ DC_API_ENDPOINT: !Ref DcApiEndpoint
+ DC_URL: !Ref DcUrl
+ DEFAULT_SEARCH_SIZE: "100"
+ DEV_TEAM_NET_IDS: !Ref DevTeamNetIds
+ ENV_PREFIX: !Ref EnvironmentPrefix
+ HONEYBADGER_API_KEY: !Ref HoneybadgerApiKey
+ HONEYBADGER_ENV: !Ref HoneybadgerEnv
+ HONEYBADGER_REVISION: !Ref HoneybadgerRevision
+ READING_ROOM_IPS: !Ref ReadingRoomIPs
+ SECRETS_PATH: !Ref SecretsPath
+Parameters:
+ ApiConfigPrefix:
+ Type: String
+ Description: Secret Name for API Configuration (if not provided, will use SecretsPath)
+ Default: ""
+ ApiTokenName:
+ Type: String
+    Description: Name of the JWT that DC API issues
+ ApiTokenSecret:
+ Type: String
+ Description: Secret Key for Encrypting JWTs (must match IIIF server)
+ ChatWebSocketURI:
+ Type: String
+ Description: URI of the chat websocket API
+ Default: ""
+ CustomDomainCertificateArn:
+ Type: String
+ Description: SSL Certificate for the Custom Domain Name
+ CustomDomainZone:
+ Type: String
+ Description: Hosted Zone Name for Custom Domain
+ CustomDomainHost:
+ Type: String
+ Description: Hostname within ApiDomainName for Custom Domain
+ DcApiEndpoint:
+ Type: String
+ Description: URL for DC API
+ DcUrl:
+ Type: String
+ Description: URL of Digital Collections website
+ DeployAPI:
+ Type: String
+ Description: Whether to deploy all the API functions or only the ones required for other stacks
+ DeployAVDownload:
+ Type: String
+ Description: Set to true to deploy AVDownload
+ DeployChat:
+ Type: String
+ Description: Set to true to deploy Chat
+ DevTeamNetIds:
+ Type: String
+ Description: Northwestern NetIDs of the development team
+ EnvironmentPrefix:
+ Type: String
+ Description: Index Prefix
+ Default: ""
+ HoneybadgerApiKey:
+ Type: String
+ Description: Honeybadger API Key
+ Default: ""
+ HoneybadgerEnv:
+ Type: String
+ Description: Honeybadger Environment
+ Default: ""
+ HoneybadgerRevision:
+ Type: String
+ Description: Honeybadger Revision
+ Default: ""
+ PyramidBucket:
+ Type: String
+ Description: Meadow Pyramid Bucket
+ ReadingRoomIPs:
+ Type: String
+ Description: Comma-delimited list of IP addresses to serve private resources to
+ SecretsPath:
+ Type: String
+ Description: Prefix to use when loading configs from Secrets Manager
+ SecretsPolicy:
+ Type: String
+ Description: Policy that allows reading of required secrets
+ WriteConfigSecret:
+ Type: String
+ Description: Set to something other than "true" to _not_ write configuration secrets
+ Default: "true"
+Conditions:
+ CustomConfigSecret:
+ Fn::Not:
+ - Fn::Equals:
+ - !Ref ApiConfigPrefix
+ - ""
+ DeployAPI:
+ Fn::Equals: [!Ref DeployAPI, "true"]
+ DeployAVDownload:
+ Fn::Equals: [!Ref DeployAVDownload, "true"]
+ DeployChat:
+ Fn::Equals: [!Ref DeployChat, "true"]
+ WriteSecret:
+ Fn::Equals:
+ - !Ref WriteConfigSecret
+      - "true"
+Resources:
+ #* apiDependencies:
+ #* Type: AWS::Serverless::LayerVersion
+ #* Properties:
+ #* LayerName: !Sub "${AWS::StackName}-api-dependencies"
+ #* Description: Dependencies for API handlers
+ #* ContentUri: ./dependencies
+ #* CompatibleRuntimes:
+ #* - nodejs20.x
+ #* LicenseInfo: Apache-2.0
+ #* Metadata:
+ #* BuildMethod: nodejs20.x
+ # Configuration
+ apiConfiguration:
+ Type: AWS::SecretsManager::Secret
+ Condition: WriteSecret
+ Properties:
+ Name:
+ Fn::If:
+ - CustomConfigSecret
+ - !Sub "${ApiConfigPrefix}/config/dcapi"
+ - !Sub "${SecretsPath}/config/dcapi"
+ SecretString:
+ Fn::ToJsonString:
+ api_token_secret: !Ref ApiTokenSecret
+ base_url: !Sub "https://${CustomDomainHost}.${CustomDomainZone}/api/v2"
+ readIndexPolicy:
+ Type: AWS::IAM::ManagedPolicy
+ Properties:
+ PolicyDocument:
+ Version: 2012-10-17
+ Statement:
+ - Sid: ESHTTPPolicy
+ Effect: Allow
+ Action:
+ - es:ESHttp*
+ Resource: "*"
+ # V2 API
+ getAuthCallbackFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: handlers/get-auth-callback.handler
+ Description: NUSSO callback function.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /auth/callback
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /auth/callback
+ Method: HEAD
+ getAuthLoginFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: handlers/get-auth-login.handler
+ Description: Performs NUSSO login.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /auth/login
+ Method: GET
+ getAuthLogoutFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: handlers/get-auth-logout.handler
+ Description: Performs NUSSO logout.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /auth/logout
+ Method: GET
+ getAuthTokenFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: handlers/get-auth-token.handler
+ Description: Function to retrieve raw JWT.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /auth/token
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /auth/token
+ Method: HEAD
+ getAuthWhoAmIFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: handlers/get-auth-whoami.handler
+ Description: Exchanges valid JWT token for user information.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /auth/whoami
+ Method: GET
+ getCollectionsFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-collections.handler
+ Description: Gets Collections.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /collections
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /collections
+ Method: HEAD
+ getCollectionByIdFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-collection-by-id.handler
+ Description: Gets a Collection by id.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /collections/{id}
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /collections/{id}
+ Method: HEAD
+ getFileSetByIdFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-file-set-by-id.handler
+ Description: Gets a FileSet by id.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /file-sets/{id}
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /file-sets/{id}
+ Method: HEAD
+ getFileSetAuthFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-file-set-auth.handler
+ Description: Authorizes access to a file set.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Environment:
+ Variables:
+          USE_PROXIED_IP: "true"
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /file-sets/{id}/authorization
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /file-sets/{id}/authorization
+ Method: HEAD
+ getFileSetDownloadFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAVDownload
+ Properties:
+ Handler: handlers/get-file-set-download.handler
+ Description: Downloads a file set.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Environment:
+ Variables:
+ AV_DOWNLOAD_EMAIL_TEMPLATE: !Ref AWS::NoValue
+ AV_DOWNLOAD_STATE_MACHINE_ARN: !Ref AWS::NoValue
+ GET_DOWNLOAD_LINK_FUNCTION: !Ref AWS::NoValue
+ MEDIA_CONVERT_DESTINATION_BUCKET: !Ref AWS::NoValue
+ MEDIA_CONVERT_ENDPOINT: !Ref AWS::NoValue
+ MEDIA_CONVERT_JOB_QUEUE_ARN: !Ref AWS::NoValue
+ MEDIA_CONVERT_ROLE_ARN: !Ref AWS::NoValue
+ PYRAMID_BUCKET: !Ref AWS::NoValue
+ REPOSITORY_EMAIL: !Ref AWS::NoValue
+ SEND_TEMPLATED_EMAIL_FUNCTION: !Ref AWS::NoValue
+ START_AUDIO_TRANSCODE_FUNCTION: !Ref AWS::NoValue
+ START_TRANSCODE_FUNCTION: !Ref AWS::NoValue
+ STEP_FUNCTION_ENDPOINT: !Ref AWS::NoValue
+ STREAMING_BUCKET: !Ref AWS::NoValue
+ TRANSCODE_STATUS_FUNCTION: !Ref AWS::NoValue
+ Policies:
+ - !Ref SecretsPolicy
+ - Version: 2012-10-17
+ Statement:
+ - Sid: ExecuteAVDownloadStepFunction
+ Effect: Allow
+ Action:
+ - states:StartExecution
+ Resource:
+ - "*"
+ - Sid: BucketAccess
+ Effect: Allow
+ Action:
+ - s3:GetObject
+ Resource: !Sub "arn:aws:s3:::${PyramidBucket}/*"
+ - Sid: ESHTTPPolicy
+ Effect: Allow
+ Action:
+ - es:ESHttp*
+ Resource: "*"
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /file-sets/{id}/download
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /file-sets/{id}/download
+ Method: HEAD
+ getWorkAuthFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-work-auth.handler
+ Description: Authorizes access to a work.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Environment:
+ Variables:
+          USE_PROXIED_IP: "true"
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /works/{id}/authorization
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /works/{id}/authorization
+ Method: HEAD
+ getWorkByIdFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-work-by-id.handler
+ Description: Gets a Work by id.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Environment:
+ Variables:
+          USE_PROXIED_IP: "true"
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /works/{id}
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /works/{id}
+ Method: HEAD
+ getThumbnailFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-thumbnail.handler
+ Description: Gets a Work's representative thumbnail.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ CollectionApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /collections/{id}/thumbnail
+ Method: GET
+ CollectionApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /collections/{id}/thumbnail
+ Method: HEAD
+ WorkApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /works/{id}/thumbnail
+ Method: GET
+ WorkApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /works/{id}/thumbnail
+ Method: HEAD
+ getSimilarFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-similar.handler
+ Timeout: 100
+ Description: Gets works similar to a specific work.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ WorkApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /works/{id}/similar
+ Method: GET
+ WorkApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /works/{id}/similar
+ Method: HEAD
+ searchPostFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/search.postSearch
+ Description: Handles OpenSearch search requests, Works only by default.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ SearchApi:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /search
+ Method: POST
+ SearchWithModelsApi:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /search/{models}
+ Method: POST
+ searchGetFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/search.getSearch
+ Description: Handles paging requests
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ SearchApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /search
+ Method: GET
+ SearchApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /search
+ Method: HEAD
+ SearchWithModelsApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /search/{models}
+ Method: GET
+ SearchWithModelsApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /search/{models}
+ Method: HEAD
+ optionsFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: handlers/options-request.handler
+ Timeout: 3
+ Description: Handles all OPTIONS requests
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ Events:
+ Everything:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /{proxy+}
+ Method: OPTIONS
+ getSharedLinkByIdFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/get-shared-link-by-id.handler
+ Description: Gets a shared link document by id.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ ApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /shared-links/{id}
+ Method: GET
+ ApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /shared-links/{id}
+ Method: HEAD
+ oaiFunction:
+ Type: AWS::Serverless::Function
+ Condition: DeployAPI
+ Properties:
+ Handler: handlers/oai.handler
+ Description: Transforms works into OAI Records.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Timeout: 60
+ Policies:
+ - !Ref SecretsPolicy
+ - !Ref readIndexPolicy
+ Events:
+ GetApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /oai
+ Method: GET
+ GetApiHead:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /oai
+ Method: HEAD
+ PostApi:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /oai
+ Method: POST
+ chatWebsocketEndpoint:
+ Type: AWS::Serverless::Function
+ Condition: DeployChat
+ Properties:
+ Handler: handlers/get-chat-endpoint.handler
+ Description: Returns the URI of the chat websocket API.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Environment:
+ Variables:
+ WEBSOCKET_URI: !Ref ChatWebSocketURI
+ Policies:
+ - !Ref SecretsPolicy
+ Events:
+ GetApiGet:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /chat/endpoint
+ Method: GET
+ chatFeedback:
+ Type: AWS::Serverless::Function
+ Condition: DeployChat
+ Properties:
+ Environment:
+ Variables:
+ CHAT_FEEDBACK_BUCKET: !Ref chatFeedbackBucket
+ CHAT_FEEDBACK_TOPIC_ARN: !Ref chatFeedbackTopic
+ Handler: handlers/post-chat-feedback.handler
+ Description: Handles feedback from the chat.
+ #* Layers:
+ #* - !Ref apiDependencies
+ Policies:
+ - !Ref SecretsPolicy
+ - Version: 2012-10-17
+ Statement:
+ - Sid: BucketAccess
+ Effect: Allow
+ Action:
+ - s3:PutObject
+ Resource: !Sub "arn:aws:s3:::${chatFeedbackBucket}/*"
+ - Sid: TopicAccess
+ Effect: Allow
+ Action:
+ - sns:Publish
+ Resource: !Ref chatFeedbackTopic
+ Events:
+ PostApi:
+ Type: HttpApi
+ Properties:
+ ApiId: !Ref dcApi
+ Path: /chat/feedback
+ Method: POST
+ chatFeedbackBucket:
+ Type: 'AWS::S3::Bucket'
+ chatFeedbackTopic:
+ Type: AWS::SNS::Topic
+ Properties:
+ DisplayName: DC Chat Feedback
+ TopicName: !Sub "${AWS::StackName}-chat-feedback"
+ dcApi:
+ Type: AWS::Serverless::HttpApi
+ Properties:
+ StageName: v2
+ StageVariables:
+ basePath: api/v2
+ apiMapping:
+ Type: AWS::ApiGatewayV2::ApiMapping
+ Properties:
+ DomainName: !Sub "${CustomDomainHost}.${CustomDomainZone}"
+ ApiMappingKey: api/v2
+ ApiId: !Ref dcApi
+ Stage: !Ref dcApiv2Stage
diff --git a/node/test/.eslintrc b/api/test/.eslintrc
similarity index 100%
rename from node/test/.eslintrc
rename to api/test/.eslintrc
diff --git a/node/test/fixtures/mocks/collection-1234-no-thumbnail.json b/api/test/fixtures/mocks/collection-1234-no-thumbnail.json
similarity index 100%
rename from node/test/fixtures/mocks/collection-1234-no-thumbnail.json
rename to api/test/fixtures/mocks/collection-1234-no-thumbnail.json
diff --git a/node/test/fixtures/mocks/collection-1234-private-published.json b/api/test/fixtures/mocks/collection-1234-private-published.json
similarity index 100%
rename from node/test/fixtures/mocks/collection-1234-private-published.json
rename to api/test/fixtures/mocks/collection-1234-private-published.json
diff --git a/node/test/fixtures/mocks/collection-1234.json b/api/test/fixtures/mocks/collection-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/collection-1234.json
rename to api/test/fixtures/mocks/collection-1234.json
diff --git a/node/test/fixtures/mocks/collections.json b/api/test/fixtures/mocks/collections.json
similarity index 100%
rename from node/test/fixtures/mocks/collections.json
rename to api/test/fixtures/mocks/collections.json
diff --git a/node/test/fixtures/mocks/expired-shared-link-9101112.json b/api/test/fixtures/mocks/expired-shared-link-9101112.json
similarity index 100%
rename from node/test/fixtures/mocks/expired-shared-link-9101112.json
rename to api/test/fixtures/mocks/expired-shared-link-9101112.json
diff --git a/node/test/fixtures/mocks/fileset-1234.json b/api/test/fixtures/mocks/fileset-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/fileset-1234.json
rename to api/test/fixtures/mocks/fileset-1234.json
diff --git a/node/test/fixtures/mocks/fileset-audio-1234.json b/api/test/fixtures/mocks/fileset-audio-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/fileset-audio-1234.json
rename to api/test/fixtures/mocks/fileset-audio-1234.json
diff --git a/node/test/fixtures/mocks/fileset-baddata-1234.json b/api/test/fixtures/mocks/fileset-baddata-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/fileset-baddata-1234.json
rename to api/test/fixtures/mocks/fileset-baddata-1234.json
diff --git a/node/test/fixtures/mocks/fileset-netid-1234.json b/api/test/fixtures/mocks/fileset-netid-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/fileset-netid-1234.json
rename to api/test/fixtures/mocks/fileset-netid-1234.json
diff --git a/node/test/fixtures/mocks/fileset-restricted-1234.json b/api/test/fixtures/mocks/fileset-restricted-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/fileset-restricted-1234.json
rename to api/test/fixtures/mocks/fileset-restricted-1234.json
diff --git a/node/test/fixtures/mocks/fileset-restricted-unpublished-1234.json b/api/test/fixtures/mocks/fileset-restricted-unpublished-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/fileset-restricted-unpublished-1234.json
rename to api/test/fixtures/mocks/fileset-restricted-unpublished-1234.json
diff --git a/node/test/fixtures/mocks/fileset-unpublished-1234.json b/api/test/fixtures/mocks/fileset-unpublished-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/fileset-unpublished-1234.json
rename to api/test/fixtures/mocks/fileset-unpublished-1234.json
diff --git a/node/test/fixtures/mocks/fileset-video-1234.json b/api/test/fixtures/mocks/fileset-video-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/fileset-video-1234.json
rename to api/test/fixtures/mocks/fileset-video-1234.json
diff --git a/node/test/fixtures/mocks/missing-collection-1234.json b/api/test/fixtures/mocks/missing-collection-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/missing-collection-1234.json
rename to api/test/fixtures/mocks/missing-collection-1234.json
diff --git a/node/test/fixtures/mocks/missing-fileset-1234.json b/api/test/fixtures/mocks/missing-fileset-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/missing-fileset-1234.json
rename to api/test/fixtures/mocks/missing-fileset-1234.json
diff --git a/node/test/fixtures/mocks/missing-index.json b/api/test/fixtures/mocks/missing-index.json
similarity index 100%
rename from node/test/fixtures/mocks/missing-index.json
rename to api/test/fixtures/mocks/missing-index.json
diff --git a/node/test/fixtures/mocks/missing-shared-link-5678.json b/api/test/fixtures/mocks/missing-shared-link-5678.json
similarity index 100%
rename from node/test/fixtures/mocks/missing-shared-link-5678.json
rename to api/test/fixtures/mocks/missing-shared-link-5678.json
diff --git a/node/test/fixtures/mocks/missing-work-1234.json b/api/test/fixtures/mocks/missing-work-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/missing-work-1234.json
rename to api/test/fixtures/mocks/missing-work-1234.json
diff --git a/node/test/fixtures/mocks/oai-list-identifiers-sets.json b/api/test/fixtures/mocks/oai-list-identifiers-sets.json
similarity index 100%
rename from node/test/fixtures/mocks/oai-list-identifiers-sets.json
rename to api/test/fixtures/mocks/oai-list-identifiers-sets.json
diff --git a/node/test/fixtures/mocks/oai-sets.json b/api/test/fixtures/mocks/oai-sets.json
similarity index 100%
rename from node/test/fixtures/mocks/oai-sets.json
rename to api/test/fixtures/mocks/oai-sets.json
diff --git a/node/test/fixtures/mocks/private-unpublished-work-1234.json b/api/test/fixtures/mocks/private-unpublished-work-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/private-unpublished-work-1234.json
rename to api/test/fixtures/mocks/private-unpublished-work-1234.json
diff --git a/node/test/fixtures/mocks/private-work-1234.json b/api/test/fixtures/mocks/private-work-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/private-work-1234.json
rename to api/test/fixtures/mocks/private-work-1234.json
diff --git a/node/test/fixtures/mocks/real-search-event.json b/api/test/fixtures/mocks/real-search-event.json
similarity index 100%
rename from node/test/fixtures/mocks/real-search-event.json
rename to api/test/fixtures/mocks/real-search-event.json
diff --git a/node/test/fixtures/mocks/scroll-empty.json b/api/test/fixtures/mocks/scroll-empty.json
similarity index 100%
rename from node/test/fixtures/mocks/scroll-empty.json
rename to api/test/fixtures/mocks/scroll-empty.json
diff --git a/node/test/fixtures/mocks/scroll-missing.json b/api/test/fixtures/mocks/scroll-missing.json
similarity index 100%
rename from node/test/fixtures/mocks/scroll-missing.json
rename to api/test/fixtures/mocks/scroll-missing.json
diff --git a/node/test/fixtures/mocks/scroll.json b/api/test/fixtures/mocks/scroll.json
similarity index 100%
rename from node/test/fixtures/mocks/scroll.json
rename to api/test/fixtures/mocks/scroll.json
diff --git a/node/test/fixtures/mocks/search-earliest-record.json b/api/test/fixtures/mocks/search-earliest-record.json
similarity index 100%
rename from node/test/fixtures/mocks/search-earliest-record.json
rename to api/test/fixtures/mocks/search-earliest-record.json
diff --git a/node/test/fixtures/mocks/search-multiple-targets.json b/api/test/fixtures/mocks/search-multiple-targets.json
similarity index 100%
rename from node/test/fixtures/mocks/search-multiple-targets.json
rename to api/test/fixtures/mocks/search-multiple-targets.json
diff --git a/node/test/fixtures/mocks/search.json b/api/test/fixtures/mocks/search.json
similarity index 100%
rename from node/test/fixtures/mocks/search.json
rename to api/test/fixtures/mocks/search.json
diff --git a/node/test/fixtures/mocks/shared-link-1234.json b/api/test/fixtures/mocks/shared-link-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/shared-link-1234.json
rename to api/test/fixtures/mocks/shared-link-1234.json
diff --git a/node/test/fixtures/mocks/similar.json b/api/test/fixtures/mocks/similar.json
similarity index 100%
rename from node/test/fixtures/mocks/similar.json
rename to api/test/fixtures/mocks/similar.json
diff --git a/node/test/fixtures/mocks/thumbnail_full.jpg b/api/test/fixtures/mocks/thumbnail_full.jpg
similarity index 100%
rename from node/test/fixtures/mocks/thumbnail_full.jpg
rename to api/test/fixtures/mocks/thumbnail_full.jpg
diff --git a/node/test/fixtures/mocks/thumbnail_square.jpg b/api/test/fixtures/mocks/thumbnail_square.jpg
similarity index 100%
rename from node/test/fixtures/mocks/thumbnail_square.jpg
rename to api/test/fixtures/mocks/thumbnail_square.jpg
diff --git a/node/test/fixtures/mocks/unpublished-work-1234.json b/api/test/fixtures/mocks/unpublished-work-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/unpublished-work-1234.json
rename to api/test/fixtures/mocks/unpublished-work-1234.json
diff --git a/node/test/fixtures/mocks/work-1234-choice.json b/api/test/fixtures/mocks/work-1234-choice.json
similarity index 100%
rename from node/test/fixtures/mocks/work-1234-choice.json
rename to api/test/fixtures/mocks/work-1234-choice.json
diff --git a/node/test/fixtures/mocks/work-1234-no-collection.json b/api/test/fixtures/mocks/work-1234-no-collection.json
similarity index 100%
rename from node/test/fixtures/mocks/work-1234-no-collection.json
rename to api/test/fixtures/mocks/work-1234-no-collection.json
diff --git a/node/test/fixtures/mocks/work-1234-no-fileset-representative-image.json b/api/test/fixtures/mocks/work-1234-no-fileset-representative-image.json
similarity index 100%
rename from node/test/fixtures/mocks/work-1234-no-fileset-representative-image.json
rename to api/test/fixtures/mocks/work-1234-no-fileset-representative-image.json
diff --git a/node/test/fixtures/mocks/work-1234-no-fileset-width-height.json b/api/test/fixtures/mocks/work-1234-no-fileset-width-height.json
similarity index 100%
rename from node/test/fixtures/mocks/work-1234-no-fileset-width-height.json
rename to api/test/fixtures/mocks/work-1234-no-fileset-width-height.json
diff --git a/node/test/fixtures/mocks/work-1234-no-thumbnail.json b/api/test/fixtures/mocks/work-1234-no-thumbnail.json
similarity index 100%
rename from node/test/fixtures/mocks/work-1234-no-thumbnail.json
rename to api/test/fixtures/mocks/work-1234-no-thumbnail.json
diff --git a/node/test/fixtures/mocks/work-1234.json b/api/test/fixtures/mocks/work-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/work-1234.json
rename to api/test/fixtures/mocks/work-1234.json
diff --git a/node/test/fixtures/mocks/work-netid-1234.json b/api/test/fixtures/mocks/work-netid-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/work-netid-1234.json
rename to api/test/fixtures/mocks/work-netid-1234.json
diff --git a/node/test/fixtures/mocks/work-restricted-1234.json b/api/test/fixtures/mocks/work-restricted-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/work-restricted-1234.json
rename to api/test/fixtures/mocks/work-restricted-1234.json
diff --git a/node/test/fixtures/mocks/work-restricted-unpublished-1234.json b/api/test/fixtures/mocks/work-restricted-unpublished-1234.json
similarity index 100%
rename from node/test/fixtures/mocks/work-restricted-unpublished-1234.json
rename to api/test/fixtures/mocks/work-restricted-unpublished-1234.json
diff --git a/node/test/fixtures/mocks/work-video-5678.json b/api/test/fixtures/mocks/work-video-5678.json
similarity index 100%
rename from node/test/fixtures/mocks/work-video-5678.json
rename to api/test/fixtures/mocks/work-video-5678.json
diff --git a/node/test/integration/get-auth-callback.test.js b/api/test/integration/get-auth-callback.test.js
similarity index 100%
rename from node/test/integration/get-auth-callback.test.js
rename to api/test/integration/get-auth-callback.test.js
diff --git a/node/test/integration/get-auth-login.test.js b/api/test/integration/get-auth-login.test.js
similarity index 100%
rename from node/test/integration/get-auth-login.test.js
rename to api/test/integration/get-auth-login.test.js
diff --git a/node/test/integration/get-auth-logout.test.js b/api/test/integration/get-auth-logout.test.js
similarity index 100%
rename from node/test/integration/get-auth-logout.test.js
rename to api/test/integration/get-auth-logout.test.js
diff --git a/node/test/integration/get-auth-token.test.js b/api/test/integration/get-auth-token.test.js
similarity index 100%
rename from node/test/integration/get-auth-token.test.js
rename to api/test/integration/get-auth-token.test.js
diff --git a/node/test/integration/get-auth-whoami.test.js b/api/test/integration/get-auth-whoami.test.js
similarity index 100%
rename from node/test/integration/get-auth-whoami.test.js
rename to api/test/integration/get-auth-whoami.test.js
diff --git a/node/test/integration/get-chat-endpoint.test.js b/api/test/integration/get-chat-endpoint.test.js
similarity index 100%
rename from node/test/integration/get-chat-endpoint.test.js
rename to api/test/integration/get-chat-endpoint.test.js
diff --git a/node/test/integration/get-collection-by-id.test.js b/api/test/integration/get-collection-by-id.test.js
similarity index 100%
rename from node/test/integration/get-collection-by-id.test.js
rename to api/test/integration/get-collection-by-id.test.js
diff --git a/node/test/integration/get-collections.test.js b/api/test/integration/get-collections.test.js
similarity index 100%
rename from node/test/integration/get-collections.test.js
rename to api/test/integration/get-collections.test.js
diff --git a/node/test/integration/get-doc.test.js b/api/test/integration/get-doc.test.js
similarity index 100%
rename from node/test/integration/get-doc.test.js
rename to api/test/integration/get-doc.test.js
diff --git a/node/test/integration/get-file-set-auth.test.js b/api/test/integration/get-file-set-auth.test.js
similarity index 100%
rename from node/test/integration/get-file-set-auth.test.js
rename to api/test/integration/get-file-set-auth.test.js
diff --git a/node/test/integration/get-file-set-download.test.js b/api/test/integration/get-file-set-download.test.js
similarity index 100%
rename from node/test/integration/get-file-set-download.test.js
rename to api/test/integration/get-file-set-download.test.js
diff --git a/node/test/integration/get-shared-link-by-id.test.js b/api/test/integration/get-shared-link-by-id.test.js
similarity index 100%
rename from node/test/integration/get-shared-link-by-id.test.js
rename to api/test/integration/get-shared-link-by-id.test.js
diff --git a/node/test/integration/get-similar.test.js b/api/test/integration/get-similar.test.js
similarity index 100%
rename from node/test/integration/get-similar.test.js
rename to api/test/integration/get-similar.test.js
diff --git a/node/test/integration/get-thumbnail.test.js b/api/test/integration/get-thumbnail.test.js
similarity index 100%
rename from node/test/integration/get-thumbnail.test.js
rename to api/test/integration/get-thumbnail.test.js
diff --git a/node/test/integration/get-work-auth.test.js b/api/test/integration/get-work-auth.test.js
similarity index 100%
rename from node/test/integration/get-work-auth.test.js
rename to api/test/integration/get-work-auth.test.js
diff --git a/node/test/integration/get-work-by-id.test.js b/api/test/integration/get-work-by-id.test.js
similarity index 100%
rename from node/test/integration/get-work-by-id.test.js
rename to api/test/integration/get-work-by-id.test.js
diff --git a/node/test/integration/middleware.test.js b/api/test/integration/middleware.test.js
similarity index 100%
rename from node/test/integration/middleware.test.js
rename to api/test/integration/middleware.test.js
diff --git a/node/test/integration/oai.test.js b/api/test/integration/oai.test.js
similarity index 100%
rename from node/test/integration/oai.test.js
rename to api/test/integration/oai.test.js
diff --git a/node/test/integration/options-request.test.js b/api/test/integration/options-request.test.js
similarity index 100%
rename from node/test/integration/options-request.test.js
rename to api/test/integration/options-request.test.js
diff --git a/api/test/integration/post-chat-feedback.test.js b/api/test/integration/post-chat-feedback.test.js
new file mode 100644
index 00000000..179d5c71
--- /dev/null
+++ b/api/test/integration/post-chat-feedback.test.js
@@ -0,0 +1,278 @@
+const chai = require("chai");
+const expect = chai.expect;
+chai.use(require("chai-http"));
+const ApiToken = requireSource("api/api-token");
+const { mockClient } = require("aws-sdk-client-mock");
+const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
+const { SNSClient, PublishCommand } = require("@aws-sdk/client-sns");
+
+const { handler } = requireSource("handlers/post-chat-feedback");
+
+describe("Chat feedback route", () => {
+ helpers.saveEnvironment();
+  // Pass in the S3 and SNS clients to the handler
+  // to work around an issue with the mocking library
+  // https://github.com/m-radzikowski/aws-sdk-client-mock
+ const s3Mock = mockClient(S3Client);
+ const s3Client = new S3Client({});
+ const snsMock = mockClient(SNSClient);
+ const snsClient = new SNSClient({});
+
+ beforeEach(() => {
+ s3Mock.reset();
+ snsMock.reset();
+ });
+
+ describe("Form POST submission", () => {
+ beforeEach(() => {
+ s3Mock.on(PutObjectCommand).resolves({});
+ snsMock.on(PublishCommand).resolves({});
+ });
+
+ it("should return 401 if user is not logged in", async () => {
+ let requestBody = JSON.stringify({
+ sentiment: "positive",
+ timestamp: new Date().toISOString(),
+ ref: "5a6e1d76-0d4c-43c5-ab2c-4687112ba102",
+ refIndex: 0,
+ context: {
+ ref: "5a6e1d76-0d4c-43c5-ab2c-4687112ba102",
+ initialQuestion: "What is the capital of France?",
+ turns: [
+ {
+ question: "What is the capital of France?",
+ answer: "Paris",
+ works: [],
+ aggregations: [],
+ },
+ ],
+ },
+ feedback: {
+ options: [],
+ text: "",
+ email: "",
+ },
+ });
+
+ const event = helpers
+ .mockEvent("POST", "/chat-feedback")
+ .body(requestBody)
+ .render();
+ const response = await handler(event);
+ expect(response.statusCode).to.equal(401);
+ expect(response.body).to.equal("Authorization Required");
+ });
+
+ it("should fail if sentiment is invalid", async () => {
+ const token = new ApiToken().user({ uid: "abc123" }).sign();
+
+ let requestBody = JSON.stringify({
+ sentiment: "neutral",
+ timestamp: new Date().toISOString(),
+ ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
+ refIndex: 0,
+ context: {
+ ref: "3fc98004-995b-4491-94fd-aea48a0363ba",
+ initialQuestion: "What is the capital of France?",
+ turns: [
+ {
+ question: "What is the capital of France?",
+ answer: "Paris",
+ works: [],
+ aggregations: [],
+ },
+ ],
+ },
+ feedback: {
+ options: [],
+ text: "",
+ email: "",
+ },
+ });
+
+ const event = helpers
+ .mockEvent("POST", "/chat-feedback")
+ .body(requestBody)
+ .headers({
+ Cookie: `${process.env.API_TOKEN_NAME}=${token}`,
+ })
+ .render();
+ const response = await handler(event);
+ expect(response.statusCode).to.equal(400);
+ expect(response.body).to.equal(
+ `sentiment is not one of enum values: positive,negative`
+ );
+ });
+
+ it("should fail if ref is missing", async () => {
+ const token = new ApiToken().user({ uid: "abc123" }).sign();
+ let requestBody = JSON.stringify({
+ sentiment: "positive",
+ timestamp: new Date().toISOString(),
+ // ... we omit ref here ...
+ refIndex: 0,
+ context: {
+ ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
+ initialQuestion: "Hello?",
+ turns: [
+ {
+ question: "Hello?",
+ answer: "World",
+ works: [],
+ aggregations: [],
+ },
+ ],
+ },
+ feedback: {
+ options: [],
+ text: "",
+ email: "",
+ },
+ });
+ const event = helpers
+ .mockEvent("POST", "/chat-feedback")
+ .body(requestBody)
+ .headers({
+ Cookie: `${process.env.API_TOKEN_NAME}=${token}`,
+ })
+ .render();
+ const response = await handler(event);
+ expect(response.statusCode).to.equal(400);
+ expect(response.body).to.equal(`instance requires property "ref"`);
+ });
+
+ it("should fail if refIndex is missing", async () => {
+ const token = new ApiToken().user({ uid: "abc123" }).sign();
+ let requestBody = JSON.stringify({
+ sentiment: "positive",
+ timestamp: new Date().toISOString(),
+ ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
+ // ... we omit refIndex ...
+ context: {
+ ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
+ initialQuestion: "Hello?",
+ turns: [
+ {
+ question: "Hello?",
+ answer: "World",
+ works: [],
+ aggregations: [],
+ },
+ ],
+ },
+ feedback: {
+ options: [],
+ text: "",
+ email: "",
+ },
+ });
+ const event = helpers
+ .mockEvent("POST", "/chat-feedback")
+ .body(requestBody)
+ .headers({
+ Cookie: `${process.env.API_TOKEN_NAME}=${token}`,
+ })
+ .render();
+ const response = await handler(event);
+ expect(response.statusCode).to.equal(400);
+ expect(response.body).to.equal(`instance requires property "refIndex"`);
+ });
+
+ it("should fail if timestamp is missing", async () => {
+ const token = new ApiToken().user({ uid: "abc123" }).sign();
+ let requestBody = JSON.stringify({
+ sentiment: "positive",
+ // ... we omit timestamp ...
+ ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
+ refIndex: 0,
+ context: {
+ ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
+ initialQuestion: "Hello?",
+ turns: [
+ {
+ question: "Hello?",
+ answer: "World",
+ works: [],
+ aggregations: [],
+ },
+ ],
+ },
+ feedback: {
+ options: [],
+ text: "",
+ email: "",
+ },
+ });
+ const event = helpers
+ .mockEvent("POST", "/chat-feedback")
+ .body(requestBody)
+ .headers({
+ Cookie: `${process.env.API_TOKEN_NAME}=${token}`,
+ })
+ .render();
+ const response = await handler(event);
+ expect(response.statusCode).to.equal(400);
+ expect(response.body).to.equal(`instance requires property "timestamp"`);
+ });
+
+ describe("Saving feedback", () => {
+ it("should upload the response to S3 and return 200", async () => {
+ const token = new ApiToken().user({ uid: "abc123" }).sign();
+
+ const requestBody = {
+ sentiment: "negative",
+ timestamp: new Date().toISOString(),
+ ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
+ refIndex: 0,
+ context: {
+ ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
+ initialQuestion: "What is the capital of France?",
+ turns: [
+ {
+ question: "What is the capital of France?",
+ answer: "Rome",
+ works: [],
+ aggregations: [],
+ },
+ ],
+ },
+ feedback: {
+ options: ["option1"],
+ text: "Bad answer!",
+ email: "example@example.com",
+ },
+ };
+
+ const event = helpers
+ .mockEvent("POST", "/chat-feedback")
+ .body(JSON.stringify(requestBody))
+ .headers({
+ Cookie: `${process.env.API_TOKEN_NAME}=${token}`,
+ })
+ .render();
+
+ const response = await handler(event, {
+ injections: { s3Client, snsClient },
+ });
+
+ expect(response.statusCode).to.equal(200);
+ expect(response.body).to.equal(
+ '{"message":"Feedback received. Thank you."}'
+ );
+ expect(s3Mock.calls(PutObjectCommand).length).to.equal(1);
+ expect(s3Mock.call(0).args[0].input.Bucket).eq(
+ process.env.CHAT_FEEDBACK_BUCKET
+ );
+ expect(s3Mock.call(0).args[0].input.Key).eq(
+ "negative/e6005d7c-e03b-43f7-94a3-e327b4b5a538_0.json"
+ );
+ expect(s3Mock.call(0).args[0].input.ContentType).eq("application/json");
+ expect(JSON.parse(s3Mock.call(0).args[0].input.Body)).to.deep.equal(
+ requestBody
+ );
+
+ expect(snsMock.calls(PublishCommand).length).to.equal(1);
+ });
+ });
+ });
+});
diff --git a/node/test/integration/search.test.js b/api/test/integration/search.test.js
similarity index 100%
rename from node/test/integration/search.test.js
rename to api/test/integration/search.test.js
diff --git a/node/test/test-helpers/event-builder.js b/api/test/test-helpers/event-builder.js
similarity index 100%
rename from node/test/test-helpers/event-builder.js
rename to api/test/test-helpers/event-builder.js
diff --git a/node/test/test-helpers/index.js b/api/test/test-helpers/index.js
similarity index 100%
rename from node/test/test-helpers/index.js
rename to api/test/test-helpers/index.js
diff --git a/node/test/unit/api/api-token.test.js b/api/test/unit/api/api-token.test.js
similarity index 100%
rename from node/test/unit/api/api-token.test.js
rename to api/test/unit/api/api-token.test.js
diff --git a/node/test/unit/api/helpers.test.js b/api/test/unit/api/helpers.test.js
similarity index 100%
rename from node/test/unit/api/helpers.test.js
rename to api/test/unit/api/helpers.test.js
diff --git a/node/test/unit/api/opensearch.test.js b/api/test/unit/api/opensearch.test.js
similarity index 100%
rename from node/test/unit/api/opensearch.test.js
rename to api/test/unit/api/opensearch.test.js
diff --git a/node/test/unit/api/pagination.test.js b/api/test/unit/api/pagination.test.js
similarity index 100%
rename from node/test/unit/api/pagination.test.js
rename to api/test/unit/api/pagination.test.js
diff --git a/node/test/unit/api/request/models.test.js b/api/test/unit/api/request/models.test.js
similarity index 100%
rename from node/test/unit/api/request/models.test.js
rename to api/test/unit/api/request/models.test.js
diff --git a/node/test/unit/api/request/pipeline.test.js b/api/test/unit/api/request/pipeline.test.js
similarity index 100%
rename from node/test/unit/api/request/pipeline.test.js
rename to api/test/unit/api/request/pipeline.test.js
diff --git a/node/test/unit/api/response/error.test.js b/api/test/unit/api/response/error.test.js
similarity index 100%
rename from node/test/unit/api/response/error.test.js
rename to api/test/unit/api/response/error.test.js
diff --git a/node/test/unit/api/response/iiif/collection.test.js b/api/test/unit/api/response/iiif/collection.test.js
similarity index 100%
rename from node/test/unit/api/response/iiif/collection.test.js
rename to api/test/unit/api/response/iiif/collection.test.js
diff --git a/node/test/unit/api/response/iiif/manifest.test.js b/api/test/unit/api/response/iiif/manifest.test.js
similarity index 100%
rename from node/test/unit/api/response/iiif/manifest.test.js
rename to api/test/unit/api/response/iiif/manifest.test.js
diff --git a/node/test/unit/api/response/iiif/presentation-api/items.test.js b/api/test/unit/api/response/iiif/presentation-api/items.test.js
similarity index 100%
rename from node/test/unit/api/response/iiif/presentation-api/items.test.js
rename to api/test/unit/api/response/iiif/presentation-api/items.test.js
diff --git a/node/test/unit/api/response/iiif/presentation-api/metadata.test.js b/api/test/unit/api/response/iiif/presentation-api/metadata.test.js
similarity index 100%
rename from node/test/unit/api/response/iiif/presentation-api/metadata.test.js
rename to api/test/unit/api/response/iiif/presentation-api/metadata.test.js
diff --git a/node/test/unit/api/response/iiif/presentation-api/placeholder-canvas.test.js b/api/test/unit/api/response/iiif/presentation-api/placeholder-canvas.test.js
similarity index 100%
rename from node/test/unit/api/response/iiif/presentation-api/placeholder-canvas.test.js
rename to api/test/unit/api/response/iiif/presentation-api/placeholder-canvas.test.js
diff --git a/node/test/unit/api/response/iiif/presentation-api/provider.test.js b/api/test/unit/api/response/iiif/presentation-api/provider.test.js
similarity index 100%
rename from node/test/unit/api/response/iiif/presentation-api/provider.test.js
rename to api/test/unit/api/response/iiif/presentation-api/provider.test.js
diff --git a/node/test/unit/api/response/opensearch.test.js b/api/test/unit/api/response/opensearch.test.js
similarity index 100%
rename from node/test/unit/api/response/opensearch.test.js
rename to api/test/unit/api/response/opensearch.test.js
diff --git a/node/test/unit/aws/environment.test.js b/api/test/unit/aws/environment.test.js
similarity index 100%
rename from node/test/unit/aws/environment.test.js
rename to api/test/unit/aws/environment.test.js
diff --git a/node/test/unit/package.test.js b/api/test/unit/package.test.js
similarity index 100%
rename from node/test/unit/package.test.js
rename to api/test/unit/package.test.js
diff --git a/lambdas/get-download-link.js b/av-download/lambdas/get-download-link.js
similarity index 100%
rename from lambdas/get-download-link.js
rename to av-download/lambdas/get-download-link.js
diff --git a/lambdas/package-lock.json b/av-download/lambdas/package-lock.json
similarity index 100%
rename from lambdas/package-lock.json
rename to av-download/lambdas/package-lock.json
diff --git a/lambdas/package.json b/av-download/lambdas/package.json
similarity index 100%
rename from lambdas/package.json
rename to av-download/lambdas/package.json
diff --git a/lambdas/send-templated-email.js b/av-download/lambdas/send-templated-email.js
similarity index 100%
rename from lambdas/send-templated-email.js
rename to av-download/lambdas/send-templated-email.js
diff --git a/lambdas/start-audio-transcode.js b/av-download/lambdas/start-audio-transcode.js
similarity index 100%
rename from lambdas/start-audio-transcode.js
rename to av-download/lambdas/start-audio-transcode.js
diff --git a/lambdas/start-transcode.js b/av-download/lambdas/start-transcode.js
similarity index 100%
rename from lambdas/start-transcode.js
rename to av-download/lambdas/start-transcode.js
diff --git a/lambdas/transcode-status.js b/av-download/lambdas/transcode-status.js
similarity index 100%
rename from lambdas/transcode-status.js
rename to av-download/lambdas/transcode-status.js
diff --git a/av-download/template.yaml b/av-download/template.yaml
new file mode 100644
index 00000000..7307b48f
--- /dev/null
+++ b/av-download/template.yaml
@@ -0,0 +1,461 @@
+# Build and Deploy Template for DC API
+#
+# Note: Any comment starting with `#*` will be removed
+# at build time. This allows us to run without the
+# dependency layer in development without removing the
+# layer from the build.
+
+AWSTemplateFormatVersion: "2010-09-09"
+Transform:
+ - AWS::Serverless-2016-10-31
+ - AWS::LanguageExtensions
+Description: dc-api-v2 AV Download Support
+Parameters:
+ MediaConvertDestinationBucket:
+ Type: String
+ Description: S3 bucket destination for transcoded AV resource
+ MediaConvertEndpoint:
+ Type: String
+ Description: MediaConvert endpoint
+ MediaConvertJobQueueArn:
+ Type: String
+ Description: Job Queue ARN for MediaConvert
+ MediaConvertRoleArn:
+ Type: String
+ Description: MediaConvert role ARN
+ PyramidBucket:
+ Type: String
+ Description: Meadow Pyramid Bucket
+ RepositoryEmail:
+ Type: String
+ Description: Verified email address to use as sender
+ SecretsPath:
+ Type: String
+ Description: Prefix to use when loading configs from Secrets Manager
+ SecretsPolicy:
+ Type: String
+ Description: IAM Policy for reading secrets
+ StreamingBucket:
+ Type: String
+ Description: Meadow Streaming Bucket
+Resources:
+ apiConfiguration:
+ Type: AWS::SecretsManager::Secret
+ Properties:
+ Name: !Sub "${SecretsPath}/config/av-download"
+ SecretString:
+ Fn::ToJsonString:
+ step_function_endpoint: !Ref AWS::NoValue
+ av_download_state_machine_arn: !Ref avDownloadStateMachine
+ av_download_email_template: !Ref avDownloadEmailTemplate
+ use_proxied_ip: true
+ streaming_bucket: !Ref StreamingBucket
+ media_convert_destination_bucket: !Ref MediaConvertDestinationBucket
+ media_convert_endpoint: !Ref MediaConvertEndpoint
+ media_convert_job_queue_arn: !Ref MediaConvertJobQueueArn
+ media_convert_role_arn: !Ref MediaConvertRoleArn
+ pyramid_bucket: !Ref PyramidBucket
+ repository_email: !Ref RepositoryEmail
+ start_audio_transcode_function: !GetAtt startAudioTranscodeFunction.Arn
+ start_transcode_function: !GetAtt startTranscodeFunction.Arn
+ transcode_status_function: !GetAtt transcodeStatusFunction.Arn
+ get_download_link_function: !GetAtt getDownloadLinkFunction.Arn
+ send_templated_email_function: !GetAtt sendTemplatedEmailFunction.Arn
+ avDownloadStateMachine:
+ Type: AWS::Serverless::StateMachine
+ Properties:
+ Definition:
+        Comment: HLS stitching and save as file in S3 and email download link
+ StartAt: audioOrVideo
+ States:
+ audioOrVideo:
+ Type: Choice
+ Choices:
+ - Variable: "$.transcodeInput.type"
+ StringEquals: audio
+ Next: startAudioTranscode
+ Default: startTranscode
+ startAudioTranscode:
+ Type: Task
+ Resource: arn:aws:states:::lambda:invoke
+ Parameters:
+ Payload.$: "$.transcodeInput"
+ FunctionName.$: "$.configuration.startAudioTranscodeFunction"
+ Next: getDownloadLink
+ InputPath: "$"
+ ResultPath: "$.audioTranscodeOutput"
+ startTranscode:
+ Type: Task
+ Resource: arn:aws:states:::lambda:invoke
+ Parameters:
+ Payload.$: "$.transcodeInput"
+ FunctionName.$: "$.configuration.startTranscodeFunction"
+ Next: transcodeStatus
+ InputPath: "$"
+ ResultPath: "$.transcodeOutput"
+ transcodeStatus:
+ Type: Task
+ Resource: arn:aws:states:::lambda:invoke
+ Parameters:
+ Payload.$: "$.transcodeOutput.Payload"
+ FunctionName.$: "$.configuration.transcodeStatusFunction"
+ InputPath: "$"
+ ResultPath: "$.transcodeOutput"
+ Next: transcodeCompleted?
+ transcodeCompleted?:
+ Type: Choice
+ Choices:
+ - Variable: "$.transcodeOutput.Payload.status"
+ StringEquals: COMPLETE
+ Next: getDownloadLink
+ - Variable: "$.transcodeOutput.Payload.status"
+ StringEquals: ERROR
+ Next: failWorkflow
+ - Variable: "$.transcodeOutput.Payload.status"
+ StringEquals: CANCELED
+ Next: failWorkflow
+ Default: Wait 10 seconds
+ Wait 10 seconds:
+ Type: Wait
+ Seconds: 10
+ Next: transcodeStatus
+ getDownloadLink:
+ Type: Task
+ Resource: arn:aws:states:::lambda:invoke
+ Parameters:
+ Payload.$: "$.presignedUrlInput"
+ FunctionName.$: "$.configuration.getDownloadLinkFunction"
+ InputPath: "$"
+ ResultPath: "$.downloadLinkOutput"
+ Next: sendTemplatedEmail
+ sendTemplatedEmail:
+ Type: Task
+ Resource: arn:aws:states:::lambda:invoke
+ Parameters:
+ Payload:
+ to.$: "$.sendEmailInput.to"
+ from.$: "$.sendEmailInput.from"
+ template.$: "$.sendEmailInput.template"
+ params:
+ downloadLink.$: "$.downloadLinkOutput.Payload.downloadLink"
+ fileSetId.$: "$.sendEmailInput.params.fileSetId"
+ fileSetLabel.$: "$.sendEmailInput.params.fileSetLabel"
+ workId.$: "$.sendEmailInput.params.workId"
+ fileType.$: "$.sendEmailInput.params.fileType"
+ FunctionName.$: "$.configuration.sendTemplatedEmailFunction"
+ End: true
+ failWorkflow:
+ Type: Fail
+ Name: !Sub "${AWS::StackName}-av-download-state-machine"
+ Policies:
+ - Version: 2012-10-17
+ Statement:
+ - Sid: LambaInvokePermissions
+ Effect: Allow
+ Action:
+ - lambda:InvokeFunction
+ Resource:
+ - !GetAtt startAudioTranscodeFunction.Arn
+ - !GetAtt startTranscodeFunction.Arn
+ - !GetAtt transcodeStatusFunction.Arn
+ - !GetAtt getDownloadLinkFunction.Arn
+ - !GetAtt sendTemplatedEmailFunction.Arn
+ avDownloadEmailTemplate:
+ Type: AWS::SES::Template
+ Properties:
+ Template:
+ TemplateName: !Sub "${AWS::StackName}-av-download-template"
+ SubjectPart: Download of {{fileSetLabel}} is ready!
+ TextPart: |
+ Hello,
+ Your request for {{fileType}} download of {{fileSetLabel}} (file set id: {{fileSetId}}) has been fulfilled. Your download will be available for 3 days.
+ The {{fileType}} file can be downloaded from {{downloadLink}}
+ HtmlPart: |
+
+
+
+
+ NUL Meadow Download
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+
+ |
+
+
+
+ |
+
+
+
+
+
+
+
+
+ Hello,
+
+
+ Your request for {{fileType}} download of {{fileSetLabel}} (file set id: {{fileSetId}}) has been
+ fulfilled. Click below to download your file:
+
+
+
+
+
+
+
+
+ (Your download will be available for 3 days)
+
+ |
+
+
+
+ |
+
+
+
+ |
+
+ |
+
+
+
+
+
+
+ ffmpegLayer:
+ Type: AWS::Serverless::LayerVersion
+ Properties:
+ Description: "FFMPEG Lambda Layer"
+ ContentUri: ./layers/ffmpeg
+ CompatibleRuntimes:
+ - nodejs18.x
+ startAudioTranscodeFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Runtime: nodejs18.x
+ CodeUri: ./lambdas
+ Handler: start-audio-transcode.handler
+ Description: Performs audio transcode job with ffmpeg
+ Timeout: 900
+ MemorySize: 10240
+ Layers:
+ - !Ref ffmpegLayer
+ Policies:
+ - Version: 2012-10-17
+ Statement:
+ - Sid: BucketAccess
+ Effect: Allow
+ Action:
+ - s3:PutObject
+ Resource: !Sub "arn:aws:s3:::${MediaConvertDestinationBucket}/*"
+ Environment:
+ Variables:
+ MEDIA_CONVERT_DESTINATION_BUCKET: !Ref MediaConvertDestinationBucket
+ startTranscodeFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Runtime: nodejs20.x
+ CodeUri: ./lambdas
+ Handler: start-transcode.handler
+ Description: Creates MediaConvert Job to transcode HLS stream
+ Environment:
+ Variables:
+ MEDIA_CONVERT_ENDPOINT: !Ref MediaConvertEndpoint
+ MEDIA_CONVERT_JOB_QUEUE_ARN: !Ref MediaConvertJobQueueArn
+ MEDIA_CONVERT_ROLE_ARN: !Ref MediaConvertRoleArn
+ Policies:
+ - Version: 2012-10-17
+ Statement:
+ - Sid: PassMediaConvertRole
+ Effect: Allow
+ Action:
+ - iam:PassRole
+ Resource:
+ - !Ref MediaConvertRoleArn
+ - Sid: StartTranscodeJob
+ Effect: Allow
+ Action:
+ - mediaconvert:CreateJob
+ Resource: "*"
+ transcodeStatusFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Runtime: nodejs20.x
+ CodeUri: ./lambdas
+ Handler: transcode-status.handler
+ Description: Determines when transcode job has completed or errored
+ Environment:
+ Variables:
+ MEDIA_CONVERT_ENDPOINT: !Ref MediaConvertEndpoint
+ Policies:
+ - Version: 2012-10-17
+ Statement:
+ - Sid: TranscodeJobStatus
+ Effect: Allow
+ Action:
+ - mediaconvert:GetJob
+ - mediaconvert:ListJobs
+ Resource: "*"
+ getDownloadLinkFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Runtime: nodejs20.x
+ CodeUri: ./lambdas
+ Handler: get-download-link.handler
+ Description: Creates presigned url
+ Policies:
+ - Version: 2012-10-17
+ Statement:
+ - Sid: BucketAccess
+ Effect: Allow
+ Action:
+ - s3:GetObject
+ Resource: !Sub "arn:aws:s3:::${MediaConvertDestinationBucket}/*"
+ sendTemplatedEmailFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Runtime: nodejs20.x
+ CodeUri: ./lambdas
+ Handler: send-templated-email.handler
+ Description: Sends email
+ Policies:
+ - Version: 2012-10-17
+ Statement:
+ - Sid: SESSendEmail
+ Effect: Allow
+ Action:
+ - ses:SendTemplatedEmail
+ Resource: "*"
diff --git a/bin/make_deploy_config.sh b/bin/make_deploy_config.sh
new file mode 100755
index 00000000..326b5f95
--- /dev/null
+++ b/bin/make_deploy_config.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+CMD_WITH=$WITH
+
+get_secret() {
+ local secret_name=$1
+ local secret_value=$(aws secretsmanager get-secret-value --secret-id $secret_name --query SecretString --output text)
+
+ local key_name=$2
+ if [ -n "$key_name" ]; then
+ jq -r ".$key_name" <<< $secret_value
+ else
+ echo $secret_value
+ fi
+}
+
+with() {
+ local feature=$1
+ if [[ ",$WITH," == *",$feature,"* ]]; then
+ echo "true"
+ else
+ echo "false"
+ fi
+}
+
+aws_account_id=$(aws sts get-caller-identity --query 'Account' --output text)
+net_id=$(aws ec2 describe-tags \
+ --filters "Name=resource-id,Values=$(curl -s http://169.254.169.254/latest/meta-data/instance-id)" \
+ "Name=key,Values=NetID" \
+ --query 'Tags[0].Value' \
+ --output text)
+nusso_config=$(get_secret dev-environment/infrastructure/nusso)
+cat <<EOF > samconfig.${DEV_PREFIX}.yaml
+---
+version: 1.0
+default:
+EOF
+
+for section in deploy sync; do
+ case $section in
+ deploy)
+ WITH=${CMD_WITH:-"API,AV_DOWNLOAD,CHAT"}
+ ;;
+ sync)
+ WITH=${CMD_WITH:-"CHAT"}
+ ;;
+ esac
+  cat <<EOF >> samconfig.${DEV_PREFIX}.yaml
+ ${section}:
+ parameters:
+ stack_name: dc-api-${DEV_PREFIX}
+ s3_bucket: $(aws s3api list-buckets --query "Buckets[?starts_with(Name, 'aws-sam-cli-managed')].{Name:Name, CreationDate:CreationDate}" --output json | jq -r 'sort_by(.CreationDate) | .[0].Name')
+ s3_prefix: dc-api-${DEV_PREFIX}
+ region: us-east-1
+ confirm_changeset: true
+ capabilities:
+ - CAPABILITY_IAM
+ - CAPABILITY_AUTO_EXPAND
+ image_repositories: []
+ parameter_overrides: >
+ ApiConfigPrefix="dev-environment-${DEV_PREFIX}"
+ ApiTokenName="dcapi$(openssl rand -hex 4 | cut -c1-7)"
+ ApiTokenSecret="$(get_secret staging/config/dcapi api_token_secret)"
+ CustomDomainCertificateArn="$(aws acm list-certificates --query "CertificateSummaryList[?DomainName=='*.rdc-staging.library.northwestern.edu'].CertificateArn" --output text)"
+ CustomDomainHost="dcapi-${DEV_PREFIX}"
+ CustomDomainZone="rdc-staging.library.northwestern.edu"
+ DcApiEndpoint="https://dcapi-${DEV_PREFIX}.rdc-staging.library.northwestern.edu/api/v2"
+ DcUrl="https://dc.rdc-staging.library.northwestern.edu"
+ DeployAPI="$(with API)"
+ DeployAVDownload="$(with AV_DOWNLOAD)"
+ DeployChat="$(with CHAT)"
+ DeployDocs="$(with DOCS)"
+ DevTeamNetIds="${net_id}"
+ ElasticsearchEndpoint="$(get_secret dev-environment/infrastructure/index | jq -r '.endpoint | ltrimstr("https://")')"
+ EnvironmentPrefix="${DEV_PREFIX}-${DEV_ENV}"
+ MediaConvertDestinationBucket="${DEV_PREFIX}-${DEV_ENV}-streaming"
+ MediaConvertEndpoint="$(aws mediaconvert describe-endpoints --query 'Endpoints[0].Url' --output text)"
+ MediaConvertJobQueueArn="arn:aws:mediaconvert:us-east-1:${aws_account_id}:queues/Default"
+ MediaConvertRoleArn="arn:aws:iam::${aws_account_id}:role/service-role/MediaConvert_Default_Role"
+ NussoApiKey="$(jq -r '.api_key' <<< $nusso_config)"
+ NussoBaseUrl="$(jq -r '.base_url' <<< $nusso_config)"
+ PyramidBucket="${DEV_PREFIX}-${DEV_ENV}-pyramids"
+ ReadingRoomIPs=""
+ RepositoryEmail="repository@northwestern.edu"
+ SecretsPath="dev-environment"
+ StreamingBucket="${DEV_PREFIX}-${DEV_ENV}-streaming"
+EOF
+done
\ No newline at end of file
diff --git a/bin/make_env.sh b/bin/make_env.sh
new file mode 100755
index 00000000..237d4d20
--- /dev/null
+++ b/bin/make_env.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+get_secret() {
+ local secret_name=$1
+ local secret_value=$(aws secretsmanager get-secret-value --secret-id $secret_name --query SecretString --output text)
+
+ local key_name=$2
+ if [ -n "$key_name" ]; then
+ jq -r ".$key_name" <<< $secret_value
+ else
+ echo $secret_value
+ fi
+}
+
+aws_account_id=$(aws sts get-caller-identity --query 'Account' --output text)
+media_convert_endpoint=$(aws mediaconvert describe-endpoints --query 'Endpoints[0].Url' --output text)
+media_convert_queue=$(aws mediaconvert get-queue --name Default --query Queue.Name --output text)
+
+cat <<EOF > env.json
+{
+ "Parameters": {
+ "AWS_REGION": "us-east-1",
+ "API_TOKEN_NAME": "dcApiLocal",
+ "API_TOKEN_SECRET": "$(get_secret staging/config/dcapi api_token_secret)",
+ "DC_API_ENDPOINT": "https://${DEV_PREFIX}.dev.rdc.library.northwestern.edu:3002",
+ "DC_URL": "https://${DEV_PREFIX}.dev.rdc.library.northwestern.edu:3001",
+ "DEFAULT_SEARCH_SIZE": "10",
+ "DEV_TEAM_NET_IDS": "$(aws ec2 describe-tags --filters "Name=resource-id,Values=$(curl -s http://169.254.169.254/latest/meta-data/instance-id)" "Name=key,Values=NetID" --query 'Tags[0].Value' --output text)",
+ "ENV_PREFIX": "${DEV_PREFIX}-${DEV_ENV}",
+ "READING_ROOM_IPS": "",
+ "SECRETS_PATH": "dev-environment",
+ "AV_DOWNLOAD_EMAIL_TEMPLATE": "av-download-template",
+ "AV_DOWNLOAD_STATE_MACHINE_ARN": "arn:aws:states:us-east-1:123456789012:stateMachine:hlsStitcherStepFunction",
+ "GET_DOWNLOAD_LINK_FUNCTION": "arn:aws:lambda:us-east-1:123456789012:function:getDownloadLinkFunction",
+ "MEDIA_CONVERT_DESTINATION_BUCKET": "${DEV_PREFIX}-${DEV_ENV}-streaming",
+ "MEDIA_CONVERT_ENDPOINT": "${media_convert_endpoint}",
+ "MEDIA_CONVERT_JOB_QUEUE_ARN": "${media_convert_queue}",
+ "MEDIA_CONVERT_ROLE_ARN": "arn:aws:iam::${aws_account_id}:role/service-role/MediaConvert_Default_Role",
+ "PYRAMID_BUCKET": "${DEV_PREFIX}-${DEV_ENV}-pyramids",
+ "REPOSITORY_EMAIL": "repository@northwestern.edu",
+ "SEND_TEMPLATED_EMAIL_FUNCTION": "arn:aws:lambda:us-east-1:123456789012:function:sendTemplatedEmailFunction",
+ "START_AUDIO_TRANSCODE_FUNCTION": "arn:aws:lambda:us-east-1:123456789012:function:startAudioTranscodeFunction",
+ "START_TRANSCODE_FUNCTION": "arn:aws:lambda:us-east-1:123456789012:function:startTranscodeFunction",
+ "STEP_FUNCTION_ENDPOINT": "http://172.17.0.1:8083",
+ "STREAMING_BUCKET": "${DEV_PREFIX}-${DEV_ENV}-streaming",
+ "TRANSCODE_STATUS_FUNCTION": "arn:aws:lambda:us-east-1:123456789012:function:transcodeStatusFunction"
+ }
+}
+EOF
diff --git a/chat-playground/opensearch.ipynb b/chat-playground/opensearch.ipynb
new file mode 100644
index 00000000..9ad9ad00
--- /dev/null
+++ b/chat-playground/opensearch.ipynb
@@ -0,0 +1,100 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from dotenv import load_dotenv\n",
+ "import sys\n",
+ "import os\n",
+ "\n",
+ "load_dotenv(override=True)\n",
+ "try:\n",
+ " del os.environ[\"DEV_PREFIX\"]\n",
+ " del os.environ[\"DEV_ENV\"]\n",
+ "except:\n",
+ " pass\n",
+ "\n",
+ "sys.path.insert(0, os.path.join(os.curdir, \"../chat/src\"))\n",
+ "\n",
+ "from core.setup import opensearch_vector_store\n",
+ "from core.secrets import load_secrets\n",
+ "\n",
+ "load_secrets()\n",
+ "opensearch = opensearch_vector_store()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Aggregations"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "opensearch.aggregations_search(\n",
+ " agg_field=\"visibility\"\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "opensearch.aggregations_search(\n",
+ " agg_field=\"published\"\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "## Similarity Search"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "opensearch.similarity_search(\n",
+ " query=\"Football stadiums\",\n",
+ " k=80,\n",
+ " size=3\n",
+ ")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": ".venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.2"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/chat-playground/playground.ipynb b/chat-playground/playground.ipynb
new file mode 100644
index 00000000..da13defb
--- /dev/null
+++ b/chat-playground/playground.ipynb
@@ -0,0 +1,98 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from dotenv import load_dotenv\n",
+ "import sys\n",
+ "import os\n",
+ "import json\n",
+ "\n",
+ "load_dotenv(override=True)\n",
+ "try:\n",
+ " del os.environ[\"DEV_PREFIX\"]\n",
+ " del os.environ[\"DEV_ENV\"]\n",
+ "except:\n",
+ " pass\n",
+ "\n",
+ "sys.path.insert(0, os.path.join(os.curdir, \"../chat/src\"))\n",
+ "import core.secrets # noqa"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from IPython.display import display\n",
+ "from typing import Any, Dict, Optional\n",
+ "from langchain_core.callbacks import BaseCallbackHandler\n",
+ "from langchain_core.messages.tool import ToolMessage\n",
+ "from langchain_core.outputs import LLMResult\n",
+ "\n",
+ "\n",
+ "class DebugHandler(BaseCallbackHandler):\n",
+ " def on_llm_start(self, serialized: dict[str, Any], prompts: list[str], metadata: Optional[dict[str, Any]] = None, **kwargs: Dict[str, Any]):\n",
+ " print(\"on_llm_start:\")\n",
+ " display({\"serialized\": serialized, \"metadata\": metadata, \"kwargs\": kwargs})\n",
+ "\n",
+ " def on_llm_end(self, response: LLMResult, **kwargs: Dict[str, Any]):\n",
+ " print(\"on_llm_end:\")\n",
+ " display({\"response\": response, \"kwargs\": kwargs})\n",
+ "\n",
+ " def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Dict[str, Any]):\n",
+ " print(\"on_tool_start:\")\n",
+ " display({\"serialized\": serialized, \"kwargs\": kwargs})\n",
+ "\n",
+ " def on_tool_end(self, output: ToolMessage, **kwargs: Dict[str, Any]):\n",
+ " print(\"on_tool_end:\")\n",
+ " display({\"output\": output, \"kwargs\": kwargs})"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import agent.search_agent\n",
+ "from agent.search_agent import SearchAgent\n",
+ "from core.setup import chat_model\n",
+ "\n",
+ "model = chat_model(model=\"us.anthropic.claude-3-5-sonnet-20241022-v2:0\", streaming=False)\n",
+ "agent = SearchAgent(model=model)\n",
+ "agent.invoke(\n",
+ " \"What works in the collection pertain to Iranian film?\",\n",
+ " ref=\"abc123\",\n",
+ " callbacks=[DebugHandler()],\n",
+ " forget=True,\n",
+ ")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": ".venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.2"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/chat/README.md b/chat/README.md
index fd78e205..6f25d636 100644
--- a/chat/README.md
+++ b/chat/README.md
@@ -4,57 +4,220 @@
## Local development setup
-##### ⚠️ *All commands and instructions in this file assume that the current working directory is the `/chat` subdirectory of the `dc-api-v2` project.*
+Follow the instructions in the [main `README`](../README.md) to deploy or sync a development stack
+that includes the `CHAT` feature.
-### Link `samconfig.yaml`
-This only needs to be done once.
+### Websocket Communication
-1. Pull the `miscellany` repo.
-2. Link the development `samconfig.yaml` file
- ```bash
- ln -s /path/to/miscellany/dc-api-v2/chat/samconfig.yaml .
- ```
+The DC API uses websockets to enable real-time, bi-directional communication between the frontend application and the chat API. This is particularly important for streaming chat responses where messages need to be delivered incrementally.
-### Deploy a development stack
+#### Connection
-1. [Log into AWS](http://docs.rdc.library.northwestern.edu/2._Developer_Guides/Environment_and_Tools/AWS-Authentication/) using your `staging-admin` profile.
-2. Pick a unique stack name, e.g., `dc-api-chat-[YOUR_INITIALS]`
-3. Create or synchronize the development stack
- ```bash
- sam sync --watch --config-env dev --stack-name [STACK_NAME]
- ```
+The websocket endpoint is available at:
+```
+wss://[API_ENDPOINT]/chat
+```
+
+##### Client to Server Messages
+
+To initiate a chat conversation, send:
+```json
+{
+ "question": "Your question here",
+ "docs": ["work-id-1", "work-id-2"],
+ "auth": "jwt-token",
+ "stream_response": true,
+ "ref": "abc123"
+}
+```
+
+Additional optional parameters:
+- `forget`: boolean (default: false) - Start a new conversation
+- `model`: string - Specify the LLM model to use (superuser only)
+- `k`: number - Number of documents to retrieve (superuser only)
+- `temperature`: number - Model temperature (superuser only)
+
+##### Server to Client Messages
+
+#### Server Message Format
+
+Messages sent from the server to the client follow this general structure:
+
+```json
+{
+ "type": "string", // Type of message
+ "message": "string", // Content of the message
+ "ref": "string" // Reference ID for tracking conversation
+}
+```
+
+The server sends different types of messages:
+
+1. Start Message:
+```json
+{
+ "type": "start",
+ "message": {
+ "model": "model_name"
+ },
+ "ref": "conversation-id"
+}
+```
+
+2. Token Updates:
+```json
+{
+ "type": "token",
+ "message": "partial response text",
+ "ref": "conversation-id"
+}
+```
+
+3. Stop Message:
+```json
+{
+ "type": "stop",
+ "ref": "conversation-id"
+}
+```
+
+4. Answer Message:
+```json
+{
+ "type": "answer",
+ "message": "response content",
+ "ref": "conversation-id"
+}
+```
-The first time the `sam sync` command is run, it will build the development stack. This takes longer than it will on subsequent runs.
+5. Final Message:
+```json
+{
+ "type": "final_message",
+ "ref": "conversation-id"
+}
+```
-While the `sam sync` remains open, it will keep the development stack synchronized with any code changes you make. Each time you change a file, you'll need to wait for the output of that command to indicate that resource syncing is finished.
+6. Tool Start:
+```json
+{
+ "type": "tool_start",
+ "message": {
+ "tool": "tool_name",
+ "input": "tool input"
+ },
+ "ref": "conversation-id"
+}
+```
-The first time the stack is created, it will show you the stack's outputs, including the websocket URL to use for interacting with the chat backend, e.g.:
+7. Aggregation Result:
+```json
+{
+ "type": "aggregation_result",
+ "message": {
+ // example aggregation result object
+ "buckets": [
+ {
+ "key": "bucket-key",
+ "doc_count": 10
+ }
+ ],
+ "sum_other_doc_count": 34,
+ "doc_count_error_upper_bound": 0
+ },
+ "ref": "conversation-id"
+}
```
--------------------------------------------------------------------------------------------------
-CloudFormation outputs from deployed stack
--------------------------------------------------------------------------------------------------
-Outputs
--------------------------------------------------------------------------------------------------
-Key WebSocketURI
-Description The WSS Protocol URI to connect to
-Value wss://nmom3hnp3c.execute-api.us-east-1.amazonaws.com/latest
--------------------------------------------------------------------------------------------------
+
+8. Search Result:
+```json
+{
+ "type": "search_result",
+ "message": [
+ {
+ "id": "work-id",
+ "title": "work title",
+ "visibility": "visibility status",
+ "work_type": "type",
+ "thumbnail": "thumbnail url"
+ }
+ ],
+ "ref": "conversation-id"
+}
```
-On subsequent sync runs, the outputs will not be displayed. If you need to retrieve the value again, you can run
-```bash
-sam list stack-outputs --stack-name [STACK_NAME]
+9. Final Completion:
+```json
+{
+ "type": "final",
+ "message": "Finished", // Hard-coded value for the message
+ "ref": "conversation-id"
+}
```
-To stop synchronizing changes, simply terminate the `sam sync` process with `Ctrl+C`.
+10. Error Messages:
+```json
+{
+ "type": "error",
+ "message": "error description",
+ "ref": "conversation-id"
+}
+```
+
+#### Error Handling
+
+- 401 Unauthorized: Returned when the authentication token is missing or invalid
+- 400 Bad Request: Returned when the question is blank or missing
+- Connection errors will emit an "error" type message through the websocket
+
+
+## Security and Authentication
-### Tear down the development stack
+The chat service uses JWT-based authentication inherited from the main DC API. Each WebSocket connection requires a valid JWT token to be provided in the connection payload.
-The development stack will remain up and active even after `sam sync` exits; it will simply not actively synchronize changes any more. To tear it down completely, you have to delete it yourself.
+### Token Requirements
+
+- Tokens must be signed with the shared API secret
+- Tokens contain user entitlements and authentication status
+- Standard tokens expire after 12 hours
+- Anonymous access is supported with limited capabilities
+
+### Security Features
+
+- Token validation occurs on every connection
+- User privileges are enforced via the `ApiToken` class
+- Advanced features like model selection and debug mode require superuser status
+- Temperature and context window size limits are enforced for non-superusers
+- All chat interactions are logged for auditing purposes
+
+### Environment Configuration
+
+The following security-related environment variables must be configured:
+
+```
+API_TOKEN_NAME - Name of the JWT cookie/header
+API_TOKEN_SECRET - Shared secret for validating JWTs
+```
-1. [Log into AWS](http://docs.rdc.library.northwestern.edu/2._Developer_Guides/Environment_and_Tools/AWS-Authentication/) using your `staging-admin` profile.
-2. Delete the development stack
- ```bash
- sam delete --stack-name [STACK_NAME]
- ```
+### Authorization Levels
+
+The chat service implements the following authorization levels:
+
+- **Unauthorized**: No access to chat functionality
+- **Authenticated Users** (config.is_logged_in=true):
+ - Basic chat functionality
+ - Default model settings
+ - Standard context window limits
+ - Fixed temperature settings
+- **Dev Team** (config.is_dev_team=true):
+ - Same access as authenticated users
+ - Flagged in metrics logs for filtering development traffic
+ - No additional feature permissions
+- **Superusers** (config.is_superuser=true):
+ - Custom prompts
+ - Model selection
+ - Debug mode
+ - Temperature control
+ - Unrestricted context window
+ - Ability to override system defaults
diff --git a/chat/dependencies/requirements.txt b/chat/dependencies/requirements.txt
index 6c4a743b..cfaf688b 100644
--- a/chat/dependencies/requirements.txt
+++ b/chat/dependencies/requirements.txt
@@ -1,8 +1,9 @@
boto3~=1.34
honeybadger
langchain~=0.2
-langchain-aws~=0.1
+langchain-aws~=0.2
langchain-openai~=0.1
+langgraph~=0.2
openai~=1.35
opensearch-py
pyjwt~=2.6.0
diff --git a/chat/pytest.ini b/chat/pytest.ini
new file mode 100644
index 00000000..1ca84343
--- /dev/null
+++ b/chat/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+addopts = -m "not slow"
+markers =
+ slow: marks tests as slow (deselect with '-m "not slow"')
\ No newline at end of file
diff --git a/chat/src/handlers/__init__.py b/chat/src/agent/__init__.py
similarity index 100%
rename from chat/src/handlers/__init__.py
rename to chat/src/agent/__init__.py
diff --git a/chat/src/helpers/__init__.py b/chat/src/agent/callbacks/__init__.py
similarity index 100%
rename from chat/src/helpers/__init__.py
rename to chat/src/agent/callbacks/__init__.py
diff --git a/chat/src/agent/callbacks/metrics.py b/chat/src/agent/callbacks/metrics.py
new file mode 100644
index 00000000..589e4308
--- /dev/null
+++ b/chat/src/agent/callbacks/metrics.py
@@ -0,0 +1,109 @@
+from datetime import datetime
+from typing import Any, Dict
+from langchain_core.callbacks import BaseCallbackHandler
+from langchain_core.outputs import LLMResult
+from langchain_core.messages.tool import ToolMessage
+import boto3
+import json
+import os
+
+class MetricsCallbackHandler(BaseCallbackHandler):
+ def __init__(self, log_stream = None, *args, extra_data = {}, **kwargs):
+ self.accumulator = {}
+ self.answers = []
+ self.artifacts = []
+ self.log_stream = log_stream
+ self.extra_data = extra_data
+ super().__init__(*args, **kwargs)
+
+ def on_llm_end(self, response: LLMResult, **kwargs: Dict[str, Any]):
+ if response is None:
+ return
+
+ if not response.generations or not response.generations[0]:
+ return
+
+ for generation in response.generations[0]:
+ if generation.text != "":
+ self.answers.append(generation.text)
+
+ if not hasattr(generation, "message") or generation.message is None:
+ continue
+
+ metadata = getattr(generation.message, "usage_metadata", None)
+ if metadata is None:
+ continue
+
+ for k, v in metadata.items():
+ self.accumulator[k] = self.accumulator.get(k, 0) + v
+
+ def on_tool_end(self, output: ToolMessage, **kwargs: Dict[str, Any]):
+ content = output.content
+ if isinstance(content, str):
+ try:
+ content = json.loads(content)
+ except json.decoder.JSONDecodeError as e:
+ print(
+ f"Invalid json ({e}) returned from {output.name} tool: {output.content}"
+ )
+ return
+
+ match output.name:
+ case "aggregate":
+ self.artifacts.append(
+ {
+ "type": "aggregation",
+ "artifact": content.get("aggregation_result", {}),
+ }
+ )
+ case "search":
+ source_urls = [doc.get("api_link") for doc in content]
+ self.artifacts.append({"type": "source_urls", "artifact": source_urls})
+ case "summarize":
+ print(output)
+
+ def log_metrics(self):
+ if self.log_stream is None:
+ return
+
+ log_group = os.getenv("METRICS_LOG_GROUP")
+ if log_group and ensure_log_stream_exists(log_group, self.log_stream):
+ client = log_client()
+ message = {
+ "answer": self.answers,
+ "artifacts": self.artifacts,
+ "token_counts": self.accumulator,
+ }
+ message.update(self.extra_data)
+
+ log_events = [
+ {
+ "timestamp": timestamp(),
+ "message": json.dumps(message),
+ }
+ ]
+ client.put_log_events(
+ logGroupName=log_group, logStreamName=self.log_stream, logEvents=log_events
+ )
+
+
+def ensure_log_stream_exists(log_group, log_stream):
+ client = log_client()
+ try:
+ print(
+ client.create_log_stream(logGroupName=log_group, logStreamName=log_stream)
+ )
+ return True
+ except client.exceptions.ResourceAlreadyExistsException:
+ return True
+ except Exception:
+ print(f"Could not create log stream: {log_group}:{log_stream}")
+ return False
+
+
+def log_client():
+ return boto3.client("logs", region_name=os.getenv("AWS_REGION", "us-east-1"))
+
+
+def timestamp():
+ return round(datetime.timestamp(datetime.now()) * 1000)
diff --git a/chat/src/agent/callbacks/socket.py b/chat/src/agent/callbacks/socket.py
new file mode 100644
index 00000000..8744be15
--- /dev/null
+++ b/chat/src/agent/callbacks/socket.py
@@ -0,0 +1,74 @@
+from typing import Any, Dict, List, Optional
+
+from core.websocket import Websocket
+
+from json.decoder import JSONDecodeError
+from langchain_core.callbacks import BaseCallbackHandler
+from langchain_core.messages.tool import ToolMessage
+from langchain_core.outputs import LLMResult
+
+import ast
+import json
+
+def deserialize_input(input_str):
+ try:
+ return ast.literal_eval(input_str)
+ except (ValueError, SyntaxError):
+ try:
+ return json.loads(input_str)
+ except JSONDecodeError:
+ return input_str
+
+class SocketCallbackHandler(BaseCallbackHandler):
+ def __init__(self, socket: Websocket, ref: str, *args: List[Any], **kwargs: Dict[str, Any]):
+ if socket is None:
+ raise ValueError("Socket not provided to agent callback handler")
+ self.socket = socket
+ self.ref = ref
+ super().__init__(*args, **kwargs)
+
+ def on_llm_start(self, serialized: dict[str, Any], prompts: list[str], metadata: Optional[dict[str, Any]] = None, **kwargs: Dict[str, Any]):
+ self.socket.send({"type": "start", "ref": self.ref, "message": {"model": metadata.get("ls_model_name")}})
+
+ def on_llm_end(self, response: LLMResult, **kwargs: Dict[str, Any]):
+ response_generation = response.generations[0][0]
+ content = response_generation.text
+ stop_reason = response_generation.message.response_metadata.get("stop_reason", "unknown")
+ if content != "":
+ self.socket.send({"type": "stop", "ref": self.ref})
+ self.socket.send({"type": "answer", "ref": self.ref, "message": content})
+ if stop_reason == "end_turn":
+ self.socket.send({"type": "final_message", "ref": self.ref})
+
+
+ def on_llm_new_token(self, token: str, **kwargs: Dict[str, Any]):
+ if token != "":
+ self.socket.send({"type": "token", "ref": self.ref, "message": token})
+
+ def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Dict[str, Any]) -> Any:
+ input = deserialize_input(input_str)
+ self.socket.send({"type": "tool_start", "ref": self.ref, "message": {"tool": serialized.get("name"), "input": input}})
+
+ def on_tool_end(self, output: ToolMessage, **kwargs: Dict[str, Any]):
+ content = output.content
+ if isinstance(content, str):
+ content = json.loads(content)
+
+ match output.name:
+ case "aggregate":
+ self.socket.send({"type": "aggregation_result", "ref": self.ref, "message": content.get('aggregation_result', {})})
+ case "discover_fields":
+ pass
+ case "search":
+ result_fields = ("id", "title", "visibility", "work_type", "thumbnail")
+ docs: List[Dict[str, Any]] = [{k: doc.get(k) for k in result_fields} for doc in content]
+ self.socket.send({"type": "search_result", "ref": self.ref, "message": docs})
+ case "retrieve_documents":
+ result_fields = ("id", "title", "visibility", "work_type", "thumbnail")
+ docs: List[Dict[str, Any]] = [{k: doc.get(k) for k in result_fields} for doc in content]
+ self.socket.send({"type": "retrieved_documents", "ref": self.ref, "message": docs})
+ case _:
+ print(f"Unhandled tool_end message: {output}")
+
+ def on_agent_finish(self, finish, **kwargs):
+ self.socket.send({"type": "final", "ref": self.ref, "message": "Finished"})
diff --git a/chat/src/agent/search_agent.py b/chat/src/agent/search_agent.py
new file mode 100644
index 00000000..ac3df688
--- /dev/null
+++ b/chat/src/agent/search_agent.py
@@ -0,0 +1,179 @@
+import json
+from typing import Literal, List
+from langchain_core.messages import HumanMessage, ToolMessage
+from agent.tools import aggregate, discover_fields, search, retrieve_documents
+from langchain_core.messages.base import BaseMessage
+from langchain_core.language_models.chat_models import BaseModel
+from langchain_core.callbacks import BaseCallbackHandler
+from langchain_core.messages.system import SystemMessage
+from langgraph.graph import END, START, StateGraph, MessagesState
+from langgraph.prebuilt import ToolNode
+from langgraph.errors import GraphRecursionError
+from core.document import minimize_documents
+from core.setup import checkpoint_saver
+from agent.callbacks.socket import SocketCallbackHandler
+from typing import Optional
+import time
+
+DEFAULT_SYSTEM_MESSAGE = """
+Please provide a brief answer to the question using the tools provided. Include specific details from multiple documents that
+support your answer. Answer in raw markdown, but not within a code block. When citing source documents, construct Markdown
+links using the document's canonical_link field. Do not include intermediate messages explaining your process. If the user's
+question is unclear, ask for clarification. Use no more than 6 tool calls. If you still cannot answer the question after 6
+tool calls, summarize the information you have gathered so far and suggest ways in which the user might narrow the scope
+of their question to make it more answerable.
+"""
+
+MAX_RECURSION_LIMIT = 16
+
+class SearchWorkflow:
+ def __init__(self, model: BaseModel, system_message: str, metrics = None):
+ self.metrics = metrics
+ self.model = model
+ self.system_message = system_message
+
+ def should_continue(self, state: MessagesState) -> Literal["tools", END]:
+ messages = state["messages"]
+ last_message = messages[-1]
+ # If the LLM makes a tool call, then we route to the "tools" node
+ if last_message.tool_calls:
+ return "tools"
+ # Otherwise, we stop (reply to the user)
+ return END
+
+ def summarize(self, state: MessagesState):
+ messages = state["messages"]
+ last_message = messages[-1]
+ if last_message.name not in ["search", "retrieve_documents"]:
+ return {"messages": messages}
+
+ start_time = time.time()
+ content = minimize_documents(json.loads(last_message.content))
+ content = json.dumps(content, separators=(',', ':'))
+ end_time = time.time()
+ elapsed_time = end_time - start_time
+ print(f'Condensed {len(last_message.content)} bytes to {len(content)} bytes in {elapsed_time:.2f} seconds. Savings: {100 * (1 - len(content) / len(last_message.content)):.2f}%')
+
+ last_message.content = content
+
+ return {"messages": messages}
+
+ def call_model(self, state: MessagesState):
+ messages = [SystemMessage(content=self.system_message)] + state["messages"]
+ response: BaseMessage = self.model.invoke(messages)
+ # We return a list, because this will get added to the existing list
+ return {"messages": [response]}
+
+class SearchAgent:
+ def __init__(
+ self,
+ model: BaseModel,
+ *,
+ metrics = None,
+ system_message: str = DEFAULT_SYSTEM_MESSAGE,
+ **kwargs
+ ):
+ tools = [discover_fields, search, aggregate, retrieve_documents]
+ tool_node = ToolNode(tools)
+
+ try:
+ model = model.bind_tools(tools)
+ except NotImplementedError:
+ pass
+
+ self.workflow_logic = SearchWorkflow(model=model, system_message=system_message, metrics=metrics)
+
+ # Define a new graph
+ workflow = StateGraph(MessagesState)
+
+ # Define the two nodes we will cycle between
+ workflow.add_node("agent", self.workflow_logic.call_model)
+ workflow.add_node("tools", tool_node)
+ workflow.add_node("summarize", self.workflow_logic.summarize)
+
+ # Set the entrypoint as `agent`
+ workflow.add_edge(START, "agent")
+
+ # Add a conditional edge
+ workflow.add_conditional_edges("agent", self.workflow_logic.should_continue)
+
+ # Add a normal edge from `tools` to `agent`
+ #workflow.add_edge("tools", "agent")
+ workflow.add_edge("tools", "summarize")
+ workflow.add_edge("summarize", "agent")
+
+ self.checkpointer = checkpoint_saver()
+ self.search_agent = workflow.compile(checkpointer=self.checkpointer)
+
+ def invoke(self, question: str, ref: str, *, docs: Optional[List[str]] = None,
+callbacks: List[BaseCallbackHandler] = [], forget: bool = False, **kwargs):
+ if forget:
+ self.checkpointer.delete_checkpoints(ref)
+
+ # If documents are provided, skip the search tools and use these docs directly
+ if docs and len(docs) > 0:
+ # Limit to 20 documents
+ docs = docs[:20]
+ # Pass documents in as context for the model
+ doc_lines = [str(doc) for doc in docs]
+ return self.search_agent.invoke(
+ {"messages": [HumanMessage(content=question + "\n" + "\n".join(doc_lines))]},
+ config={
+ "configurable": {"thread_id": ref},
+ "callbacks": callbacks},
+ **kwargs
+ )
+ else:
+ try:
+ return self.search_agent.invoke(
+ {"messages": [HumanMessage(content=question)]},
+ config={
+ "configurable": {"thread_id": ref},
+ "callbacks": callbacks,
+ "recursion_limit": MAX_RECURSION_LIMIT,
+ },
+ **kwargs
+ )
+ except GraphRecursionError as e:
+ print(f"Recursion error: {e}")
+
+ # Retrieve the messages processed so far
+ checkpoint_tuple = self.checkpointer.get_tuple({"configurable": {"thread_id": ref}})
+ state = checkpoint_tuple.checkpoint if checkpoint_tuple else None
+ messages = state.get("channel_values", {}).get("messages", []) if state else []
+
+ # Extract relevant responses including tool outputs
+ responses = []
+ for msg in messages:
+ if isinstance(msg, (BaseMessage, ToolMessage)):
+ responses.append(msg.content)
+
+ if responses:
+ # Summarize the responses so far
+ summary_prompt = f"""
+ The following is what I have discovered so far based on multiple sources.
+ Summarize the key points concisely for the user:
+
+ {responses[-5:]} # Take the last few responses
+ """
+
+ # Generate a summary using the LLM
+ summary = self.workflow_logic.model.invoke([HumanMessage(content=summary_prompt)])
+ summary_text = summary.content
+
+ # Send summary as an "answer" message before finalizing
+ for cb in callbacks:
+ if isinstance(cb, SocketCallbackHandler):
+ cb.socket.send({"type": "answer", "ref": ref, "message": summary_text})
+
+ else:
+ # Send a fallback message
+ fallback_message = "I reached my recursion limit but couldn't retrieve enough useful information."
+ for cb in callbacks:
+ if isinstance(cb, SocketCallbackHandler):
+ cb.socket.send({"type": "answer", "ref": ref, "message": fallback_message})
+
+ for cb in callbacks:
+ if hasattr(cb, "on_agent_finish"):
+ cb.on_agent_finish(finish=None, run_id=ref, **kwargs)
+ return {"type": "final", "ref": ref, "message": "Finished"}
diff --git a/chat/src/agent/tools.py b/chat/src/agent/tools.py
new file mode 100644
index 00000000..4e5de6ff
--- /dev/null
+++ b/chat/src/agent/tools.py
@@ -0,0 +1,102 @@
+import json
+
+from langchain_core.tools import tool
+from core.setup import opensearch_vector_store
+from typing import List
+
+def get_keyword_fields(properties, prefix=''):
+ """
+ Filters a nested list of opensearch mappings and returns a flat list of keyword fields
+ """
+ keyword_fields = []
+ for field_name, field_mapping in properties.items():
+ current_path = f"{prefix}{field_name}"
+ if field_mapping.get('type') == 'keyword':
+ keyword_fields.append(current_path)
+ if 'fields' in field_mapping:
+ for subfield_name, subfield_mapping in field_mapping['fields'].items():
+ if subfield_mapping.get('type') == 'keyword':
+ keyword_fields.append(f"{current_path}.{subfield_name}")
+ if 'properties' in field_mapping:
+ nested_properties = field_mapping['properties']
+ keyword_fields.extend(get_keyword_fields(nested_properties, prefix=current_path + '.'))
+ return keyword_fields
+
+def filter_results(results):
+ """
+ Filters out the embeddings from the results
+ """
+ filtered = []
+ for result in results:
+ doc = result.metadata
+ if 'embedding' in doc:
+ doc.pop('embedding')
+ filtered.append(doc)
+ return filtered
+
+@tool(response_format="content")
+def discover_fields():
+ """
+ Discover the fields available in the OpenSearch index. This tool is useful for understanding the structure of the index and the fields available for aggregation queries.
+ """
+ # filter fields that are not useful for aggregation (only include keyword fields)
+ opensearch = opensearch_vector_store()
+ fields = opensearch.client.indices.get_mapping(index=opensearch.index)
+ top_properties = list(fields.values())[0]['mappings']['properties']
+ result = get_keyword_fields(top_properties)
+ return result
+
+@tool(response_format="content")
+def search(query: str):
+ """Perform a semantic search of Northwestern University Library digital collections. When answering a search query, ground your answer in the context of the results with references to the document's metadata."""
+ query_results = opensearch_vector_store().similarity_search(query, size=20)
+ return filter_results(query_results)
+
+@tool(response_format="content")
+def aggregate(agg_field: str, term_field: str, term: str):
+ """
+ Perform a quantitative aggregation on the OpenSearch index. Use this tool for quantitative questions like "How many...?" or "What are the most common...?"
+
+ Args:
+ agg_field (str): The field to aggregate on.
+ term_field (str): The field to filter on.
+ term (str): The term to filter on.
+
+ Leave term_field and term empty to aggregate across the entire index.
+
+ Available fields:
+ You must use the discover_fields tool first to obtain the list of appropriate fields for aggregation in the index.
+
+ Do not use any fields that do not exist in the list returned by discover_fields!
+
+ See sum_other_doc_count to get the total count of documents, even if the aggregation is limited by size.
+ """
+ try:
+ response = opensearch_vector_store().aggregations_search(agg_field, term_field, term)
+ return response
+ except Exception as e:
+ return json.dumps({"error": str(e)})
+
+@tool(response_format="content")
+def retrieve_documents(doc_ids: List[str]):
+ """
+ Retrieve documents from the OpenSearch index based on a list of document IDs.
+
+ Use this instead of the search tool if the user has provided docs for context
+ and you need the full metadata, or if you're working with output from another
+ tool that only contains document IDs.
+ Provide an answer to their question based on the metadata of the documents.
+
+
+ Args:
+ doc_ids (List[str]): A list of document IDs to fetch.
+
+ Returns:
+ A JSON list of documents that match the given IDs.
+ """
+
+ try:
+ response = opensearch_vector_store().retrieve_documents(doc_ids)
+ return filter_results(response)
+ except Exception as e:
+ return {"error": str(e)}
diff --git a/chat/src/content_handler.py b/chat/src/content_handler.py
deleted file mode 100644
index b75f98b9..00000000
--- a/chat/src/content_handler.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import json
-from typing import Dict, List
-from langchain_community.embeddings.sagemaker_endpoint import EmbeddingsContentHandler
-
-class ContentHandler(EmbeddingsContentHandler):
- content_type = "application/json"
- accepts = "application/json"
-
- def transform_input(self, inputs: list[str], model_kwargs: Dict) -> bytes:
- """
- Transforms the input into bytes that can be consumed by SageMaker endpoint.
- Args:
- inputs: List of input strings.
- model_kwargs: Additional keyword arguments to be passed to the endpoint.
- Returns:
- The transformed bytes input.
- """
- # Example: inference.py expects a JSON string with a "inputs" key:
- input_str = json.dumps({"inputs": inputs, **model_kwargs})
- return input_str.encode("utf-8")
-
- def transform_output(self, output: bytes) -> List[List[float]]:
- """
- Transforms the bytes output from the endpoint into a list of embeddings.
- Args:
- output: The bytes output from SageMaker endpoint.
- Returns:
- The transformed output - list of embeddings
- Note:
- The length of the outer list is the number of input strings.
- The length of the inner lists is the embedding dimension.
- """
- # Example: inference.py returns a JSON string with the list of
- # embeddings in a "vectors" key:
- response_json = json.loads(output.read().decode("utf-8"))
- return [response_json["embedding"]]
\ No newline at end of file
diff --git a/chat/test/helpers/__init__.py b/chat/src/core/__init__.py
similarity index 100%
rename from chat/test/helpers/__init__.py
rename to chat/src/core/__init__.py
diff --git a/chat/src/helpers/apitoken.py b/chat/src/core/apitoken.py
similarity index 100%
rename from chat/src/helpers/apitoken.py
rename to chat/src/core/apitoken.py
diff --git a/chat/src/core/document.py b/chat/src/core/document.py
new file mode 100644
index 00000000..486f3aa3
--- /dev/null
+++ b/chat/src/core/document.py
@@ -0,0 +1,47 @@
+def minimize_documents(docs):
+ return [minimize_document(doc) for doc in docs]
+
+def minimize_document(doc):
+ return {
+ 'id': doc.get('id'),
+ 'title': minimize(doc.get('title')),
+ 'alternate_title': minimize(doc.get('alternate_title')),
+ 'description': minimize(doc.get('description')),
+ 'abstract': minimize(doc.get('abstract')),
+ 'subject': labels_only(doc.get('subject')),
+ 'date_created': minimize(doc.get('date_created')),
+ 'provenance': minimize(doc.get('provenance')),
+ 'collection': minimize(doc.get('collection', {}).get('title')),
+ 'creator': labels_only(doc.get('creator')),
+ 'contributor': labels_only(doc.get('contributor')),
+
+ 'work_type': minimize(doc.get('work_type')),
+ 'genre': labels_only(doc.get('genre')),
+ 'scope_and_contents': minimize(doc.get('scope_and_contents')),
+ 'table_of_contents': minimize(doc.get('table_of_contents')),
+ 'cultural_context': minimize(doc.get('cultural_context')),
+ 'notes': minimize(doc.get('notes')),
+ 'keywords': minimize(doc.get('keywords')),
+ 'visibility': minimize(doc.get('visibility')),
+ 'canonical_link': minimize(doc.get('canonical_link')),
+
+ 'rights_statement': label_only(doc.get('rights_statement')),
+ }
+
+def labels_only(list_of_fields):
+ return minimize([label_only(field) for field in list_of_fields])
+
+def label_only(field):
+ if field is None:
+ return None
+ return field.get('label_with_role', field.get('label', None))
+
+def minimize(field):
+ try:
+ if field is None:
+ return None
+ if len(field) == 0:
+ return None
+ return field
+ except TypeError:
+ return field
diff --git a/chat/src/core/event_config.py b/chat/src/core/event_config.py
new file mode 100644
index 00000000..23a223e5
--- /dev/null
+++ b/chat/src/core/event_config.py
@@ -0,0 +1,111 @@
+import json
+
+from dataclasses import dataclass, field
+
+from langchain_core.prompts import ChatPromptTemplate
+
+from core.apitoken import ApiToken
+from core.prompts import prompt_template
+from core.websocket import Websocket
+from uuid import uuid4
+from typing import Optional, List
+
+CHAIN_TYPE = "stuff"
+DOCUMENT_VARIABLE_NAME = "context"
+K_VALUE = 40
+MAX_K = 100
+MAX_TOKENS = 1000
+SIZE = 20
+TEMPERATURE = 0.2
+TEXT_KEY = "id"
+VERSION = "2024-02-01"
+
+
+@dataclass
+class EventConfig:
+ """
+ The EventConfig class represents the configuration for an event.
+ Default values are set for the following properties which can be overridden in the payload message.
+ """
+
+ api_token: ApiToken = field(init=False)
+ debug_mode: bool = field(init=False)
+ docs: Optional[List[str]] = field(init=False, default=None)
+ event: dict = field(default_factory=dict)
+ forget: bool = field(init=False)
+ is_dev_team: bool = field(init=False)
+ is_logged_in: bool = field(init=False)
+ is_superuser: bool = field(init=False)
+ k: int = field(init=False)
+ max_tokens: int = field(init=False)
+ model: str = field(init=False)
+ payload: dict = field(default_factory=dict)
+ prompt_text: str = field(init=False)
+ prompt: ChatPromptTemplate = field(init=False)
+ question: str = field(init=False)
+ ref: str = field(init=False)
+ request_context: dict = field(init=False)
+ temperature: float = field(init=False)
+ size: int = field(init=False)
+ socket: Websocket = field(init=False, default=None)
+ stream_response: bool = field(init=False)
+ text_key: str = field(init=False)
+
+ def __post_init__(self):
+ self.payload = json.loads(self.event.get("body", "{}"))
+ self.api_token = ApiToken(signed_token=self.payload.get("auth"))
+ self.debug_mode = self._is_debug_mode_enabled()
+ self.docs = self.payload.get("docs", None)
+ self.forget = self.payload.get("forget", False)
+ self.is_dev_team = self.api_token.is_dev_team()
+ self.is_logged_in = self.api_token.is_logged_in()
+ self.is_superuser = self.api_token.is_superuser()
+ self.k = self._get_k()
+ self.max_tokens = min(self.payload.get("max_tokens", MAX_TOKENS), MAX_TOKENS)
+ self.model = self._get_payload_value_with_superuser_check("model", "us.anthropic.claude-3-5-sonnet-20241022-v2:0")
+ self.prompt_text = self._get_prompt_text()
+ self.request_context = self.event.get("requestContext", {})
+ self.question = self.payload.get("question")
+ self.ref = self.payload.get("ref", uuid4().hex)
+ self.size = self._get_size()
+ self.stream_response = self.payload.get("stream_response", not self.debug_mode)
+ self.temperature = self._get_temperature()
+ self.text_key = self._get_text_key()
+ self.prompt = ChatPromptTemplate.from_template(self.prompt_text)
+
+ def _get_payload_value_with_superuser_check(self, key, default):
+ if self.api_token.is_superuser():
+ return self.payload.get(key, default)
+ else:
+ return default
+
+ def _get_k(self):
+ value = self._get_payload_value_with_superuser_check("k", K_VALUE)
+ return min(value, MAX_K)
+
+ def _get_prompt_text(self):
+ return self._get_payload_value_with_superuser_check("prompt", prompt_template())
+
+ def _get_size(self):
+ return self._get_payload_value_with_superuser_check("size", SIZE)
+
+ def _get_temperature(self):
+ return self._get_payload_value_with_superuser_check("temperature", TEMPERATURE)
+
+ def _get_text_key(self):
+ return self._get_payload_value_with_superuser_check("text_key", TEXT_KEY)
+
+ def setup_websocket(self, socket=None):
+ if socket is None:
+ connection_id = self.request_context.get("connectionId")
+ endpoint_url = f'https://{self.request_context.get("domainName")}/{self.request_context.get("stage")}'
+ self.socket = Websocket(
+ endpoint_url=endpoint_url, connection_id=connection_id, ref=self.ref
+ )
+ else:
+ self.socket = socket
+ return self.socket
+
+ def _is_debug_mode_enabled(self):
+ debug = self.payload.get("debug", False)
+ return debug and self.api_token.is_superuser()
diff --git a/chat/src/helpers/prompts.py b/chat/src/core/prompts.py
similarity index 100%
rename from chat/src/helpers/prompts.py
rename to chat/src/core/prompts.py
diff --git a/chat/src/secrets.py b/chat/src/core/secrets.py
similarity index 61%
rename from chat/src/secrets.py
rename to chat/src/core/secrets.py
index 8f69d816..83a344c6 100644
--- a/chat/src/secrets.py
+++ b/chat/src/core/secrets.py
@@ -4,18 +4,16 @@
def load_secrets():
SecretsPath = os.getenv('SECRETS_PATH')
+ ApiConfigPrefix = os.getenv('API_CONFIG_PREFIX') or SecretsPath
EnvironmentMap = [
['API_TOKEN_SECRET', 'dcapi', 'api_token_secret'],
['OPENSEARCH_ENDPOINT', 'index', 'endpoint'],
- ['OPENSEARCH_MODEL_ID', 'index', 'embedding_model'],
- ['AZURE_OPENAI_API_KEY', 'azure_openai', 'api_key'],
- ['AZURE_OPENAI_LLM_DEPLOYMENT_ID', 'azure_openai', 'llm_deployment_id'],
- ['AZURE_OPENAI_RESOURCE_NAME', 'azure_openai', 'resource_name']
+ ['OPENSEARCH_MODEL_ID', 'index', 'embedding_model']
]
- client = boto3.client("secretsmanager")
+ client = boto3.client("secretsmanager", region_name=os.getenv('AWS_REGION', 'us-east-1'))
response = client.batch_get_secret_value(SecretIdList=[
- f'{SecretsPath}/config/dcapi',
+ f'{ApiConfigPrefix}/config/dcapi',
f'{SecretsPath}/infrastructure/index',
f'{SecretsPath}/infrastructure/azure_openai'
])
@@ -32,7 +30,3 @@ def load_secrets():
if var not in os.environ and value is not None:
os.environ[var] = value
- os.environ['__SKIP_SECRETS__'] = 'true'
-
-if not os.getenv('__SKIP_SECRETS__'):
- load_secrets()
diff --git a/chat/src/core/setup.py b/chat/src/core/setup.py
new file mode 100644
index 00000000..7f1f4e07
--- /dev/null
+++ b/chat/src/core/setup.py
@@ -0,0 +1,78 @@
+from persistence.selective_checkpointer import SelectiveCheckpointer
+from search.opensearch_neural_search import OpenSearchNeuralSearch
+from langchain_aws import ChatBedrock
+from langchain_core.language_models.base import BaseLanguageModel
+from langgraph.checkpoint.base import BaseCheckpointSaver
+from opensearchpy import OpenSearch, RequestsHttpConnection
+from requests_aws4auth import AWS4Auth
+from urllib.parse import urlparse
+import os
+import boto3
+
+def chat_model(**kwargs) -> BaseLanguageModel:
+ return ChatBedrock(**kwargs)
+
+def checkpoint_saver(**kwargs) -> BaseCheckpointSaver:
+ checkpoint_bucket: str = os.getenv("CHECKPOINT_BUCKET_NAME")
+ return SelectiveCheckpointer(bucket_name=checkpoint_bucket, retain_history=False, **kwargs)
+
+def prefix(value):
+ env_prefix = os.getenv("ENV_PREFIX")
+ env_prefix = None if env_prefix == "" else env_prefix
+ return "-".join(filter(None, [env_prefix, value]))
+
+
+def opensearch_endpoint():
+ endpoint = os.getenv("OPENSEARCH_ENDPOINT")
+ parsed = urlparse(endpoint)
+ if parsed.netloc != "":
+ return parsed.netloc
+ else:
+ return endpoint
+
+
+def opensearch_client(region_name=None):
+ region_name = region_name or os.getenv("AWS_REGION") # Evaluate at runtime
+ session = boto3.Session(region_name=region_name)
+ awsauth = AWS4Auth(
+ region=region_name,
+ service="es",
+ refreshable_credentials=session.get_credentials(),
+ )
+ endpoint = opensearch_endpoint()
+
+ return OpenSearch(
+ hosts=[{"host": endpoint, "port": 443}],
+ use_ssl=True,
+ connection_class=RequestsHttpConnection,
+ http_auth=awsauth,
+ )
+
+
+def opensearch_vector_store(region_name=None):
+ region_name = region_name or os.getenv("AWS_REGION") # Evaluate at runtime
+ session = boto3.Session(region_name=region_name)
+ awsauth = AWS4Auth(
+ region=region_name,
+ service="es",
+ refreshable_credentials=session.get_credentials(),
+ )
+
+ docsearch = OpenSearchNeuralSearch(
+ index=prefix("dc-v2-work"),
+ model_id=os.getenv("OPENSEARCH_MODEL_ID"),
+ endpoint=opensearch_endpoint(),
+ connection_class=RequestsHttpConnection,
+ http_auth=awsauth,
+ text_field="id",
+ )
+ return docsearch
+
+
+def websocket_client(endpoint_url: str):
+ endpoint_url = endpoint_url or os.getenv("APIGATEWAY_URL")
+    try:
+        client = boto3.client("apigatewaymanagementapi", endpoint_url=endpoint_url)
+        return client
+    except Exception:
+        raise
diff --git a/chat/src/websocket.py b/chat/src/core/websocket.py
similarity index 78%
rename from chat/src/websocket.py
rename to chat/src/core/websocket.py
index ea682b0a..3864d2f8 100644
--- a/chat/src/websocket.py
+++ b/chat/src/core/websocket.py
@@ -1,5 +1,5 @@
import json
-from setup import websocket_client
+from core.setup import websocket_client
class Websocket:
def __init__(self, client=None, endpoint_url=None, connection_id=None, ref=None):
@@ -18,3 +18,9 @@ def send(self, data):
else:
self.client.post_to_connection(Data=data_as_bytes, ConnectionId=self.connection_id)
return data
+
+ def __str__(self):
+ return f"Websocket({self.connection_id}, {self.ref})"
+
+ def __repr__(self):
+ return str(self)
diff --git a/chat/src/event_config.py b/chat/src/event_config.py
deleted file mode 100644
index 28e09348..00000000
--- a/chat/src/event_config.py
+++ /dev/null
@@ -1,208 +0,0 @@
-import os
-import json
-
-from dataclasses import dataclass, field
-
-from langchain_core.prompts import ChatPromptTemplate
-from setup import (
- opensearch_client,
- opensearch_vector_store,
- openai_chat_client,
-)
-from typing import List
-from handlers.streaming_socket_callback_handler import StreamingSocketCallbackHandler
-from helpers.apitoken import ApiToken
-from helpers.prompts import document_template, prompt_template
-from websocket import Websocket
-
-CHAIN_TYPE = "stuff"
-DOCUMENT_VARIABLE_NAME = "context"
-K_VALUE = 40
-MAX_K = 100
-MAX_TOKENS = 1000
-SIZE = 20
-TEMPERATURE = 0.2
-TEXT_KEY = "id"
-VERSION = "2024-02-01"
-
-@dataclass
-class EventConfig:
- """
- The EventConfig class represents the configuration for an event.
- Default values are set for the following properties which can be overridden in the payload message.
- """
-
- DEFAULT_ATTRIBUTES = ["accession_number", "alternate_title", "api_link", "canonical_link", "caption", "collection",
- "contributor", "date_created", "date_created_edtf", "description", "genre", "id", "identifier",
- "keywords", "language", "notes", "physical_description_material", "physical_description_size",
- "provenance", "publisher", "rights_statement", "subject", "table_of_contents", "thumbnail",
- "title", "visibility", "work_type"]
-
- api_token: ApiToken = field(init=False)
- attributes: List[str] = field(init=False)
- azure_endpoint: str = field(init=False)
- azure_resource_name: str = field(init=False)
- debug_mode: bool = field(init=False)
- deployment_name: str = field(init=False)
- document_prompt: ChatPromptTemplate = field(init=False)
- event: dict = field(default_factory=dict)
- is_dev_team: bool = field(init=False)
- is_logged_in: bool = field(init=False)
- is_superuser: bool = field(init=False)
- k: int = field(init=False)
- max_tokens: int = field(init=False)
- openai_api_version: str = field(init=False)
- payload: dict = field(default_factory=dict)
- prompt_text: str = field(init=False)
- prompt: ChatPromptTemplate = field(init=False)
- question: str = field(init=False)
- ref: str = field(init=False)
- request_context: dict = field(init=False)
- temperature: float = field(init=False)
- size: int = field(init=False)
- socket: Websocket = field(init=False, default=None)
- stream_response: bool = field(init=False)
- text_key: str = field(init=False)
-
- def __post_init__(self):
- self.payload = json.loads(self.event.get("body", "{}"))
- self.api_token = ApiToken(signed_token=self.payload.get("auth"))
- self.attributes = self._get_attributes()
- self.azure_endpoint = self._get_azure_endpoint()
- self.azure_resource_name = self._get_azure_resource_name()
- self.debug_mode = self._is_debug_mode_enabled()
- self.deployment_name = self._get_deployment_name()
- self.is_dev_team = self.api_token.is_dev_team()
- self.is_logged_in = self.api_token.is_logged_in()
- self.is_superuser = self.api_token.is_superuser()
- self.k = self._get_k()
- self.max_tokens = min(self.payload.get("max_tokens", MAX_TOKENS), MAX_TOKENS)
- self.openai_api_version = self._get_openai_api_version()
- self.prompt_text = self._get_prompt_text()
- self.request_context = self.event.get("requestContext", {})
- self.question = self.payload.get("question")
- self.ref = self.payload.get("ref")
- self.size = self._get_size()
- self.stream_response = self.payload.get("stream_response", not self.debug_mode)
- self.temperature = self._get_temperature()
- self.text_key = self._get_text_key()
- self.document_prompt = self._get_document_prompt()
- self.prompt = ChatPromptTemplate.from_template(self.prompt_text)
-
- def _get_payload_value_with_superuser_check(self, key, default):
- if self.api_token.is_superuser():
- return self.payload.get(key, default)
- else:
- return default
-
- def _get_attributes_function(self):
- try:
- opensearch = opensearch_client()
- mapping = opensearch.indices.get_mapping(index="dc-v2-work")
- return list(next(iter(mapping.values()))['mappings']['properties'].keys())
- except StopIteration:
- return []
-
- def _get_attributes(self):
- return self._get_payload_value_with_superuser_check("attributes", self.DEFAULT_ATTRIBUTES)
-
- def _get_azure_endpoint(self):
- default = f"https://{self._get_azure_resource_name()}.openai.azure.com/"
- return self._get_payload_value_with_superuser_check("azure_endpoint", default)
-
- def _get_azure_resource_name(self):
- azure_resource_name = self._get_payload_value_with_superuser_check(
- "azure_resource_name", os.environ.get("AZURE_OPENAI_RESOURCE_NAME")
- )
- if not azure_resource_name:
- raise EnvironmentError(
- "Either payload must contain 'azure_resource_name' or environment variable 'AZURE_OPENAI_RESOURCE_NAME' must be set"
- )
- return azure_resource_name
-
- def _get_deployment_name(self):
- return self._get_payload_value_with_superuser_check(
- "deployment_name", os.getenv("AZURE_OPENAI_LLM_DEPLOYMENT_ID")
- )
-
- def _get_k(self):
- value = self._get_payload_value_with_superuser_check("k", K_VALUE)
- return min(value, MAX_K)
-
- def _get_openai_api_version(self):
- return self._get_payload_value_with_superuser_check(
- "openai_api_version", VERSION
- )
-
- def _get_prompt_text(self):
- return self._get_payload_value_with_superuser_check("prompt", prompt_template())
-
- def _get_size(self):
- return self._get_payload_value_with_superuser_check("size", SIZE)
-
- def _get_temperature(self):
- return self._get_payload_value_with_superuser_check("temperature", TEMPERATURE)
-
- def _get_text_key(self):
- return self._get_payload_value_with_superuser_check("text_key", TEXT_KEY)
-
- def _get_document_prompt(self):
- return ChatPromptTemplate.from_template(document_template(self.attributes))
-
- def debug_message(self):
- return {
- "type": "debug",
- "message": {
- "attributes": self.attributes,
- "azure_endpoint": self.azure_endpoint,
- "deployment_name": self.deployment_name,
- "k": self.k,
- "openai_api_version": self.openai_api_version,
- "prompt": self.prompt_text,
- "question": self.question,
- "ref": self.ref,
- "size": self.ref,
- "temperature": self.temperature,
- "text_key": self.text_key,
- },
- }
-
- def setup_websocket(self, socket=None):
- if socket is None:
- connection_id = self.request_context.get("connectionId")
- endpoint_url = f'https://{self.request_context.get("domainName")}/{self.request_context.get("stage")}'
- self.socket = Websocket(
- endpoint_url=endpoint_url, connection_id=connection_id, ref=self.ref
- )
- else:
- self.socket = socket
- return self.socket
-
- def setup_llm_request(self):
- self._setup_vector_store()
- self._setup_chat_client()
-
- def _setup_vector_store(self):
- self.opensearch = opensearch_vector_store()
-
- def _setup_chat_client(self):
- self.client = openai_chat_client(
- azure_deployment=self.deployment_name,
- azure_endpoint=self.azure_endpoint,
- openai_api_version=self.openai_api_version,
- callbacks=[StreamingSocketCallbackHandler(self.socket, stream=self.stream_response)],
- streaming=True,
- max_tokens=self.max_tokens
- )
-
- def _is_debug_mode_enabled(self):
- debug = self.payload.get("debug", False)
- return debug and self.api_token.is_superuser()
-
- def _to_bool(self, val):
- """Converts a value to boolean. If the value is a string, it considers
- "", "no", "false", "0" as False. Otherwise, it returns the boolean of the value.
- """
- if isinstance(val, str):
- return val.lower() not in ["", "no", "false", "0"]
- return bool(val)
diff --git a/chat/src/handlers.py b/chat/src/handlers.py
new file mode 100644
index 00000000..13c38348
--- /dev/null
+++ b/chat/src/handlers.py
@@ -0,0 +1,85 @@
+import json
+import logging
+from core.secrets import load_secrets
+from core.event_config import EventConfig
+from honeybadger import honeybadger
+from agent.search_agent import SearchAgent
+from agent.callbacks.socket import SocketCallbackHandler
+from agent.callbacks.metrics import MetricsCallbackHandler
+from core.setup import chat_model
+
+honeybadger.configure()
+logging.getLogger("honeybadger").addHandler(logging.StreamHandler())
+
+def chat_sync(event, context):
+ load_secrets()
+ config = EventConfig(event)
+
+ if not config.is_logged_in:
+ return {"statusCode": 401, "body": "Unauthorized"}
+
+ if config.question is None or config.question == "":
+ return {"statusCode": 400, "body": "Question cannot be blank"}
+
+ model = chat_model(model=config.model, streaming=False)
+ search_agent = SearchAgent(model=model)
+ result = MetricsCallbackHandler()
+ search_agent.invoke(
+ config.question, config.ref, forget=config.forget, callbacks=[result]
+ )
+
+ return {
+ "statusCode": 200,
+ "headers": {"Content-Type": "application/json"},
+ "body": json.dumps(
+ {
+ "answer": result.answers,
+ "is_dev_team": config.api_token.is_dev_team(),
+ "is_superuser": config.api_token.is_superuser(),
+ "k": config.k,
+ "model": config.model,
+ "question": config.question,
+ "ref": config.ref,
+ "artifacts": result.artifacts,
+ "token_counts": result.accumulator,
+ }
+ ),
+ }
+
+def chat(event, context):
+ load_secrets()
+ config = EventConfig(event)
+ socket = event.get("socket", None)
+ config.setup_websocket(socket)
+
+ if not (config.is_logged_in or config.is_superuser):
+ config.socket.send({"type": "error", "message": "Unauthorized"})
+ return {"statusCode": 401, "body": "Unauthorized"}
+
+ if config.question is None or config.question == "":
+ config.socket.send({"type": "error", "message": "Question cannot be blank"})
+ return {"statusCode": 400, "body": "Question cannot be blank"}
+
+ log_info = {
+ "is_dev_team": config.api_token.is_dev_team(),
+ "is_superuser": config.api_token.is_superuser(),
+ "k": config.k,
+ "model": config.model,
+ "question": config.question,
+ "ref": config.ref,
+ }
+ metrics = MetricsCallbackHandler(context.log_stream_name, extra_data=log_info)
+ callbacks = [SocketCallbackHandler(config.socket, config.ref), metrics]
+ model = chat_model(model=config.model, streaming=config.stream_response)
+ search_agent = SearchAgent(model=model, metrics=metrics)
+
+ try:
+ search_agent.invoke(config.question, config.ref, forget=config.forget, docs=config.docs, callbacks=callbacks)
+ metrics.log_metrics()
+ except Exception as e:
+ error_response = {"type": "error", "message": "An unexpected error occurred. Please try again later."}
+ if config.socket:
+ config.socket.send(error_response)
+ raise e
+
+ return {"statusCode": 200}
diff --git a/chat/src/handlers/chat.py b/chat/src/handlers/chat.py
deleted file mode 100644
index 92ea6041..00000000
--- a/chat/src/handlers/chat.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import secrets # noqa
-import boto3
-import json
-import logging
-import os
-from datetime import datetime
-from event_config import EventConfig
-from helpers.response import Response
-from honeybadger import honeybadger
-
-honeybadger.configure()
-logging.getLogger('honeybadger').addHandler(logging.StreamHandler())
-
-RESPONSE_TYPES = {
- "base": ["answer", "ref"],
- "debug": ["answer", "attributes", "azure_endpoint", "deployment_name", "is_superuser", "k", "openai_api_version", "prompt", "question", "ref", "temperature", "text_key", "token_counts"],
- "log": ["answer", "deployment_name", "is_superuser", "k", "openai_api_version", "prompt", "question", "ref", "size", "source_documents", "temperature", "token_counts", "is_dev_team"],
- "error": ["question", "error", "source_documents"]
-}
-
-def handler(event, context):
- config = EventConfig(event)
- socket = event.get('socket', None)
- config.setup_websocket(socket)
-
- if not (config.is_logged_in or config.is_superuser):
- config.socket.send({"type": "error", "message": "Unauthorized"})
- return {"statusCode": 401, "body": "Unauthorized"}
-
- if config.question is None or config.question == "":
- config.socket.send({"type": "error", "message": "Question cannot be blank"})
- return {"statusCode": 400, "body": "Question cannot be blank"}
-
- debug_message = config.debug_message()
- if config.debug_mode:
- config.socket.send(debug_message)
-
- if not os.getenv("SKIP_LLM_REQUEST"):
- config.setup_llm_request()
-
- response = Response(config)
- final_response = response.prepare_response()
- if "error" in final_response:
- logging.error(f'Error: {final_response["error"]}')
- config.socket.send({"type": "error", "message": "Internal Server Error"})
- return {"statusCode": 500, "body": "Internal Server Error"}
- else:
- config.socket.send(reshape_response(final_response, 'debug' if config.debug_mode else 'base'))
-
- log_group = os.getenv('METRICS_LOG_GROUP')
- log_stream = context.log_stream_name
- if log_group and ensure_log_stream_exists(log_group, log_stream):
- log_client = boto3.client('logs')
- log_message = reshape_response(final_response, 'log')
- log_events = [
- {
- 'timestamp': timestamp(),
- 'message': json.dumps(log_message)
- }
- ]
- log_client.put_log_events(logGroupName=log_group, logStreamName=log_stream, logEvents=log_events)
- return {"statusCode": 200}
-
-def reshape_response(response, type):
- return {k: response[k] for k in RESPONSE_TYPES[type]}
-
-def ensure_log_stream_exists(log_group, log_stream):
- log_client = boto3.client('logs')
- try:
- log_client.create_log_stream(logGroupName=log_group, logStreamName=log_stream)
- return True
- except log_client.exceptions.ResourceAlreadyExistsException:
- return True
- except Exception:
- print(f'Could not create log stream: {log_group}:{log_stream}')
- return False
-
-def timestamp():
- return round(datetime.timestamp(datetime.now()) * 1000)
\ No newline at end of file
diff --git a/chat/src/handlers/chat_sync.py b/chat/src/handlers/chat_sync.py
deleted file mode 100644
index fad1808a..00000000
--- a/chat/src/handlers/chat_sync.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import secrets # noqa
-import json
-import logging
-import os
-from http_event_config import HTTPEventConfig
-from helpers.http_response import HTTPResponse
-from honeybadger import honeybadger
-
-honeybadger.configure()
-logging.getLogger('honeybadger').addHandler(logging.StreamHandler())
-
-RESPONSE_TYPES = {
- "base": ["answer", "ref", "context"],
- "debug": ["answer", "attributes", "azure_endpoint", "deployment_name", "is_superuser", "k", "openai_api_version", "prompt", "question", "ref", "temperature", "text_key", "token_counts", "context"],
- "log": ["answer", "deployment_name", "is_superuser", "k", "openai_api_version", "prompt", "question", "ref", "size", "source_documents", "temperature", "token_counts"],
- "error": ["question", "error", "source_documents"]
-}
-
-def handler(event, context):
- config = HTTPEventConfig(event)
-
- if not config.is_logged_in:
- return {"statusCode": 401, "body": "Unauthorized"}
-
- if config.question is None or config.question == "":
- return {"statusCode": 400, "body": "Question cannot be blank"}
-
- if not os.getenv("SKIP_LLM_REQUEST"):
- config.setup_llm_request()
- response = HTTPResponse(config)
- final_response = response.prepare_response()
- if "error" in final_response:
- logging.error(f'Error: {final_response["error"]}')
- return {"statusCode": 500, "body": "Internal Server Error"}
- else:
- return {"statusCode": 200, "body": json.dumps(reshape_response(final_response, 'debug' if config.debug_mode else 'base'))}
-
- return {"statusCode": 200}
-
-def reshape_response(response, type):
- return {k: response[k] for k in RESPONSE_TYPES[type]}
\ No newline at end of file
diff --git a/chat/src/handlers/streaming_socket_callback_handler.py b/chat/src/handlers/streaming_socket_callback_handler.py
deleted file mode 100644
index 8fe32272..00000000
--- a/chat/src/handlers/streaming_socket_callback_handler.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from langchain.callbacks.base import BaseCallbackHandler
-from websocket import Websocket
-from typing import Any
-from langchain_core.outputs.llm_result import LLMResult
-
-class StreamingSocketCallbackHandler(BaseCallbackHandler):
- def __init__(self, socket: Websocket, stream: bool = True):
- self.socket = socket
- self.stream = stream
-
- def on_llm_new_token(self, token: str, **kwargs):
- if len(token) > 0 and self.socket and self.stream:
- return self.socket.send({"token": token})
-
- def on_llm_end(self, response: LLMResult, **kwargs: Any):
- try:
- finish_reason = response.generations[0][0].generation_info["finish_reason"]
- if self.socket:
- return self.socket.send({"end": {"reason": finish_reason}})
- except Exception as err:
- finish_reason = f'Unknown ({str(err)})'
- print(f"Stream ended: {finish_reason}")
diff --git a/chat/src/helpers/http_response.py b/chat/src/helpers/http_response.py
deleted file mode 100644
index 11ba2962..00000000
--- a/chat/src/helpers/http_response.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from helpers.metrics import debug_response
-from langchain_core.output_parsers import StrOutputParser
-from langchain_core.runnables import RunnableLambda, RunnablePassthrough
-
-def extract_prompt_value(v):
- if isinstance(v, list):
- return [extract_prompt_value(item) for item in v]
- elif isinstance(v, dict) and 'label' in v:
- return [v.get('label')]
- else:
- return v
-
-class HTTPResponse:
- def __init__(self, config):
- self.config = config
- self.store = {}
-
- def debug_response_passthrough(self):
- return RunnableLambda(lambda x: debug_response(self.config, x, self.original_question))
-
- def original_question_passthrough(self):
- def get_and_send_original_question(docs):
- source_documents = []
- for doc in docs["context"]:
- doc.metadata = {key: extract_prompt_value(doc.metadata.get(key)) for key in self.config.attributes if key in doc.metadata}
- source_document = doc.metadata.copy()
- source_document["content"] = doc.page_content
- source_documents.append(source_document)
-
- self.context = source_documents
-
- original_question = {
- "question": self.config.question,
- "source_documents": source_documents,
- }
-
- self.original_question = original_question
- return docs
-
- return RunnablePassthrough(get_and_send_original_question)
-
- def prepare_response(self):
- try:
- retriever = self.config.opensearch.as_retriever(search_type="similarity", search_kwargs={"k": self.config.k, "size": self.config.size, "_source": {"excludes": ["embedding"]}})
- chain = (
- {"context": retriever, "question": RunnablePassthrough()}
- | self.original_question_passthrough()
- | self.config.prompt
- | self.config.client
- | StrOutputParser()
- | self.debug_response_passthrough()
- )
- response = chain.invoke(self.config.question)
- response["context"] = self.context
- except Exception as err:
- response = {
- "question": self.config.question,
- "error": str(err),
- "source_documents": [],
- }
- return response
-
-
\ No newline at end of file
diff --git a/chat/src/helpers/metrics.py b/chat/src/helpers/metrics.py
deleted file mode 100644
index f00abc00..00000000
--- a/chat/src/helpers/metrics.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import tiktoken
-
-def debug_response(config, response, original_question):
- source_urls = [doc["api_link"] for doc in original_question.get("source_documents", [])]
-
- return {
- "answer": response,
- "attributes": config.attributes,
- "azure_endpoint": config.azure_endpoint,
- "deployment_name": config.deployment_name,
- "is_dev_team": config.api_token.is_dev_team(),
- "is_superuser": config.api_token.is_superuser(),
- "k": config.k,
- "openai_api_version": config.openai_api_version,
- "prompt": config.prompt_text,
- "question": config.question,
- "ref": config.ref,
- "size": config.size,
- "source_documents": source_urls,
- "temperature": config.temperature,
- "text_key": config.text_key,
- "token_counts": token_usage(config, response, original_question),
- }
-
-def token_usage(config, response, original_question):
- data = {
- "question": count_tokens(config.question),
- "answer": count_tokens(response),
- "prompt": count_tokens(config.prompt_text),
- "source_documents": count_tokens(original_question["source_documents"]),
- }
- data["total"] = sum(data.values())
- return data
-
-
-def count_tokens(val):
- encoding = tiktoken.encoding_for_model("gpt-4")
- token_integers = encoding.encode(str(val))
- num_tokens = len(token_integers)
-
- return num_tokens
diff --git a/chat/src/helpers/response.py b/chat/src/helpers/response.py
deleted file mode 100644
index b7c24836..00000000
--- a/chat/src/helpers/response.py
+++ /dev/null
@@ -1,65 +0,0 @@
-from helpers.metrics import debug_response
-from langchain_core.output_parsers import StrOutputParser
-from langchain_core.runnables import RunnableLambda, RunnablePassthrough
-
-def extract_prompt_value(v):
- if isinstance(v, list):
- return [extract_prompt_value(item) for item in v]
- elif isinstance(v, dict) and 'label' in v:
- return [v.get('label')]
- else:
- return v
-
-class Response:
- def __init__(self, config):
- self.config = config
- self.store = {}
-
- def debug_response_passthrough(self):
- return RunnableLambda(lambda x: debug_response(self.config, x, self.original_question))
-
- def original_question_passthrough(self):
- def get_and_send_original_question(docs):
- source_documents = []
- for doc in docs["context"]:
- doc.metadata = {key: extract_prompt_value(doc.metadata.get(key)) for key in self.config.attributes if key in doc.metadata}
- source_document = doc.metadata.copy()
- source_document["content"] = doc.page_content
- source_documents.append(source_document)
-
- socket_message = {
- "question": self.config.question,
- "source_documents": source_documents[:5]
- }
- self.config.socket.send(socket_message)
-
- original_question = {
- "question": self.config.question,
- "source_documents": source_documents
- }
- self.original_question = original_question
-
- docs["source_documents"] = source_documents
- return docs
-
- return RunnablePassthrough(get_and_send_original_question)
-
- def prepare_response(self):
- try:
- retriever = self.config.opensearch.as_retriever(search_type="similarity", search_kwargs={"k": self.config.k, "size": self.config.size, "_source": {"excludes": ["embedding"]}})
- chain = (
- {"context": retriever, "question": RunnablePassthrough()}
- | self.original_question_passthrough()
- | self.config.prompt
- | self.config.client
- | StrOutputParser()
- | self.debug_response_passthrough()
- )
- response = chain.invoke(self.config.question)
- except Exception as err:
- response = {
- "question": self.config.question,
- "error": str(err),
- "source_documents": [],
- }
- return response
diff --git a/chat/src/helpers/utils.py b/chat/src/helpers/utils.py
deleted file mode 100644
index d0d243d4..00000000
--- a/chat/src/helpers/utils.py
+++ /dev/null
@@ -1,7 +0,0 @@
-def to_bool(val):
- """Converts a value to boolean. If the value is a string, it considers
- "", "no", "false", "0" as False. Otherwise, it returns the boolean of the value.
- """
- if isinstance(val, str):
- return val.lower() not in ["", "no", "false", "0"]
- return bool(val)
diff --git a/chat/src/http_event_config.py b/chat/src/http_event_config.py
deleted file mode 100644
index 47f479aa..00000000
--- a/chat/src/http_event_config.py
+++ /dev/null
@@ -1,188 +0,0 @@
-import os
-import json
-
-from dataclasses import dataclass, field
-
-from langchain_core.prompts import ChatPromptTemplate
-from setup import (
- opensearch_client,
- opensearch_vector_store,
- openai_chat_client,
-)
-from typing import List
-from helpers.apitoken import ApiToken
-from helpers.prompts import document_template, prompt_template
-
-CHAIN_TYPE = "stuff"
-DOCUMENT_VARIABLE_NAME = "context"
-K_VALUE = 40
-MAX_K = 100
-MAX_TOKENS = 1000
-SIZE = 5
-TEMPERATURE = 0.2
-TEXT_KEY = "id"
-VERSION = "2024-02-01"
-
-@dataclass
-class HTTPEventConfig:
- """
- The EventConfig class represents the configuration for an event.
- Default values are set for the following properties which can be overridden in the payload message.
- """
-
- DEFAULT_ATTRIBUTES = ["accession_number", "alternate_title", "api_link", "canonical_link", "caption", "collection",
- "contributor", "date_created", "date_created_edtf", "description", "genre", "id", "identifier",
- "keywords", "language", "notes", "physical_description_material", "physical_description_size",
- "provenance", "publisher", "rights_statement", "subject", "table_of_contents", "thumbnail",
- "title", "visibility", "work_type"]
-
- api_token: ApiToken = field(init=False)
- attributes: List[str] = field(init=False)
- azure_endpoint: str = field(init=False)
- azure_resource_name: str = field(init=False)
- debug_mode: bool = field(init=False)
- deployment_name: str = field(init=False)
- document_prompt: ChatPromptTemplate = field(init=False)
- event: dict = field(default_factory=dict)
- is_logged_in: bool = field(init=False)
- k: int = field(init=False)
- max_tokens: int = field(init=False)
- openai_api_version: str = field(init=False)
- payload: dict = field(default_factory=dict)
- prompt_text: str = field(init=False)
- prompt: ChatPromptTemplate = field(init=False)
- question: str = field(init=False)
- ref: str = field(init=False)
- request_context: dict = field(init=False)
- temperature: float = field(init=False)
- size: int = field(init=False)
- stream_response: bool = field(init=False)
- text_key: str = field(init=False)
-
- def __post_init__(self):
- self.payload = json.loads(self.event.get("body", "{}"))
- self.api_token = ApiToken(signed_token=self.payload.get("auth"))
- self.attributes = self._get_attributes()
- self.azure_endpoint = self._get_azure_endpoint()
- self.azure_resource_name = self._get_azure_resource_name()
- self.debug_mode = self._is_debug_mode_enabled()
- self.deployment_name = self._get_deployment_name()
- self.is_logged_in = self.api_token.is_logged_in()
- self.k = self._get_k()
- self.max_tokens = min(self.payload.get("max_tokens", MAX_TOKENS), MAX_TOKENS)
- self.openai_api_version = self._get_openai_api_version()
- self.prompt_text = self._get_prompt_text()
- self.request_context = self.event.get("requestContext", {})
- self.question = self.payload.get("question")
- self.ref = self.payload.get("ref")
- self.size = self._get_size()
- self.stream_response = self.payload.get("stream_response", not self.debug_mode)
- self.temperature = self._get_temperature()
- self.text_key = self._get_text_key()
- self.document_prompt = self._get_document_prompt()
- self.prompt = ChatPromptTemplate.from_template(self.prompt_text)
-
- def _get_payload_value_with_superuser_check(self, key, default):
- if self.api_token.is_superuser():
- return self.payload.get(key, default)
- else:
- return default
-
- def _get_attributes_function(self):
- try:
- opensearch = opensearch_client()
- mapping = opensearch.indices.get_mapping(index="dc-v2-work")
- return list(next(iter(mapping.values()))['mappings']['properties'].keys())
- except StopIteration:
- return []
-
- def _get_attributes(self):
- return self._get_payload_value_with_superuser_check("attributes", self.DEFAULT_ATTRIBUTES)
-
- def _get_azure_endpoint(self):
- default = f"https://{self._get_azure_resource_name()}.openai.azure.com/"
- return self._get_payload_value_with_superuser_check("azure_endpoint", default)
-
- def _get_azure_resource_name(self):
- azure_resource_name = self._get_payload_value_with_superuser_check(
- "azure_resource_name", os.environ.get("AZURE_OPENAI_RESOURCE_NAME")
- )
- if not azure_resource_name:
- raise EnvironmentError(
- "Either payload must contain 'azure_resource_name' or environment variable 'AZURE_OPENAI_RESOURCE_NAME' must be set"
- )
- return azure_resource_name
-
- def _get_deployment_name(self):
- return self._get_payload_value_with_superuser_check(
- "deployment_name", os.getenv("AZURE_OPENAI_LLM_DEPLOYMENT_ID")
- )
-
- def _get_k(self):
- value = self._get_payload_value_with_superuser_check("k", K_VALUE)
- return min(value, MAX_K)
-
- def _get_openai_api_version(self):
- return self._get_payload_value_with_superuser_check(
- "openai_api_version", VERSION
- )
-
- def _get_prompt_text(self):
- return self._get_payload_value_with_superuser_check("prompt", prompt_template())
-
- def _get_size(self):
- return self._get_payload_value_with_superuser_check("size", SIZE)
-
- def _get_temperature(self):
- return self._get_payload_value_with_superuser_check("temperature", TEMPERATURE)
-
- def _get_text_key(self):
- return self._get_payload_value_with_superuser_check("text_key", TEXT_KEY)
-
- def _get_document_prompt(self):
- return ChatPromptTemplate.from_template(document_template(self.attributes))
-
- def debug_message(self):
- return {
- "type": "debug",
- "message": {
- "attributes": self.attributes,
- "azure_endpoint": self.azure_endpoint,
- "deployment_name": self.deployment_name,
- "k": self.k,
- "openai_api_version": self.openai_api_version,
- "prompt": self.prompt_text,
- "question": self.question,
- "ref": self.ref,
- "size": self.ref,
- "temperature": self.temperature,
- "text_key": self.text_key,
- },
- }
-
- def setup_llm_request(self):
- self._setup_vector_store()
- self._setup_chat_client()
-
- def _setup_vector_store(self):
- self.opensearch = opensearch_vector_store()
-
- def _setup_chat_client(self):
- self.client = openai_chat_client(
- azure_deployment=self.deployment_name,
- azure_endpoint=self.azure_endpoint,
- openai_api_version=self.openai_api_version,
- max_tokens=self.max_tokens
- )
-
- def _is_debug_mode_enabled(self):
- debug = self.payload.get("debug", False)
- return debug and self.api_token.is_superuser()
-
- def _to_bool(self, val):
- """Converts a value to boolean. If the value is a string, it considers
- "", "no", "false", "0" as False. Otherwise, it returns the boolean of the value.
- """
- if isinstance(val, str):
- return val.lower() not in ["", "no", "false", "0"]
- return bool(val)
diff --git a/chat/src/persistence/__init__.py b/chat/src/persistence/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/chat/src/persistence/compressible_json_serializer.py b/chat/src/persistence/compressible_json_serializer.py
new file mode 100644
index 00000000..46965dca
--- /dev/null
+++ b/chat/src/persistence/compressible_json_serializer.py
@@ -0,0 +1,67 @@
+from typing import Any, Optional, Tuple
+
+import base64
+import bz2
+import gzip
+import json
+import langchain_core.messages as langchain_messages
+from langchain_core.messages import BaseMessage
+from langgraph.checkpoint.serde.jsonplus import JsonPlusSerializer
+
+class CompressibleJsonSerializer(JsonPlusSerializer):
+ def __init__(self, compression: Optional[str] = None):
+ self.compression = compression
+
+ def dumps_typed(self, obj: Any) -> Tuple[str, Any]:
+ def default(o):
+ if isinstance(o, BaseMessage):
+ return {
+ "__type__": o.__class__.__name__,
+ "data": o.model_dump(),
+ }
+ raise TypeError(
+ f"Object of type {o.__class__.__name__} is not JSON serializable"
+ )
+
+ json_str = json.dumps(obj, default=default)
+
+ if self.compression is None:
+ return "json", json_str
+ elif self.compression == "bz2":
+ compressed_str = base64.b64encode(
+ bz2.compress(json_str.encode("utf-8"))
+ ).decode("utf-8")
+ return "bz2_json", compressed_str
+ elif self.compression == "gzip":
+ compressed_str = base64.b64encode(
+ gzip.compress(json_str.encode("utf-8"))
+ ).decode("utf-8")
+ return "gzip_json", compressed_str
+ else:
+ raise ValueError(f"Unsupported compression type: {self.compression}")
+
+ def loads_typed(self, data: Tuple[str, Any]) -> Any:
+ type_, payload = data
+
+ if type_ == "json":
+ json_str = payload
+ elif type_ == "bz2_json":
+ json_str = bz2.decompress(base64.b64decode(payload)).decode("utf-8")
+ elif type_ == "gzip_json":
+ json_str = gzip.decompress(base64.b64decode(payload)).decode("utf-8")
+ else:
+ raise ValueError(f"Unknown data type: {type_}")
+
+ def object_hook(dct):
+ if "__type__" in dct:
+ type_name = dct["__type__"]
+ data = dct["data"]
+ cls = getattr(langchain_messages, type_name, None)
+ if cls and issubclass(cls, BaseMessage):
+ return cls.model_construct(**data)
+ else:
+ raise ValueError(f"Unknown type: {type_name}")
+ return dct
+
+ obj = json.loads(json_str, object_hook=object_hook)
+ return obj
diff --git a/chat/src/persistence/s3_checkpointer.py b/chat/src/persistence/s3_checkpointer.py
new file mode 100644
index 00000000..b7435bba
--- /dev/null
+++ b/chat/src/persistence/s3_checkpointer.py
@@ -0,0 +1,343 @@
+import boto3
+import json
+import os
+import time
+from persistence.compressible_json_serializer import CompressibleJsonSerializer
+from typing import Any, Dict, Iterator, Optional, Sequence, Tuple, List
+from langchain_core.runnables import RunnableConfig
+
+from langgraph.checkpoint.base import (
+ BaseCheckpointSaver,
+ ChannelVersions,
+ Checkpoint,
+ CheckpointMetadata,
+ CheckpointTuple,
+ PendingWrite,
+ get_checkpoint_id,
+)
+
+
+def _namespace(val):
+ return "__default__" if val == "" else val
+
+def _namespace_val(namespace):
+ return "" if namespace == "__default__" else namespace
+
+def _make_s3_thread_prefix(thread_id: str) -> str:
+ return f"checkpoints/{thread_id}"
+
+def _make_s3_namespace_prefix(thread_id: str, checkpoint_ns: str) -> str:
+ prefix = _make_s3_thread_prefix(thread_id)
+ return f"{prefix}/{_namespace(checkpoint_ns)}"
+
+def _make_s3_checkpoint_prefix(thread_id: str, checkpoint_ns: str, checkpoint_id: str) -> str:
+ prefix = _make_s3_namespace_prefix(thread_id, checkpoint_ns)
+ return f"{prefix}/{checkpoint_id}"
+
+def _make_s3_checkpoint_key(thread_id: str, checkpoint_ns: str, checkpoint_id: str) -> str:
+ prefix = _make_s3_checkpoint_prefix(thread_id, checkpoint_ns, checkpoint_id)
+ return f"{prefix}/checkpoint.json"
+
+def _make_s3_write_key(thread_id: str, checkpoint_ns: str, checkpoint_id: str, task_id: str, idx: int) -> str:
+ prefix = _make_s3_checkpoint_prefix(thread_id, checkpoint_ns, checkpoint_id)
+ return f"{prefix}/writes/{task_id}/{idx}.json"
+
+def _parse_s3_checkpoint_key(key: str) -> Dict[str, str]:
+ parts = key.split("/")
+ if len(parts) < 5 or parts[4] != "checkpoint.json":
+ raise ValueError("Invalid checkpoint key format")
+ thread_id = parts[1]
+ checkpoint_ns = _namespace_val(parts[2])
+ checkpoint_id = parts[3]
+ return {
+ "thread_id": thread_id,
+ "checkpoint_ns": checkpoint_ns,
+ "checkpoint_id": checkpoint_id,
+ }
+
+
+class S3Checkpointer(BaseCheckpointSaver):
+ """S3-based checkpoint saver implementation."""
+
+ def __init__(
+ self,
+ bucket_name: str,
+        region_name: Optional[str] = os.getenv("AWS_REGION"),
+ endpoint_url: Optional[str] = None,
+ compression: Optional[str] = None,
+ ) -> None:
+ super().__init__()
+ self.serde = CompressibleJsonSerializer(compression=compression)
+ self.s3 = boto3.client('s3', region_name=region_name, endpoint_url=endpoint_url)
+ self.bucket_name = bucket_name
+
+ def put(
+ self,
+ config: RunnableConfig,
+ checkpoint: Checkpoint,
+ metadata: CheckpointMetadata,
+ new_versions: ChannelVersions,
+ ) -> RunnableConfig:
+ thread_id = config["configurable"]["thread_id"]
+ checkpoint_ns = config["configurable"].get("checkpoint_ns", "")
+ checkpoint_id = checkpoint["id"]
+ parent_checkpoint_id = config["configurable"].get("checkpoint_id")
+ key = _make_s3_checkpoint_key(thread_id, checkpoint_ns, checkpoint_id)
+
+ ck_type, ck_data = self.serde.dumps_typed(checkpoint)
+ md_type, md_data = self.serde.dumps_typed(metadata)
+
+ data = {
+ "checkpoint_type": ck_type,
+ "checkpoint_data": ck_data,
+ "metadata_data": md_data,
+ "parent_checkpoint_id": parent_checkpoint_id if parent_checkpoint_id else None,
+ "timestamp": int(time.time() * 1000),
+ }
+
+ body = json.dumps(data).encode("utf-8")
+ self.s3.put_object(Bucket=self.bucket_name, Key=key, Body=body)
+
+ return {
+ "configurable": {
+ "thread_id": thread_id,
+ "checkpoint_ns": checkpoint_ns,
+ "checkpoint_id": checkpoint_id,
+ }
+ }
+
+ def put_writes(
+ self,
+ config: RunnableConfig,
+ writes: Sequence[Tuple[str, Any]],
+ task_id: str,
+ ) -> None:
+ thread_id = config["configurable"]["thread_id"]
+        checkpoint_ns = config["configurable"].get("checkpoint_ns", "")
+ checkpoint_id = config["configurable"]["checkpoint_id"]
+
+ for idx, (channel, value) in enumerate(writes):
+ v_type, v_data = self.serde.dumps_typed(value)
+ write_data = {
+ "channel": channel,
+ "type": v_type,
+ "value": v_data,
+ "timestamp": int(time.time() * 1000),
+ }
+ write_key = _make_s3_write_key(thread_id, checkpoint_ns, checkpoint_id, task_id, idx)
+ self.s3.put_object(
+ Bucket=self.bucket_name,
+ Key=write_key,
+ Body=json.dumps(write_data).encode("utf-8"),
+ )
+
+ def get_tuple(self, config: RunnableConfig) -> Optional[CheckpointTuple]:
+ thread_id = config["configurable"]["thread_id"]
+ checkpoint_ns = config["configurable"].get("checkpoint_ns", "")
+ checkpoint_id = get_checkpoint_id(config)
+
+ if checkpoint_id is None:
+ checkpoint_id = self._get_latest_checkpoint_id(thread_id, checkpoint_ns)
+ if checkpoint_id is None:
+ return None
+
+ key = _make_s3_checkpoint_key(thread_id, checkpoint_ns, checkpoint_id)
+ try:
+ obj = self.s3.get_object(Bucket=self.bucket_name, Key=key)
+ except self.s3.exceptions.NoSuchKey:
+ return None
+
+ data = json.loads(obj["Body"].read().decode("utf-8"))
+
+ checkpoint_type = data["checkpoint_type"]
+ checkpoint_data = data["checkpoint_data"]
+ checkpoint = self.serde.loads_typed((checkpoint_type, checkpoint_data))
+
+ metadata_data = data.get("metadata_data")
+ if metadata_data is None:
+ raise ValueError("Metadata is missing in checkpoint data")
+
+ metadata = self.serde.loads_typed((checkpoint_type, metadata_data))
+
+ parent_checkpoint_id = data.get("parent_checkpoint_id")
+ if parent_checkpoint_id:
+ parent_config = {
+ "configurable": {
+ "thread_id": thread_id,
+ "checkpoint_ns": checkpoint_ns,
+ "checkpoint_id": parent_checkpoint_id,
+ }
+ }
+ else:
+ parent_config = None
+
+ pending_writes = self._load_pending_writes(thread_id, checkpoint_ns, checkpoint_id)
+
+ return CheckpointTuple(
+ {
+ "configurable": {
+ "thread_id": thread_id,
+ "checkpoint_ns": checkpoint_ns,
+ "checkpoint_id": checkpoint_id,
+ }
+ },
+ checkpoint,
+ metadata,
+ parent_config,
+ pending_writes,
+ )
+
+ def list(
+ self,
+ config: Optional[RunnableConfig],
+ *,
+ filter: Optional[Dict[str, Any]] = None,
+ before: Optional[RunnableConfig] = None,
+ limit: Optional[int] = None,
+ ) -> Iterator[CheckpointTuple]:
+ if config is None:
+ raise ValueError("config must be provided for listing checkpoints in S3")
+
+ thread_id = config["configurable"]["thread_id"]
+ checkpoint_ns = config["configurable"].get("checkpoint_ns", "")
+ prefix = _make_s3_namespace_prefix(thread_id, checkpoint_ns)
+
+ paginator = self.s3.get_paginator("list_objects_v2")
+ pages = paginator.paginate(Bucket=self.bucket_name, Prefix=f"{prefix}/")
+
+ keys = []
+ for page in pages:
+ contents = page.get("Contents", [])
+ for c in contents:
+ key = c["Key"]
+ if key.endswith(".json") and "/writes/" not in key:
+ keys.append(key)
+
+ keys_info = [_parse_s3_checkpoint_key(k) for k in keys]
+ keys_info.sort(key=lambda x: x["checkpoint_id"], reverse=True)
+
+ if before:
+ before_id = before["configurable"]["checkpoint_id"]
+ keys_info = [ki for ki in keys_info if ki["checkpoint_id"] < before_id]
+
+ if limit is not None:
+ keys_info = keys_info[:limit]
+
+ for ki in keys_info:
+ ck_key = _make_s3_checkpoint_key(ki["thread_id"], ki["checkpoint_ns"], ki["checkpoint_id"])
+ obj = self.s3.get_object(Bucket=self.bucket_name, Key=ck_key)
+ data = json.loads(obj["Body"].read().decode("utf-8"))
+
+ checkpoint_type = data["checkpoint_type"]
+ checkpoint_data = data["checkpoint_data"]
+ checkpoint = self.serde.loads_typed((checkpoint_type, checkpoint_data))
+
+ # Derive metadata_type from checkpoint_type as above
+ if checkpoint_type.startswith('bz2'):
+ metadata_type = 'bz2_json'
+ elif checkpoint_type.startswith('gzip'):
+ metadata_type = 'gzip_json'
+ else:
+ metadata_type = 'json'
+ metadata_data = data["metadata_data"]
+ metadata = self.serde.loads_typed((metadata_type, metadata_data))
+
+ parent_checkpoint_id = data.get("parent_checkpoint_id")
+ if parent_checkpoint_id:
+ parent_config = {
+ "configurable": {
+ "thread_id": ki["thread_id"],
+ "checkpoint_ns": ki["checkpoint_ns"],
+ "checkpoint_id": parent_checkpoint_id,
+ }
+ }
+ else:
+ parent_config = None
+
+ pending_writes = self._load_pending_writes(
+ ki["thread_id"], ki["checkpoint_ns"], ki["checkpoint_id"]
+ )
+
+ yield CheckpointTuple(
+ {
+ "configurable": {
+ "thread_id": ki["thread_id"],
+ "checkpoint_ns": ki["checkpoint_ns"],
+ "checkpoint_id": ki["checkpoint_id"],
+ }
+ },
+ checkpoint,
+ metadata,
+ parent_config,
+ pending_writes,
+ )
+
+ def _get_latest_checkpoint_id(self, thread_id: str, checkpoint_ns: str) -> Optional[str]:
+ prefix = _make_s3_namespace_prefix(thread_id, checkpoint_ns)
+ paginator = self.s3.get_paginator("list_objects_v2")
+ pages = paginator.paginate(Bucket=self.bucket_name, Prefix=f"{prefix}/")
+ keys = []
+ for page in pages:
+ for c in page.get("Contents", []):
+ key = c["Key"]
+ if key.endswith(".json") and "/writes/" not in key:
+ keys.append(key)
+
+ if not keys:
+ return None
+
+ keys_info = [_parse_s3_checkpoint_key(k) for k in keys]
+ keys_info.sort(key=lambda x: x["checkpoint_id"], reverse=True)
+ latest_id = keys_info[0]["checkpoint_id"] if keys_info else None
+ return latest_id
+
+ def _load_pending_writes(self, thread_id: str, checkpoint_ns: str, checkpoint_id: str) -> List[PendingWrite]:
+ prefix = _make_s3_checkpoint_prefix(thread_id, checkpoint_ns, checkpoint_id) + "/writes/"
+ paginator = self.s3.get_paginator("list_objects_v2")
+ pages = paginator.paginate(Bucket=self.bucket_name, Prefix=prefix)
+
+ writes = []
+ for page in pages:
+ for c in page.get("Contents", []):
+ wkey = c["Key"]
+ parts = wkey.split("/")
+ if len(parts) < 7:
+ continue
+ task_id = parts[5]
+ wobj = self.s3.get_object(Bucket=self.bucket_name, Key=wkey)
+ wdata = json.loads(wobj["Body"].read().decode("utf-8"))
+ channel = wdata["channel"]
+ value_type = wdata["type"]
+ value_data = wdata["value"]
+ value = self.serde.loads_typed((value_type, value_data))
+ writes.append((task_id, channel, value))
+
+ return writes
+
+ def delete_checkpoints(self, thread_id: str) -> None:
+ """
+ Deletes all items with the specified thread_id from the checkpoint bucket.
+
+ Args:
+ thread_id: The thread_id value to delete
+ """
+ def delete_objects(objects: dict) -> None:
+ if objects['Objects']:
+ self.s3.delete_objects(Bucket=self.bucket_name, Delete=objects)
+
+ paginator = self.s3.get_paginator("list_objects_v2")
+ prefix = f"checkpoints/{thread_id}/"
+ pages = paginator.paginate(Bucket=self.bucket_name, Prefix=prefix)
+
+ to_delete = {'Objects': []}
+ for item in pages.search('Contents'):
+ if item is not None:
+ to_delete['Objects'].append({'Key': item['Key']})
+
+ # Batch deletions in groups of 1000 (S3's limit)
+ if len(to_delete['Objects']) >= 1000:
+ delete_objects(to_delete)
+ to_delete['Objects'] = []
+
+ # Delete any remaining objects
+ delete_objects(to_delete)
\ No newline at end of file
diff --git a/chat/src/persistence/selective_checkpointer.py b/chat/src/persistence/selective_checkpointer.py
new file mode 100644
index 00000000..61ed2c90
--- /dev/null
+++ b/chat/src/persistence/selective_checkpointer.py
@@ -0,0 +1,79 @@
+import os
+from typing import Optional
+from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
+from langchain_core.runnables import RunnableConfig
+from langgraph.checkpoint.base import (
+ ChannelVersions,
+ Checkpoint,
+ CheckpointMetadata
+)
+from persistence.s3_checkpointer import S3Checkpointer
+
+
+# Split messages into interactions, each one starting with a HumanMessage
+def _split_interactions(messages):
+ if messages is None:
+ return []
+
+ interactions = []
+ current_interaction = []
+
+ for message in messages:
+ if isinstance(message, HumanMessage) and current_interaction:
+ interactions.append(current_interaction)
+ current_interaction = []
+ current_interaction.append(message)
+
+ if current_interaction:
+ interactions.append(current_interaction)
+
+ return interactions
+
+def _is_tool_message(message):
+ if isinstance(message, ToolMessage):
+ return True
+ if isinstance(message, AIMessage) and message.response_metadata.get('stop_reason', '') == 'tool_use':
+ return True
+ return False
+
+def _prune_messages(messages):
+ interactions = _split_interactions(messages)
+ # Remove all tool-related messages except those related to the most recent interaction
+ for i, interaction in enumerate(interactions[:-1]):
+ interactions[i] = [message for message in interaction if not _is_tool_message(message)]
+
+ # Return the flattened list of messages
+ return [message for interaction in interactions for message in interaction]
+
+class SelectiveCheckpointer(S3Checkpointer):
+ """S3 Checkpointer that discards ToolMessages from previous checkpoints."""
+
+ def __init__(
+ self,
+ bucket_name: str,
+        region_name: Optional[str] = os.getenv("AWS_REGION"),
+ endpoint_url: Optional[str] = None,
+ compression: Optional[str] = None,
+ retain_history: Optional[bool] = True,
+ ) -> None:
+ super().__init__(bucket_name, region_name, endpoint_url, compression)
+ self.retain_history = retain_history
+
+ def put(
+ self,
+ config: RunnableConfig,
+ checkpoint: Checkpoint,
+ metadata: CheckpointMetadata,
+ new_versions: ChannelVersions,
+ ) -> RunnableConfig:
+ # Remove previous checkpoints
+ thread_id = config["configurable"]["thread_id"]
+ if not self.retain_history:
+ self.delete_checkpoints(thread_id)
+
+ # Remove all ToolMessages except those related to the most
+ # recent question (HumanMessage)
+        channel_values = checkpoint.setdefault("channel_values", {})
+        channel_values["messages"] = _prune_messages(channel_values.get("messages", []))
+
+ return super().put(config, checkpoint, metadata, new_versions)
\ No newline at end of file
diff --git a/chat/src/requirements-dev.txt b/chat/src/requirements-dev.txt
index 13699722..c03376d2 100644
--- a/chat/src/requirements-dev.txt
+++ b/chat/src/requirements-dev.txt
@@ -1,3 +1,5 @@
# Dev/Test Dependencies
-ruff~=0.1.0
-coverage~=7.3.2
+moto~=5.0
+pytest~=8.3
+ruff~=0.2
+coverage~=7.3
diff --git a/chat/src/requirements.txt b/chat/src/requirements.txt
index 79a4f375..748ab395 100644
--- a/chat/src/requirements.txt
+++ b/chat/src/requirements.txt
@@ -1,14 +1,15 @@
# Runtime Dependencies
boto3~=1.34
-honeybadger
+honeybadger~=0.20
langchain~=0.2
-langchain-aws~=0.1
+langchain-aws~=0.2
langchain-openai~=0.1
+langgraph~=0.2
openai~=1.35
-opensearch-py
+opensearch-py~=2.8
pyjwt~=2.6.0
python-dotenv~=1.0.0
-requests
-requests-aws4auth
+requests~=2.32
+requests-aws4auth~=1.3
tiktoken~=0.7
wheel~=0.40
\ No newline at end of file
diff --git a/chat/src/search/__init__.py b/chat/src/search/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/chat/src/helpers/hybrid_query.py b/chat/src/search/hybrid_query.py
similarity index 71%
rename from chat/src/helpers/hybrid_query.py
rename to chat/src/search/hybrid_query.py
index 5850f24e..bb2fae44 100644
--- a/chat/src/helpers/hybrid_query.py
+++ b/chat/src/search/hybrid_query.py
@@ -14,6 +14,15 @@ def filter(query: dict):
def hybrid_query(query: str, model_id: str, vector_field: str = "embedding", k: int = 40, **kwargs: Any):
result = {
"size": kwargs.get("size", 20),
+        "_source": {
+            "includes": ["abstract", "accession_number", "alternate_title", "api_link", "ark", "canonical_link",
+                        "collection", "contributor", "creator", "date_created_edtf", "description", "genre",
+                        "id", "iiif_manifest", "language", "library_unit", "license", "location", "physical_description_material",
+                        "physical_description_size", "provenance", "publisher", "rights_holder", "rights_statement",
+                        "scope_and_contents", "series", "style_period", "subject", "table_of_contents", "technique",
+                        "thumbnail", "title", "visibility", "work_type"],
+            "excludes": ["embedding", "embedding_model"]
+        },
"query": {
"hybrid": {
"queries": [
diff --git a/chat/src/handlers/opensearch_neural_search.py b/chat/src/search/opensearch_neural_search.py
similarity index 60%
rename from chat/src/handlers/opensearch_neural_search.py
rename to chat/src/search/opensearch_neural_search.py
index 0530eab0..55e38dee 100644
--- a/chat/src/handlers/opensearch_neural_search.py
+++ b/chat/src/search/opensearch_neural_search.py
@@ -2,7 +2,7 @@
from langchain_core.vectorstores import VectorStore
from opensearchpy import OpenSearch
from typing import Any, List, Tuple
-from helpers.hybrid_query import hybrid_query
+from search.hybrid_query import hybrid_query, filter
class OpenSearchNeuralSearch(VectorStore):
"""Read-only OpenSearch vectorstore with neural search."""
@@ -55,6 +55,44 @@ def similarity_search_with_score(
return documents_with_scores
+ def aggregations_search(self, agg_field: str, term_field: str = None, term: str = None, **kwargs: Any) -> dict:
+ """Perform a search with aggregations and return the aggregation results."""
+ base_query = {"match_all": {}} if (term is None or term == "") else {"match": {term_field: term}}
+ filtered_query = filter(base_query)
+
+ dsl = {
+ "size": 0,
+ "query": filtered_query,
+ "aggs": {"aggregation_result": {"terms": {"field": agg_field}}},
+ }
+
+ response = self.client.search(
+ index=self.index,
+ body=dsl,
+ params=(
+ {"search_pipeline": self.search_pipeline}
+ if self.search_pipeline
+ else None
+ ),
+ )
+
+ return response.get("aggregations", {})
+
+ def retrieve_documents(self, doc_ids: List[str]) -> List[Document]:
+ """Retrieve documents from the OpenSearch index based on a list of document IDs."""
+ query = {
+ "query": {
+ "ids": {
+ "values": doc_ids
+ }
+ }
+ }
+
+ response = self.client.search(index=self.index, body=query, size=len(doc_ids))
+ documents = [Document(page_content=hit["_source"][self.text_field], metadata=hit["_source"]) for hit in response["hits"]["hits"]]
+ return documents
+
+
def add_texts(self, texts: List[str], metadatas: List[dict], **kwargs: Any) -> None:
pass
diff --git a/chat/src/setup.py b/chat/src/setup.py
deleted file mode 100644
index 60cd6f91..00000000
--- a/chat/src/setup.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from langchain_openai import AzureChatOpenAI
-from handlers.opensearch_neural_search import OpenSearchNeuralSearch
-from opensearchpy import OpenSearch, RequestsHttpConnection
-from requests_aws4auth import AWS4Auth
-from urllib.parse import urlparse
-import os
-import boto3
-
-def prefix(value):
- env_prefix = os.getenv("ENV_PREFIX")
- env_prefix = None if env_prefix == "" else env_prefix
- return '-'.join(filter(None, [env_prefix, value]))
-
-def openai_chat_client(**kwargs):
- return AzureChatOpenAI(
- openai_api_key=os.getenv("AZURE_OPENAI_API_KEY"),
- **kwargs,
- )
-
-def opensearch_endpoint():
- endpoint = os.getenv("OPENSEARCH_ENDPOINT")
- parsed = urlparse(endpoint)
- if parsed.netloc != '':
- return parsed.netloc
- else:
- return endpoint
-
-def opensearch_client(region_name=os.getenv("AWS_REGION")):
- session = boto3.Session(region_name=region_name)
- awsauth = AWS4Auth(region=region_name, service="es", refreshable_credentials=session.get_credentials())
- endpoint = opensearch_endpoint()
-
- return OpenSearch(
- hosts=[{'host': endpoint, 'port': 443}],
- use_ssl = True,
- connection_class=RequestsHttpConnection,
- http_auth=awsauth,
- )
-
-def opensearch_vector_store(region_name=os.getenv("AWS_REGION")):
- session = boto3.Session(region_name=region_name)
- awsauth = AWS4Auth(region=region_name, service="es", refreshable_credentials=session.get_credentials())
-
- docsearch = OpenSearchNeuralSearch(
- index=prefix("dc-v2-work"),
- model_id=os.getenv("OPENSEARCH_MODEL_ID"),
- endpoint=opensearch_endpoint(),
- connection_class=RequestsHttpConnection,
- http_auth=awsauth,
- text_field= "id"
- )
- return docsearch
-
-
-def websocket_client(endpoint_url: str):
- endpoint_url = endpoint_url or os.getenv("APIGATEWAY_URL")
- try:
- client = boto3.client("apigatewaymanagementapi", endpoint_url=endpoint_url)
- return client
- except Exception as e:
- raise e
\ No newline at end of file
diff --git a/chat/template.yaml b/chat/template.yaml
index 2617a07b..bd46f5d3 100644
--- a/chat/template.yaml
+++ b/chat/template.yaml
@@ -1,7 +1,11 @@
AWSTemplateFormatVersion: "2010-09-09"
Transform: AWS::Serverless-2016-10-31
-Description: Websocket Chat API for dc-api-v2
+Description: dc-api-v2 Websocket Chat API
Parameters:
+ ApiConfigPrefix:
+ Type: String
+ Description: Secret Name for API Configuration (if not provided, will use SecretsPath)
+ Default: ""
ApiTokenName:
Type: String
Description: Name of the jwt that DC API issues
@@ -49,9 +53,17 @@ Resources:
ChatWebSocket:
Type: AWS::ApiGatewayV2::Api
Properties:
- Name: ChatWebSocket
+ Name: !Sub "${AWS::StackName}-Api"
ProtocolType: WEBSOCKET
RouteSelectionExpression: "$request.body.message"
+ CheckpointBucket:
+ Type: 'AWS::S3::Bucket'
+ Properties:
+ PublicAccessBlockConfiguration:
+ BlockPublicAcls: true
+ BlockPublicPolicy: true
+ IgnorePublicAcls: true
+ RestrictPublicBuckets: true
ConnectRoute:
Type: AWS::ApiGatewayV2::Route
Properties:
@@ -199,17 +211,20 @@ Resources:
#* Layers:
#* - !Ref ChatDependencies
MemorySize: 1024
- Handler: handlers/chat.handler
+ Handler: handlers.chat
Timeout: 300
Environment:
Variables:
+ API_CONFIG_PREFIX: !Ref ApiConfigPrefix
API_TOKEN_NAME: !Ref ApiTokenName
+ CHECKPOINT_BUCKET_NAME: !Ref CheckpointBucket
ENV_PREFIX: !Ref EnvironmentPrefix
HONEYBADGER_API_KEY: !Ref HoneybadgerApiKey
HONEYBADGER_ENVIRONMENT: !Ref HoneybadgerEnv
HONEYBADGER_REVISION: !Ref HoneybadgerRevision
METRICS_LOG_GROUP: !Ref ChatMetricsLog
SECRETS_PATH: !Ref SecretsPath
+ NO_COLOR: 1
Policies:
- !Ref SecretsPolicy
- Statement:
@@ -230,6 +245,23 @@ Resources:
- logs:CreateLogStream
- logs:PutLogEvents
Resource: !Sub "${ChatMetricsLog.Arn}:*"
+ - Statement:
+ - Effect: Allow
+ Action:
+ - s3:GetObject
+ - s3:ListBucket
+ - s3:PutObject
+ - s3:DeleteObject
+              # s3:ListBucket already granted above; duplicate removed
+ Resource:
+ - !Sub "arn:aws:s3:::${CheckpointBucket}"
+ - !Sub "arn:aws:s3:::${CheckpointBucket}/*"
+ - Statement:
+ - Effect: Allow
+ Action:
+ - bedrock:InvokeModel
+ - bedrock:InvokeModelWithResponseStream
+ Resource: "*"
#* Metadata:
#* BuildMethod: nodejs20.x
ChatSyncFunction:
@@ -242,17 +274,20 @@ Resources:
#* Layers:
#* - !Ref ChatDependencies
MemorySize: 1024
- Handler: handlers/chat_sync.handler
+ Handler: handlers.chat_sync
Timeout: 300
Environment:
Variables:
+ API_CONFIG_PREFIX: !Ref ApiConfigPrefix
API_TOKEN_NAME: !Ref ApiTokenName
+ CHECKPOINT_BUCKET_NAME: !Ref CheckpointBucket
ENV_PREFIX: !Ref EnvironmentPrefix
HONEYBADGER_API_KEY: !Ref HoneybadgerApiKey
HONEYBADGER_ENVIRONMENT: !Ref HoneybadgerEnv
HONEYBADGER_REVISION: !Ref HoneybadgerRevision
METRICS_LOG_GROUP: !Ref ChatMetricsLog
SECRETS_PATH: !Ref SecretsPath
+ NO_COLOR: 1
FunctionUrlConfig:
AuthType: AWS_IAM
Policies:
@@ -268,7 +303,24 @@ Resources:
Action:
- logs:CreateLogStream
- logs:PutLogEvents
- Resource: !Sub "${ChatMetricsLog.Arn}:*"
+ Resource: !Sub "${ChatMetricsLog.Arn}:*"
+ - Statement:
+ - Effect: Allow
+ Action:
+ - s3:GetObject
+ - s3:ListBucket
+ - s3:PutObject
+ - s3:DeleteObject
+              # s3:ListBucket already granted above; duplicate removed
+ Resource:
+ - !Sub "arn:aws:s3:::${CheckpointBucket}"
+ - !Sub "arn:aws:s3:::${CheckpointBucket}/*"
+ - Statement:
+ - Effect: Allow
+ Action:
+ - bedrock:InvokeModel
+ - bedrock:InvokeModelWithResponseStream
+ Resource: "*"
#* Metadata:
#* BuildMethod: nodejs20.x
ChatMetricsLog:
diff --git a/chat/test/agent/__init__.py b/chat/test/agent/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/chat/test/agent/callbacks/__init__.py b/chat/test/agent/callbacks/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/chat/test/agent/callbacks/test_metrics.py b/chat/test/agent/callbacks/test_metrics.py
new file mode 100644
index 00000000..81ebcc5f
--- /dev/null
+++ b/chat/test/agent/callbacks/test_metrics.py
@@ -0,0 +1,181 @@
+from unittest import TestCase
+from unittest.mock import patch
+import json
+import sys
+
+sys.path.append("./src")
+
+from agent.callbacks.metrics import MetricsCallbackHandler
+
+class TestSocketCallbackHandler(TestCase):
+ def setUp(self):
+ self.ref = "test_ref"
+ self.handler = MetricsCallbackHandler()
+
+ def test_on_llm_end_with_content(self):
+ # Mocking LLMResult and Generations
+ class MockMessage:
+ def __init__(self, text, response_metadata={}, usage_metadata={}):
+ self.text = text
+ self.message = self # For simplicity, reuse same object for .message
+ self.response_metadata = response_metadata
+ self.usage_metadata = usage_metadata
+
+ class MockLLMResult:
+ def __init__(self, text, stop_reason="end_turn"):
+ response_metadata = {"stop_reason": stop_reason}
+ usage_metadata = {"input_tokens": 10, "output_tokens": 20, "total_tokens": 30}
+ message = MockMessage(text, response_metadata, usage_metadata)
+ self.generations = [[message]]
+
+ # When response has content and end_turn stop reason
+ response = MockLLMResult("Here is the answer", stop_reason="end_turn")
+ with patch.object(self.handler, "on_llm_end", wraps=self.handler.on_llm_end) as mock:
+ self.handler.on_llm_end(response)
+ mock.assert_called_once_with(response)
+ self.assertEqual(self.handler.answers, ["Here is the answer"])
+ self.assertEqual(self.handler.accumulator, {"input_tokens": 10, "output_tokens": 20, "total_tokens": 30})
+
+ def test_on_tool_end_search(self):
+ # Mock tool output
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = json.dumps(content)
+
+ content = [
+ {
+ "id": 1,
+ "api_link": "https://example.edu/item/1",
+ "title": "Result 1",
+ "visibility": "public",
+ "work_type": "article",
+ "thumbnail": "img1",
+ },
+ {
+ "id": 2,
+ "api_link": "https://example.edu/item/2",
+ "title": "Result 2",
+ "visibility": "private",
+ "work_type": "document",
+ "thumbnail": "img2",
+ },
+ ]
+
+ output = MockToolMessage("search", content)
+ self.handler.on_tool_end(output)
+ self.assertEqual(self.handler.artifacts, [{"type": "source_urls", "artifact": ["https://example.edu/item/1", "https://example.edu/item/2"]}])
+
+ def test_on_tool_end_aggregate(self):
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = json.dumps(content)
+
+ output = MockToolMessage("aggregate", {"aggregation_result": {"count": 10}})
+ self.handler.on_tool_end(output)
+ self.assertEqual(self.handler.artifacts, [{"type": "aggregation", "artifact": {"count": 10}}])
+
+ def test_on_tool_end_discover_fields(self):
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = json.dumps(content)
+
+ output = MockToolMessage("discover_fields", {})
+ self.handler.on_tool_end(output)
+ self.assertEqual(self.handler.artifacts, [])
+
+ def test_on_llm_end_with_none_response(self):
+ self.handler.on_llm_end(None)
+ self.assertEqual(self.handler.answers, [])
+ self.assertEqual(self.handler.accumulator, {})
+
+ def test_on_llm_end_with_empty_generations(self):
+ class MockLLMResult:
+ generations = [[]] # Empty list
+
+ response = MockLLMResult()
+ self.handler.on_llm_end(response)
+ self.assertEqual(self.handler.answers, [])
+ self.assertEqual(self.handler.accumulator, {})
+
+ def test_on_llm_end_with_empty_text(self):
+ class MockMessage:
+ def __init__(self):
+ self.text = ""
+ self.message = self
+ self.usage_metadata = {"input_tokens": 5}
+
+ class MockLLMResult:
+ generations = [[MockMessage()]]
+
+ response = MockLLMResult()
+ self.handler.on_llm_end(response)
+ self.assertEqual(self.handler.answers, [])
+ self.assertEqual(self.handler.accumulator, {"input_tokens": 5})
+
+ def test_on_llm_end_missing_message(self):
+ class MockMessage:
+ def __init__(self):
+ self.text = "No message attribute"
+
+ class MockLLMResult:
+ generations = [[MockMessage()]]
+
+ response = MockLLMResult()
+ self.handler.on_llm_end(response)
+ self.assertEqual(self.handler.answers, ["No message attribute"])
+ self.assertEqual(self.handler.accumulator, {})
+
+ def test_on_tool_end_invalid_json(self):
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = content
+
+ # Invalid content with a mocked `metadata` attribute
+ invalid_content = 'example_content'
+
+ output = MockToolMessage("search", invalid_content)
+
+ with patch("builtins.print") as mock_print:
+ self.handler.on_tool_end(output)
+ mock_print.assert_called_once_with(
+ "Invalid json (Expecting value: line 1 column 1 (char 0)) returned from search tool: example_content"
+ )
+ self.assertEqual(self.handler.artifacts, [])
+
+ def test_on_tool_end_unrecognized_tool(self):
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = json.dumps(content)
+
+ output = MockToolMessage("unknown_tool", {})
+ self.handler.on_tool_end(output)
+ self.assertEqual(self.handler.artifacts, [])
+
+ def test_on_llm_end_with_none_metadata(self):
+ """
+ Test the on_llm_end method when usage_metadata is None.
+ Ensures that answers are processed correctly and accumulator remains unchanged.
+ """
+ # Mocking LLMResult with usage_metadata as None
+ class MockMessage:
+ def __init__(self, text, usage_metadata=None):
+ self.text = text
+ self.message = self # For simplicity, reuse same object for .message
+ self.usage_metadata = usage_metadata
+
+ class MockLLMResult:
+ def __init__(self, text):
+ message = MockMessage(text, usage_metadata=None)
+ self.generations = [[message]]
+
+ response = MockLLMResult("Answer without metadata")
+ with patch.object(self.handler, "on_llm_end", wraps=self.handler.on_llm_end) as mock:
+ self.handler.on_llm_end(response)
+ mock.assert_called_once_with(response)
+ self.assertEqual(self.handler.answers, ["Answer without metadata"])
+ self.assertEqual(self.handler.accumulator, {})
\ No newline at end of file
diff --git a/chat/test/agent/callbacks/test_socket.py b/chat/test/agent/callbacks/test_socket.py
new file mode 100644
index 00000000..93c06b1f
--- /dev/null
+++ b/chat/test/agent/callbacks/test_socket.py
@@ -0,0 +1,165 @@
+import unittest
+from unittest import TestCase
+from unittest.mock import MagicMock
+import sys
+
+sys.path.append("./src")
+
+from agent.callbacks.socket import SocketCallbackHandler
+
+class MockClient:
+ def __init__(self):
+ self.received = []
+ def post_to_connection(self, Data, ConnectionId):
+ self.received.append(Data)
+ return Data
+
+class TestSocketCallbackHandler(TestCase):
+ def setUp(self):
+ self.mock_socket = MagicMock()
+ self.ref = "test_ref"
+ self.handler = SocketCallbackHandler(socket=self.mock_socket, ref=self.ref)
+
+ def test_on_llm_start(self):
+ # Given metadata that includes model name
+ metadata = {"ls_model_name": "test_model"}
+
+ # When on_llm_start is called
+ self.handler.on_llm_start(serialized={}, prompts=["Hello"], metadata=metadata)
+
+ # Then verify the socket was called with the correct start message
+ self.mock_socket.send.assert_called_once_with({
+ "type": "start",
+ "ref": self.ref,
+ "message": {"model": "test_model"}
+ })
+
+ def test_on_llm_end_with_content(self):
+ # Mocking LLMResult and Generations
+ class MockMessage:
+ def __init__(self, text, response_metadata):
+ self.text = text
+ self.message = self # For simplicity, reuse same object for .message
+ self.response_metadata = response_metadata
+
+ class MockLLMResult:
+ def __init__(self, text, stop_reason="end_turn"):
+ self.generations = [[MockMessage(text, {"stop_reason": stop_reason})]]
+
+ # When response has content and end_turn stop reason
+ response = MockLLMResult("Here is the answer", stop_reason="end_turn")
+ self.handler.on_llm_end(response)
+
+ # Verify "stop" and "answer" and then "final_message" were sent
+ expected_calls = [
+ unittest.mock.call({"type": "stop", "ref": self.ref}),
+ unittest.mock.call({"type": "answer", "ref": self.ref, "message": "Here is the answer"}),
+ unittest.mock.call({"type": "final_message", "ref": self.ref})
+ ]
+ self.mock_socket.send.assert_has_calls(expected_calls, any_order=False)
+
+ def test_on_llm_new_token(self):
+ # When a new token arrives
+ self.handler.on_llm_new_token("hello")
+
+ # Then verify the socket sent a token message
+ self.mock_socket.send.assert_called_once_with({
+ "type": "token",
+ "ref": self.ref,
+ "message": "hello"
+ })
+
+ def test_on_tool_start(self):
+ # When tool starts
+ self.handler.on_tool_start({"name": "test_tool"}, "input_value")
+
+ # Verify the tool_start message
+ self.mock_socket.send.assert_called_once_with({
+ "type": "tool_start",
+ "ref": self.ref,
+ "message": {
+ "tool": "test_tool",
+ "input": "input_value"
+ }
+ })
+
+ def test_on_tool_end_search(self):
+ # Mock tool output
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = content
+
+ content = [
+ {"id": 1, "title": "Result 1", "visibility": "public", "work_type": "article", "thumbnail": "img1"},
+ {"id": 2, "title": "Result 2", "visibility": "private", "work_type": "document", "thumbnail": "img2"}
+ ]
+
+ output = MockToolMessage("search", content)
+ self.handler.on_tool_end(output)
+
+ # Verify search_result message was sent
+ expected_message = [
+ {"id": 1, "title": "Result 1", "visibility": "public", "work_type": "article", "thumbnail": "img1"},
+ {"id": 2, "title": "Result 2", "visibility": "private", "work_type": "document", "thumbnail": "img2"}
+ ]
+
+ self.mock_socket.send.assert_called_once_with({
+ "type": "search_result",
+ "ref": self.ref,
+ "message": expected_message
+ })
+
+ def test_on_tool_end_aggregate(self):
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = content
+
+ output = MockToolMessage("aggregate", {"aggregation_result": {"count": 10}})
+ self.handler.on_tool_end(output)
+
+ # Verify aggregation_result message was sent
+ self.mock_socket.send.assert_called_once_with({
+ "type": "aggregation_result",
+ "ref": self.ref,
+ "message": {"count": 10}
+ })
+
+ def test_on_tool_end_discover_fields(self):
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = content
+
+ output = MockToolMessage("discover_fields", {})
+ self.handler.on_tool_end(output)
+
+ self.mock_socket.send.assert_not_called()
+
+ def test_on_tool_end_unknown(self):
+ class MockToolMessage:
+ def __init__(self, name, content):
+ self.name = name
+ self.content = content
+
+ output = MockToolMessage("unknown", {})
+ self.handler.on_tool_end(output)
+
+ self.mock_socket.send.assert_not_called()
+
+ def test_on_agent_finish(self):
+ self.handler.on_agent_finish(finish={})
+ self.mock_socket.send.assert_called_once_with({
+ "type": "final",
+ "ref": self.ref,
+ "message": "Finished"
+ })
+
+class TestSocketCallbackHandlerErrors(TestCase):
+ def test_missing_socket(self):
+ with self.assertRaises(ValueError) as context:
+ SocketCallbackHandler(socket=None, ref="abc123")
+
+ self.assertIn("Socket not provided to agent callback handler", str(context.exception))
+
diff --git a/chat/test/agent/test_search_agent.py b/chat/test/agent/test_search_agent.py
new file mode 100644
index 00000000..68d96a42
--- /dev/null
+++ b/chat/test/agent/test_search_agent.py
@@ -0,0 +1,85 @@
+from unittest import TestCase
+from unittest.mock import patch
+
+from agent.search_agent import SearchAgent
+from langchain_core.language_models.fake_chat_models import FakeListChatModel
+from langgraph.checkpoint.memory import MemorySaver
+
+
+class TestSearchAgent(TestCase):
+
+ @patch('agent.search_agent.checkpoint_saver', return_value=MemorySaver())
+ def test_search_agent_init(self, mock_create_saver):
+ chat_model = FakeListChatModel(responses=["fake response"])
+ search_agent = SearchAgent(model=chat_model, streaming=True)
+ self.assertIsNotNone(search_agent)
+
+ @patch('agent.search_agent.checkpoint_saver', return_value=MemorySaver())
+ def test_search_agent_invoke_simple(self, mock_create_saver):
+ expected_response = "This is a mocked LLM response."
+ chat_model = FakeListChatModel(responses=[expected_response])
+ search_agent = SearchAgent(model=chat_model, streaming=True)
+ result = search_agent.invoke(question="What is the capital of France?", ref="test_ref")
+
+ self.assertIn("messages", result)
+ self.assertGreater(len(result["messages"]), 0)
+ self.assertEqual(result["messages"][-1].content, expected_response)
+
+ @patch('agent.search_agent.checkpoint_saver')
+ def test_search_agent_invocation(self, mock_create_saver):
+ # Create a memory saver instance with a Mock for delete_checkpoints
+ memory_saver = MemorySaver()
+ from unittest.mock import Mock
+ memory_saver.delete_checkpoints = Mock()
+ mock_create_saver.return_value = memory_saver
+
+ # Test that the SearchAgent invokes the model with the correct messages
+ chat_model = FakeListChatModel(responses=["first response", "second response"])
+ search_agent = SearchAgent(model=chat_model, streaming=True)
+
+ # First invocation with some question
+ result_1 = search_agent.invoke(question="First question?", ref="test_ref")
+ self.assertIn("messages", result_1)
+ self.assertEqual(result_1["messages"][-1].content, "first response")
+
+ # Second invocation, same ref, should retain memory
+ result_2 = search_agent.invoke(question="Second question?", ref="test_ref")
+ self.assertEqual(result_2["messages"][-1].content, "second response")
+
+ # Verify delete_checkpoints was not called
+ memory_saver.delete_checkpoints.assert_not_called()
+
+
+ @patch('agent.search_agent.checkpoint_saver')
+ def test_search_agent_invoke_forget(self, mock_create_saver):
+ # Create a memory saver instance with a Mock for delete_checkpoints
+ memory_saver = MemorySaver()
+ from unittest.mock import Mock
+ memory_saver.delete_checkpoints = Mock()
+ mock_create_saver.return_value = memory_saver
+
+
+ # Test `forget=True` to ensure that state is reset and doesn't carry over between invocations
+ chat_model = FakeListChatModel(responses=["first response", "second response"])
+ search_agent = SearchAgent(model=chat_model, streaming=True)
+
+ # First invocation with some question
+ result_1 = search_agent.invoke(question="First question?", ref="test_ref")
+ self.assertIn("messages", result_1)
+ self.assertEqual(result_1["messages"][-1].content, "first response")
+
+ # Second invocation, same ref, should retain memory if we don't forget
+ result_2 = search_agent.invoke(question="Second question?", ref="test_ref")
+ self.assertEqual(result_2["messages"][-1].content, "second response")
+
+ # Now invoke with forget=True, resetting the state
+ new_chat_model = FakeListChatModel(responses=["fresh response"])
+ search_agent = SearchAgent(model=new_chat_model, streaming=True)
+
+ # Forget the state for "test_ref"
+ result_3 = search_agent.invoke(question="Third question after forgetting?", ref="test_ref", forget=True)
+ # With a fresh FakeListChatModel, the response should now be "fresh response"
+ self.assertEqual(result_3["messages"][-1].content, "fresh response")
+
+ # Verify delete_checkpoints was called
+    memory_saver.delete_checkpoints.assert_called_once_with("test_ref")
diff --git a/chat/test/agent/test_search_workflow.py b/chat/test/agent/test_search_workflow.py
new file mode 100644
index 00000000..9d3a10e7
--- /dev/null
+++ b/chat/test/agent/test_search_workflow.py
@@ -0,0 +1,44 @@
+import unittest
+from langchain_core.messages.base import BaseMessage
+from langchain_core.messages.system import SystemMessage
+from langgraph.graph import END
+
+from agent.search_agent import SearchWorkflow
+
+class FakeMessage(BaseMessage):
+ type: str = "fake" # specify a default type
+ content: str
+ tool_calls: list = []
+
+class FakeModel:
+ def invoke(self, messages):
+ # Just return a mock response
+ return SystemMessage(content="Mock Response")
+
+class TestSearchWorkflow(unittest.TestCase):
+ def setUp(self):
+ self.model = FakeModel()
+ self.workflow = SearchWorkflow(model=self.model, system_message="Test system message")
+
+ def test_should_continue_with_tool_calls(self):
+ state = {"messages": [
+ FakeMessage(content="Hello"),
+ FakeMessage(content="Calling tool", tool_calls=["test_tool"])
+ ]}
+ result = self.workflow.should_continue(state)
+ self.assertEqual(result, "tools")
+
+ def test_should_continue_without_tool_calls(self):
+ state = {"messages": [
+ FakeMessage(content="Hello"),
+ FakeMessage(content="No tool calls here")
+ ]}
+ result = self.workflow.should_continue(state)
+ self.assertEqual(result, END)
+
+ def test_call_model(self):
+ state = {"messages": [FakeMessage(content="User input")]}
+ result = self.workflow.call_model(state)
+ self.assertIn("messages", result)
+ self.assertEqual(len(result["messages"]), 1)
+        self.assertEqual(result["messages"][0].content, "Mock Response")
diff --git a/chat/test/agent/test_tools.py b/chat/test/agent/test_tools.py
new file mode 100644
index 00000000..b624daa6
--- /dev/null
+++ b/chat/test/agent/test_tools.py
@@ -0,0 +1,162 @@
+from unittest import TestCase
+from unittest.mock import patch, MagicMock
+import json
+
+from agent.tools import discover_fields, search, aggregate, get_keyword_fields
+from test.fixtures.opensearch import TOP_PROPERTIES
+
+
+class TestTools(TestCase):
+ @patch('agent.tools.opensearch_vector_store')
+ def test_discover_fields(self, mock_opensearch):
+ # Mock the OpenSearch response
+ mock_client = MagicMock()
+ mock_client.indices.get_mapping.return_value = {
+ 'index_name': {
+ 'mappings': {
+ 'properties': {
+ 'field1': {'type': 'keyword'},
+ 'field3': {
+ 'properties': {
+ 'subfield1': {'type': 'keyword'}
+ }
+ }
+ }
+ }
+ }
+ }
+ mock_opensearch.return_value.client = mock_client
+
+ # Pass required parameters based on the tool's schema
+ response = discover_fields.invoke({"query": ""}) # Assuming query is the required parameter
+ self.assertEqual(response, ["field1", "field3.subfield1"])
+
+ @patch('agent.tools.opensearch_vector_store')
+ def test_search(self, mock_opensearch):
+ class MockDoc:
+ def __init__(self, content, metadata):
+ self.content = content
+ self.metadata = metadata
+
+ expected_results = [{"id": "doc1", "text": "example result"}]
+ mock_results = [MockDoc(content=doc["id"], metadata=doc) for doc in expected_results]
+ mock_opensearch.return_value.similarity_search.return_value = mock_results
+
+ response = search.invoke("test query")
+ self.assertEqual(response, expected_results)
+
+ @patch('agent.tools.opensearch_vector_store')
+ def test_aggregate(self, mock_opensearch):
+ mock_response = json.dumps({
+ "aggregations": {
+ "example_agg": {
+ "buckets": []
+ }
+ }
+ })
+ mock_opensearch.return_value.aggregations_search.return_value = mock_response
+
+ # Pass parameters directly instead of as JSON string
+ response = aggregate.invoke({
+ "agg_field": "field1",
+ "term_field": "term_field",
+ "term": "term"
+ })
+ self.assertIsInstance(response, str)
+ self.assertEqual(json.loads(response), json.loads(mock_response))
+
+ @patch('agent.tools.opensearch_vector_store')
+ def test_aggregate_no_term(self, mock_opensearch):
+ mock_response = json.dumps({
+ "aggregations": {
+ "all_docs": {
+ "buckets": []
+ }
+ }
+ })
+ mock_opensearch.return_value.aggregations_search.return_value = mock_response
+
+ response = aggregate.invoke({
+ "agg_field": "field1",
+ "term_field": "",
+ "term": ""
+ })
+ self.assertIsInstance(response, str)
+ self.assertEqual(json.loads(response), json.loads(mock_response))
+
+ def test_get_keyword_fields(self):
+ properties = {
+ 'field1': {'type': 'keyword'},
+ 'field2': {'type': 'text'},
+ 'field3': {
+ 'fields': {
+ 'raw': {'type': 'keyword'}
+ }
+ }
+ }
+ result = get_keyword_fields(properties)
+ self.assertEqual(set(result), {'field1', 'field3.raw'})
+
+ def test_nested_get_keyword_fields(self):
+ properties = {
+ 'field1': {
+ 'properties': {
+ 'nested1': {'type': 'keyword'},
+ 'nested2': {'type': 'text'}
+ }
+ }
+ }
+ result = get_keyword_fields(properties)
+ self.assertEqual(result, ['field1.nested1'])
+
+ def test_complex_mapping_get_keyword_fields(self):
+ properties = {
+ 'field1': {
+ 'properties': {
+ 'nested1': {
+ 'type': 'keyword'
+ },
+ 'nested2': {
+ 'properties': {
+ 'subnested1': {'type': 'keyword'}
+ }
+ }
+ }
+ },
+ 'field2': {
+ 'fields': {
+ 'raw': {'type': 'keyword'}
+ }
+ }
+ }
+ result = get_keyword_fields(properties)
+ self.assertEqual(set(result), {'field1.nested1', 'field1.nested2.subnested1', 'field2.raw'})
+
+ def test_meadow_mapping_get_keyword_fields(self):
+ result = get_keyword_fields(TOP_PROPERTIES)
+        expected_result = ['accession_number', 'all_controlled_terms', 'all_ids', 'api_link', 'api_model', 'ark', 'batch_ids', 'box_name', 'box_number', 'canonical_link', 'catalog_key', 'collection.id', 'collection.title.keyword', 'contributor.facet', 'contributor.id', 'contributor.label', 'contributor.label_with_role', 'contributor.role', 'contributor.variants', 'creator.facet', 'creator.id', 'creator.label', 'creator.variants', 'csv_metadata_update_jobs', 'date_created', 'date_created_edtf', 'embedding_model', 'file_sets.accession_number', 'file_sets.download_url', 'file_sets.id', 'file_sets.label', 'file_sets.mime_type', 'file_sets.original_filename', 'file_sets.representative_image_url', 'file_sets.role', 'file_sets.streaming_url', 'file_sets.webvtt', 'folder_name', 'folder_number', 'genre.facet', 'genre.id', 'genre.label', 'genre.label_with_role', 'genre.role', 'genre.variants', 'id', 'identifier', 'iiif_manifest', 'ingest_project.id', 'ingest_project.title', 'ingest_sheet.id', 'ingest_sheet.title', 'keywords', 'language.facet', 'language.id', 'language.label', 'language.variants', 'legacy_identifier', 'library_unit', 'license.id', 'license.label', 'location.facet', 'location.id', 'location.label', 'location.variants', 'notes.note', 'notes.type', 'physical_description_material', 'physical_description_size', 'preservation_level', 'project.cycle', 'project.desc', 'project.manager', 'project.name', 'project.proposer', 'project.task_number', 'provenance', 'publisher', 'related_material', 'related_url.label', 'related_url.url', 'representative_file_set.id', 'representative_file_set.url', 'rights_holder', 'rights_statement.id', 'rights_statement.label', 'scope_and_contents', 'series', 'source', 'status', 'style_period.facet', 'style_period.id', 'style_period.label', 'style_period.variants', 'subject.facet', 'subject.id', 'subject.label', 'subject.label_with_role', 'subject.role', 'subject.variants', 'technique.facet', 'technique.id', 'technique.label', 'technique.variants', 'terms_of_use', 'thumbnail', 'title.keyword', 'visibility', 'work_type']
+ self.assertEqual(
+ result,
+ expected_result)
+
+ @patch('agent.tools.opensearch_vector_store')
+ def test_aggregate_exception(self, mock_opensearch):
+ # Configure the mock to raise an exception
+ mock_client = MagicMock()
+ mock_client.aggregations_search.side_effect = Exception("Test error")
+ mock_opensearch.return_value = mock_client
+
+ # Call aggregate with some parameters
+ response = aggregate.invoke({
+ "agg_field": "field1",
+ "term_field": "term_field",
+ "term": "term"
+ })
+
+ # Verify the response contains the error message
+ self.assertIsInstance(response, str)
+ parsed_response = json.loads(response)
+ self.assertEqual(parsed_response, {"error": "Test error"})
+
+ # Verify the mock was called with expected parameters
+        mock_client.aggregations_search.assert_called_once()
diff --git a/chat/test/core/__init__.py b/chat/test/core/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/chat/test/helpers/test_apitoken.py b/chat/test/core/test_apitoken.py
similarity index 95%
rename from chat/test/helpers/test_apitoken.py
rename to chat/test/core/test_apitoken.py
index e23a1646..cb1c8e60 100644
--- a/chat/test/helpers/test_apitoken.py
+++ b/chat/test/core/test_apitoken.py
@@ -1,10 +1,7 @@
# ruff: noqa: E402
import os
-import sys
-sys.path.append('./src')
-
-from helpers.apitoken import ApiToken
+from core.apitoken import ApiToken
from test.fixtures.apitoken import DEV_TEAM_TOKEN, SUPER_TOKEN, TEST_SECRET, TEST_TOKEN
from unittest import mock, TestCase
diff --git a/chat/test/core/test_event_config.py b/chat/test/core/test_event_config.py
new file mode 100644
index 00000000..ffe09a91
--- /dev/null
+++ b/chat/test/core/test_event_config.py
@@ -0,0 +1,123 @@
+# ruff: noqa: E402
+import json
+import unittest
+from unittest import TestCase
+from unittest.mock import MagicMock, patch
+
+from core.apitoken import ApiToken
+from core.event_config import EventConfig
+from core.websocket import Websocket
+
+class TestEventConfig(TestCase):
+ def test_defaults(self):
+ actual = EventConfig(event={"body": json.dumps({"question": "Question?"})})
+ self.assertEqual(actual.question, "Question?")
+
+ def test_attempt_override_without_superuser_status(self):
+ actual = EventConfig(
+ event={
+ "body": json.dumps(
+ {
+ "k": 100,
+ "question": "test question",
+ "ref": "test ref",
+ "size": 90,
+ "temperature": 0.9,
+ }
+ )
+ }
+ )
+ expected_output = {
+ "k": 40,
+ "question": "test question",
+ "ref": "test ref",
+ "size": 20,
+ "temperature": 0.2,
+ }
+ self.assertEqual(actual.k, expected_output["k"])
+ self.assertEqual(actual.question, expected_output["question"])
+ self.assertEqual(actual.ref, expected_output["ref"])
+ self.assertEqual(actual.size, expected_output["size"])
+ self.assertEqual(actual.temperature, expected_output["temperature"])
+
+class TestEventConfigSuperuser(unittest.TestCase):
+ def setUp(self):
+ self.event = {
+ "body": json.dumps({
+ "auth": "some_superuser_token",
+ "model": "custom-superuser-model",
+ "prompt": "Custom superuser prompt",
+ "k": 80,
+ "size": 50,
+ "temperature": 0.7,
+ "text_key": "custom_text_key"
+ }),
+ "requestContext": {
+ "connectionId": "test_connection_id",
+ "domainName": "example.com",
+ "stage": "dev"
+ }
+ }
+
+ @patch.object(ApiToken, 'is_superuser', return_value=True)
+ def test_superuser_overrides(self, mock_superuser):
+ """
+ Test that when the user is a superuser, the payload values override the defaults.
+ """
+ config = EventConfig(event=self.event)
+ self.assertTrue(config.is_superuser)
+ # As a superuser, these should reflect the payload values rather than defaults
+ self.assertEqual(config.model, "custom-superuser-model")
+ self.assertEqual(config.k, 80) # should not be clipped since 80 < MAX_K (100)
+ self.assertEqual(config.size, 50)
+ self.assertEqual(config.temperature, 0.7)
+ self.assertEqual(config.text_key, "custom_text_key")
+ self.assertEqual(config.prompt_text, "Custom superuser prompt")
+
+
+class TestEventConfigWebsocket(unittest.TestCase):
+ def setUp(self):
+ self.event = {
+ "body": json.dumps({
+ "auth": "some_superuser_token",
+ "model": "custom-superuser-model",
+ "prompt": "Custom superuser prompt",
+ "k": 80,
+ "size": 50,
+ "temperature": 0.7,
+ "text_key": "custom_text_key"
+ }),
+ "requestContext": {
+ "connectionId": "test_connection_id",
+ "domainName": "example.com",
+ "stage": "dev"
+ }
+ }
+
+ @patch.object(ApiToken, 'is_superuser', return_value=True)
+ @patch('core.event_config.Websocket', autospec=True)
+ def test_setup_websocket_without_socket(self, mock_websocket_class, mock_superuser):
+ config = EventConfig(event=self.event)
+ returned_socket = config.setup_websocket()
+
+ mock_websocket_class.assert_called_once_with(
+ endpoint_url='https://example.com/dev',
+ connection_id='test_connection_id',
+ ref=config.ref
+ )
+
+ # Instead of assertIsInstance(returned_socket, mock_websocket_class),
+ # we check that it's the return_value of the mock:
+ self.assertIs(returned_socket, mock_websocket_class.return_value)
+ self.assertEqual(config.socket, mock_websocket_class.return_value)
+
+ @patch.object(ApiToken, 'is_superuser', return_value=True)
+ def test_setup_websocket_with_existing_socket(self, mock_superuser):
+ """
+ Test that setup_websocket uses the provided socket if one is passed in.
+ """
+ config = EventConfig(event=self.event)
+ mock_socket = MagicMock(spec=Websocket)
+ returned_socket = config.setup_websocket(socket=mock_socket)
+ self.assertEqual(returned_socket, mock_socket)
+        self.assertEqual(config.socket, mock_socket)
diff --git a/chat/test/helpers/test_prompts.py b/chat/test/core/test_prompts.py
similarity index 90%
rename from chat/test/helpers/test_prompts.py
rename to chat/test/core/test_prompts.py
index b9a7d950..7a1179e1 100644
--- a/chat/test/helpers/test_prompts.py
+++ b/chat/test/core/test_prompts.py
@@ -1,8 +1,5 @@
# ruff: noqa: E402
-import sys
-sys.path.append('./src')
-
-from helpers.prompts import prompt_template, document_template
+from core.prompts import prompt_template, document_template
from unittest import TestCase
diff --git a/chat/test/core/test_secrets.py b/chat/test/core/test_secrets.py
new file mode 100644
index 00000000..4f11d04c
--- /dev/null
+++ b/chat/test/core/test_secrets.py
@@ -0,0 +1,29 @@
+import boto3
+import os
+import pytest
+from moto import mock_aws
+from unittest import TestCase
+
+from core.secrets import load_secrets
+
+@mock_aws
+@pytest.mark.filterwarnings("ignore::DeprecationWarning")
+class TestSecrets(TestCase):
+ def setUp(self):
+ client = boto3.client("secretsmanager", region_name="us-east-1")
+ client.create_secret(
+ Name="mock/infrastructure/index",
+ SecretString='{"endpoint": "https://opensearch-endpoint", "embedding_model": "opensearch-model"}')
+ client.create_secret(
+ Name="mock/config/dcapi",
+ SecretString='{"api_token_secret": "dcapi-token"}')
+
+ def test_load_secrets(self):
+ os.environ['SECRETS_PATH'] = 'mock'
+ self.assertNotEqual('dcapi-token', os.getenv('API_TOKEN_SECRET'))
+ self.assertNotEqual('https://opensearch-endpoint', os.getenv('OPENSEARCH_ENDPOINT'))
+ self.assertNotEqual('opensearch-model', os.getenv('OPENSEARCH_MODEL_ID'))
+ load_secrets()
+ self.assertEqual('dcapi-token', os.getenv('API_TOKEN_SECRET'))
+ self.assertEqual('https://opensearch-endpoint', os.getenv('OPENSEARCH_ENDPOINT'))
+ self.assertEqual('opensearch-model', os.getenv('OPENSEARCH_MODEL_ID'))
diff --git a/chat/test/core/test_setup.py b/chat/test/core/test_setup.py
new file mode 100644
index 00000000..b908d801
--- /dev/null
+++ b/chat/test/core/test_setup.py
@@ -0,0 +1,150 @@
+import unittest
+from unittest.mock import patch, MagicMock
+import os
+from opensearchpy import RequestsHttpConnection
+
+from core.setup import chat_model, checkpoint_saver, prefix, opensearch_endpoint, opensearch_client, opensearch_vector_store, websocket_client
+
+
+class TestChatModel(unittest.TestCase):
+ def test_chat_model_returns_bedrock_instance(self):
+ kwargs = {"model_id": "test_model"}
+ with patch("core.setup.ChatBedrock") as mock_bedrock:
+ result = chat_model(**kwargs)
+ mock_bedrock.assert_called_once_with(**kwargs)
+ self.assertEqual(result, mock_bedrock.return_value)
+
+class TestCheckpointSaver(unittest.TestCase):
+ @patch.dict(os.environ, {"CHECKPOINT_BUCKET_NAME": "test-bucket"})
+ @patch("core.setup.SelectiveCheckpointer")
+ def test_checkpoint_saver_initialization(self, mock_checkpointer):
+ kwargs = {"prefix": "test"}
+ result = checkpoint_saver(**kwargs)
+
+ mock_checkpointer.assert_called_once_with(
+ bucket_name="test-bucket",
+ retain_history=False,
+ **kwargs
+ )
+ self.assertEqual(result, mock_checkpointer.return_value)
+
+class TestPrefix(unittest.TestCase):
+ def test_prefix_with_env_prefix(self):
+ with patch.dict(os.environ, {"ENV_PREFIX": "dev"}):
+ result = prefix("test")
+ self.assertEqual(result, "dev-test")
+
+ def test_prefix_without_env_prefix(self):
+ with patch.dict(os.environ, {"ENV_PREFIX": ""}):
+ result = prefix("test")
+ self.assertEqual(result, "test")
+
+ def test_prefix_with_none_env_prefix(self):
+ with patch.dict(os.environ, clear=True):
+ result = prefix("test")
+ self.assertEqual(result, "test")
+
+class TestOpenSearchEndpoint(unittest.TestCase):
+ def test_opensearch_endpoint_with_full_url(self):
+ with patch.dict(os.environ, {"OPENSEARCH_ENDPOINT": "https://test.amazonaws.com"}):
+ result = opensearch_endpoint()
+ self.assertEqual(result, "test.amazonaws.com")
+
+ def test_opensearch_endpoint_with_hostname(self):
+ with patch.dict(os.environ, {"OPENSEARCH_ENDPOINT": "test.amazonaws.com"}):
+ result = opensearch_endpoint()
+ self.assertEqual(result, "test.amazonaws.com")
+
+class TestOpenSearchClient(unittest.TestCase):
+ @patch("core.setup.boto3.Session")
+ @patch("core.setup.AWS4Auth")
+ @patch("core.setup.OpenSearch")
+ def test_opensearch_client_initialization(self, mock_opensearch, mock_aws4auth, mock_session):
+ # Setup mock credentials
+ mock_credentials = MagicMock()
+ mock_session.return_value.get_credentials.return_value = mock_credentials
+
+ with patch.dict(os.environ, {
+ "AWS_REGION": "us-west-2",
+ "OPENSEARCH_ENDPOINT": "test.amazonaws.com"
+ }):
+ _result = opensearch_client()
+
+ # Verify AWS4Auth initialization
+ mock_aws4auth.assert_called_once_with(
+ region="us-west-2",
+ service="es",
+ refreshable_credentials=mock_credentials
+ )
+
+ # Verify OpenSearch initialization
+ mock_opensearch.assert_called_once_with(
+ hosts=[{"host": "test.amazonaws.com", "port": 443}],
+ use_ssl=True,
+ connection_class=RequestsHttpConnection,
+ http_auth=mock_aws4auth.return_value
+ )
+
+class TestOpenSearchVectorStore(unittest.TestCase):
+ @patch("core.setup.boto3.Session")
+ @patch("core.setup.AWS4Auth")
+ @patch("core.setup.OpenSearchNeuralSearch")
+ def test_opensearch_vector_store_initialization(self, mock_neural_search, mock_aws4auth, mock_session):
+ # Setup mock credentials
+ mock_credentials = MagicMock()
+ mock_session.return_value.get_credentials.return_value = mock_credentials
+
+ with patch.dict(os.environ, {
+ "AWS_REGION": "us-west-2",
+ "OPENSEARCH_ENDPOINT": "test.amazonaws.com",
+ "OPENSEARCH_MODEL_ID": "test-model",
+ "ENV_PREFIX": "dev"
+ }):
+ _result = opensearch_vector_store()
+
+ # Verify AWS4Auth initialization
+ mock_aws4auth.assert_called_once_with(
+ region="us-west-2",
+ service="es",
+ refreshable_credentials=mock_credentials
+ )
+
+ # Verify OpenSearchNeuralSearch initialization
+ mock_neural_search.assert_called_once_with(
+ index="dev-dc-v2-work",
+ model_id="test-model",
+ endpoint="test.amazonaws.com",
+ connection_class=RequestsHttpConnection,
+ http_auth=mock_aws4auth.return_value,
+ text_field="id"
+ )
+
+class TestWebsocketClient(unittest.TestCase):
+ @patch("core.setup.boto3.client")
+ def test_websocket_client_with_provided_endpoint(self, mock_boto3_client):
+ endpoint_url = "https://test-ws.amazonaws.com"
+ result = websocket_client(endpoint_url)
+
+ mock_boto3_client.assert_called_once_with(
+ "apigatewaymanagementapi",
+ endpoint_url=endpoint_url
+ )
+ self.assertEqual(result, mock_boto3_client.return_value)
+
+ @patch("core.setup.boto3.client")
+ def test_websocket_client_with_env_endpoint(self, mock_boto3_client):
+ with patch.dict(os.environ, {"APIGATEWAY_URL": "https://test-ws-env.amazonaws.com"}):
+ result = websocket_client(None)
+
+ mock_boto3_client.assert_called_once_with(
+ "apigatewaymanagementapi",
+ endpoint_url="https://test-ws-env.amazonaws.com"
+ )
+ self.assertEqual(result, mock_boto3_client.return_value)
+
+ @patch("core.setup.boto3.client")
+ def test_websocket_client_error_handling(self, mock_boto3_client):
+ mock_boto3_client.side_effect = Exception("Connection error")
+
+ with self.assertRaises(Exception):
+            websocket_client("https://test-ws.amazonaws.com")
diff --git a/chat/test/core/test_websocket.py b/chat/test/core/test_websocket.py
new file mode 100644
index 00000000..573c8748
--- /dev/null
+++ b/chat/test/core/test_websocket.py
@@ -0,0 +1,102 @@
+# ruff: noqa: E402
+import sys
+import json
+sys.path.append('./src')
+
+from unittest import TestCase, mock
+from core.websocket import Websocket
+
+
+class MockClient:
+ def post_to_connection(self, Data, ConnectionId):
+ return Data
+
+
+class TestWebsocket(TestCase):
+ def setUp(self):
+ self.mock_client = MockClient()
+ self.test_connection_id = "test_connection_id"
+ self.test_ref = {"key": "value"}
+
+ def test_init_with_all_parameters(self):
+ websocket = Websocket(
+ client=self.mock_client,
+ endpoint_url="wss://test.com",
+ connection_id=self.test_connection_id,
+ ref=self.test_ref
+ )
+ self.assertEqual(websocket.client, self.mock_client)
+ self.assertEqual(websocket.connection_id, self.test_connection_id)
+ self.assertEqual(websocket.ref, self.test_ref)
+
+ def test_init_with_minimal_parameters(self):
+ with mock.patch('core.websocket.websocket_client') as mock_client_func:
+ mock_client_func.return_value = self.mock_client
+ websocket = Websocket(endpoint_url="wss://test.com")
+ self.assertEqual(websocket.client, self.mock_client)
+ self.assertEqual(websocket.connection_id, None)
+ self.assertEqual(websocket.ref, {})
+
+ def test_send_string_message(self):
+ websocket = Websocket(
+ client=self.mock_client,
+ connection_id=self.test_connection_id,
+ ref=self.test_ref
+ )
+ message = "test_message"
+ expected = {"message": "test_message", "ref": self.test_ref}
+ self.assertEqual(websocket.send(message), expected)
+
+ def test_send_dict_message(self):
+ websocket = Websocket(
+ client=self.mock_client,
+ connection_id=self.test_connection_id,
+ ref=self.test_ref
+ )
+ message = {"data": "test_data"}
+ expected = {"data": "test_data", "ref": self.test_ref}
+ self.assertEqual(websocket.send(message), expected)
+
+ def test_send_in_debug_mode(self):
+ websocket = Websocket(
+ client=self.mock_client,
+ connection_id="debug",
+ ref=self.test_ref
+ )
+ message = "test_message"
+ expected = {"message": "test_message", "ref": self.test_ref}
+
+ # Capture printed output
+ with mock.patch('builtins.print') as mock_print:
+ result = websocket.send(message)
+ mock_print.assert_called_once_with(expected)
+ self.assertEqual(result, expected)
+
+ def test_string_representation(self):
+ websocket = Websocket(
+ client=self.mock_client,
+ connection_id=self.test_connection_id,
+ ref=self.test_ref
+ )
+ expected_str = f"Websocket({self.test_connection_id}, {self.test_ref})"
+ self.assertEqual(str(websocket), expected_str)
+ self.assertEqual(repr(websocket), expected_str)
+
+ def test_send_converts_to_bytes(self):
+ websocket = Websocket(
+ client=self.mock_client,
+ connection_id=self.test_connection_id,
+ ref=self.test_ref
+ )
+ message = "test_message"
+ expected_bytes = bytes(
+ json.dumps({"message": "test_message", "ref": self.test_ref}),
+ "utf-8"
+ )
+
+ with mock.patch.object(self.mock_client, 'post_to_connection') as mock_post:
+ websocket.send(message)
+ mock_post.assert_called_once_with(
+ Data=expected_bytes,
+ ConnectionId=self.test_connection_id
+            )
diff --git a/chat/test/fixtures/opensearch.py b/chat/test/fixtures/opensearch.py
new file mode 100644
index 00000000..1e791caf
--- /dev/null
+++ b/chat/test/fixtures/opensearch.py
@@ -0,0 +1,335 @@
+TOP_PROPERTIES = {
+ "abstract": {
+ "type": "text",
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "accession_number": {"type": "keyword", "copy_to": ["all_ids"]},
+ "all_controlled_labels": {
+ "type": "text",
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "all_controlled_terms": {"type": "keyword"},
+ "all_ids": {"type": "keyword"},
+ "all_text": {
+ "type": "text",
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "alternate_title": {
+ "type": "text",
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "api_link": {"type": "keyword", "copy_to": ["all_text"]},
+ "api_model": {"type": "keyword", "copy_to": ["all_text"]},
+ "ark": {"type": "keyword", "copy_to": ["all_ids"]},
+ "batch_ids": {"type": "keyword", "copy_to": ["all_ids"]},
+ "box_name": {"type": "keyword", "copy_to": ["all_text"]},
+ "box_number": {"type": "keyword", "copy_to": ["all_text"]},
+ "canonical_link": {"type": "keyword", "copy_to": ["all_text"]},
+ "caption": {
+ "type": "text",
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "catalog_key": {"type": "keyword", "copy_to": ["all_ids"]},
+ "collection": {
+ "properties": {
+ "description": {
+ "type": "text",
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "title": {
+ "type": "text",
+ "fields": {"keyword": {"type": "keyword", "ignore_above": 256}},
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ }
+ },
+ "contributor": {
+ "properties": {
+ "facet": {"type": "keyword", "copy_to": ["all_text"]},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {
+ "type": "keyword",
+ "copy_to": [
+ "all_text",
+ "all_controlled_terms",
+ "all_controlled_labels",
+ ],
+ },
+ "label_with_role": {"type": "keyword", "copy_to": ["all_text"]},
+ "role": {"type": "keyword", "copy_to": ["all_text"]},
+ "variants": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "create_date": {"type": "date_nanos"},
+ "creator": {
+ "properties": {
+ "facet": {"type": "keyword", "copy_to": ["all_text"]},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {
+ "type": "keyword",
+ "copy_to": [
+ "all_text",
+ "all_controlled_terms",
+ "all_controlled_labels",
+ ],
+ },
+ "variants": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "csv_metadata_update_jobs": {"type": "keyword", "copy_to": ["all_text"]},
+ "cultural_context": {
+ "type": "text",
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "date_created": {"type": "keyword", "copy_to": ["all_text"]},
+ "date_created_edtf": {"type": "keyword"},
+ "description": {
+ "type": "text",
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "embedding": {
+ "type": "knn_vector",
+ "dimension": 1024,
+ "method": {
+ "engine": "lucene",
+ "space_type": "cosinesimil",
+ "name": "hnsw",
+ "parameters": {},
+ },
+ },
+ "embedding_model": {"type": "keyword"},
+ "embedding_text_length": {"type": "long"},
+ "file_sets": {
+ "properties": {
+ "accession_number": {"type": "keyword", "copy_to": ["all_ids"]},
+ "description": {
+ "type": "text",
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "download_url": {"type": "keyword", "copy_to": ["all_text"]},
+ "duration": {"type": "float"},
+ "height": {"type": "long"},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {"type": "keyword", "copy_to": ["all_text"]},
+ "mime_type": {"type": "keyword", "copy_to": ["all_text"]},
+ "original_filename": {"type": "keyword", "copy_to": ["all_text"]},
+ "poster_offset": {"type": "long"},
+ "rank": {"type": "long"},
+ "representative_image_url": {"type": "keyword", "copy_to": ["all_text"]},
+ "role": {"type": "keyword", "copy_to": ["all_text"]},
+ "streaming_url": {"type": "keyword", "copy_to": ["all_text"]},
+ "webvtt": {"type": "keyword", "copy_to": ["all_text"]},
+ "width": {"type": "long"},
+ }
+ },
+ "folder_name": {"type": "keyword", "copy_to": ["all_text"]},
+ "folder_number": {"type": "keyword", "copy_to": ["all_text"]},
+ "genre": {
+ "properties": {
+ "facet": {"type": "keyword", "copy_to": ["all_text"]},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {
+ "type": "keyword",
+ "copy_to": [
+ "all_text",
+ "all_controlled_terms",
+ "all_controlled_labels",
+ ],
+ },
+ "label_with_role": {"type": "keyword", "copy_to": ["all_text"]},
+ "role": {"type": "keyword", "copy_to": ["all_text"]},
+ "variants": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "identifier": {"type": "keyword", "copy_to": ["all_ids"]},
+ "iiif_manifest": {"type": "keyword", "copy_to": ["all_text"]},
+ "indexed_at": {"type": "date_nanos"},
+ "ingest_project": {
+ "properties": {
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "title": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "ingest_sheet": {
+ "properties": {
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "title": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "keywords": {"type": "keyword", "copy_to": ["all_text"]},
+ "language": {
+ "properties": {
+ "facet": {"type": "keyword", "copy_to": ["all_text"]},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {
+ "type": "keyword",
+ "copy_to": [
+ "all_text",
+ "all_controlled_terms",
+ "all_controlled_labels",
+ ],
+ },
+ "variants": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "legacy_identifier": {"type": "keyword", "copy_to": ["all_ids"]},
+ "library_unit": {"type": "keyword", "copy_to": ["all_text"]},
+ "license": {
+ "properties": {
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "location": {
+ "properties": {
+ "facet": {"type": "keyword", "copy_to": ["all_text"]},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {
+ "type": "keyword",
+ "copy_to": [
+ "all_text",
+ "all_controlled_terms",
+ "all_controlled_labels",
+ ],
+ },
+ "variants": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "modified_date": {"type": "date_nanos"},
+ "notes": {
+ "properties": {
+ "note": {"type": "keyword", "copy_to": ["all_text"]},
+ "type": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "physical_description_material": {"type": "keyword", "copy_to": ["all_text"]},
+ "physical_description_size": {"type": "keyword", "copy_to": ["all_text"]},
+ "preservation_level": {"type": "keyword", "copy_to": ["all_text"]},
+ "project": {
+ "properties": {
+ "cycle": {"type": "keyword", "copy_to": ["all_text"]},
+ "desc": {"type": "keyword", "copy_to": ["all_text"]},
+ "manager": {"type": "keyword", "copy_to": ["all_text"]},
+ "name": {"type": "keyword", "copy_to": ["all_text"]},
+ "proposer": {"type": "keyword", "copy_to": ["all_text"]},
+ "task_number": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "provenance": {"type": "keyword", "copy_to": ["all_text"]},
+ "published": {"type": "boolean"},
+ "publisher": {"type": "keyword", "copy_to": ["all_text"]},
+ "related_material": {"type": "keyword", "copy_to": ["all_text"]},
+ "related_url": {
+ "properties": {
+ "label": {"type": "keyword", "copy_to": ["all_text"]},
+ "url": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "representative_file_set": {
+ "properties": {
+ "aspect_ratio": {"type": "float"},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "url": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "rights_holder": {"type": "keyword", "copy_to": ["all_text"]},
+ "rights_statement": {
+ "properties": {
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "scope_and_contents": {"type": "keyword", "copy_to": ["all_text"]},
+ "series": {"type": "keyword", "copy_to": ["all_text"]},
+ "source": {"type": "keyword", "copy_to": ["all_text"]},
+ "status": {"type": "keyword", "copy_to": ["all_text"]},
+ "style_period": {
+ "properties": {
+ "facet": {"type": "keyword", "copy_to": ["all_text"]},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {
+ "type": "keyword",
+ "copy_to": [
+ "all_text",
+ "all_controlled_terms",
+ "all_controlled_labels",
+ ],
+ },
+ "variants": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "subject": {
+ "properties": {
+ "facet": {"type": "keyword", "copy_to": ["all_text"]},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {"type": "keyword", "copy_to": ["all_text"]},
+ "label_with_role": {"type": "keyword", "copy_to": ["all_text"]},
+ "role": {"type": "keyword", "copy_to": ["all_text"]},
+ "variants": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "table_of_contents": {
+ "type": "text",
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "technique": {
+ "properties": {
+ "facet": {"type": "keyword", "copy_to": ["all_text"]},
+ "id": {"type": "keyword", "copy_to": ["all_ids"]},
+ "label": {
+ "type": "keyword",
+ "copy_to": [
+ "all_text",
+ "all_controlled_terms",
+ "all_controlled_labels",
+ ],
+ },
+ "variants": {"type": "keyword", "copy_to": ["all_text"]},
+ }
+ },
+ "terms_of_use": {"type": "keyword", "copy_to": ["all_text"]},
+ "thumbnail": {"type": "keyword", "copy_to": ["all_text"]},
+ "title": {
+ "type": "text",
+ "fields": {"keyword": {"type": "keyword", "ignore_above": 256}},
+ "copy_to": ["all_text"],
+ "analyzer": "full_analyzer",
+ "search_analyzer": "stopword_analyzer",
+ "search_quote_analyzer": "full_analyzer",
+ },
+ "visibility": {"type": "keyword", "copy_to": ["all_text"]},
+ "work_type": {"type": "keyword", "copy_to": ["all_text"]},
+}
diff --git a/chat/test/handlers/test_chat.py b/chat/test/handlers/test_chat.py
index a2cd93e8..5c19a769 100644
--- a/chat/test/handlers/test_chat.py
+++ b/chat/test/handlers/test_chat.py
@@ -1,18 +1,18 @@
# ruff: noqa: E402
+import boto3
import json
import os
-import sys
-
-sys.path.append('./src')
-
-from unittest import mock, TestCase
+import pytest
+from unittest import TestCase
from unittest.mock import patch
-from handlers.chat import handler
-from helpers.apitoken import ApiToken
-from helpers.response import Response
-from websocket import Websocket
-from event_config import EventConfig
+from moto import mock_aws
+
+from handlers import chat
+from core.apitoken import ApiToken
+from core.websocket import Websocket
+from langchain_core.language_models.fake_chat_models import FakeListChatModel
+from langgraph.checkpoint.memory import MemorySaver
class MockClient:
def __init__(self):
@@ -23,98 +23,81 @@ def post_to_connection(self, Data, ConnectionId):
return Data
class MockContext:
- def __init__(self):
- self.log_stream_name = 'test'
-
-# TODO: Find a way to build a better mock response (maybe using helpers.metrics.debug_response)
-def mock_response(**kwargs):
- result = {
- 'answer': 'Answer.',
- 'attributes': ['accession_number', 'alternate_title', 'api_link', 'canonical_link', 'caption', 'collection', 'contributor', 'date_created', 'date_created_edtf', 'description', 'genre', 'id', 'identifier', 'keywords', 'language', 'notes', 'physical_description_material', 'physical_description_size', 'provenance', 'publisher', 'rights_statement', 'subject', 'table_of_contents', 'thumbnail', 'title', 'visibility', 'work_type'],
- 'azure_endpoint': 'https://nul-ai-east.openai.azure.com/',
- 'deployment_name': 'gpt-4o',
- 'is_dev_team': False,
- 'is_superuser': False,
- 'k': 10,
- 'openai_api_version': '2024-02-01',
- 'prompt': "Prompt",
- 'question': 'Question?',
- 'ref': 'ref123',
- 'size': 20,
- 'source_documents': [],
- 'temperature': 0.2,
- 'text_key': 'id',
- 'token_counts': {'question': 19, 'answer': 348, 'prompt': 329, 'source_documents': 10428,'total': 11124}
- }
- result.update(kwargs)
- return result
+ def __init__(self):
+ self.log_stream_name = 'test_log_stream'
-@mock.patch.dict(
- os.environ,
- {
- "AZURE_OPENAI_RESOURCE_NAME": "test",
- },
-)
-@mock.patch.object(Response, "prepare_response", lambda _: mock_response())
+@mock_aws
+@pytest.mark.filterwarnings("ignore::DeprecationWarning")
class TestHandler(TestCase):
- def test_handler_unauthorized(self):
+
+ @patch.object(ApiToken, 'is_logged_in', return_value=False)
+ @patch('agent.search_agent.checkpoint_saver', return_value=MemorySaver())
+ def test_handler_unauthorized(self, mock_create_saver, mock_is_logged_in):
event = {"socket": Websocket(client=MockClient(), endpoint_url="test", connection_id="test", ref="test")}
- self.assertEqual(handler(event, MockContext()), {'body': 'Unauthorized', 'statusCode': 401})
-
- @patch.object(ApiToken, 'is_logged_in')
- def test_handler_success(self, mock_is_logged_in):
- mock_is_logged_in.return_value = True
- event = {"socket": Websocket(client=MockClient(), endpoint_url="test", connection_id="test", ref="test"), "body": '{"question": "Question?"}' }
- self.assertEqual(handler(event, MockContext()), {'statusCode': 200})
-
- @patch.object(ApiToken, 'is_logged_in')
- @patch.object(ApiToken, 'is_superuser')
- @patch.object(EventConfig, '_is_debug_mode_enabled')
- def test_handler_debug_mode(self, mock_is_debug_enabled, mock_is_superuser, mock_is_logged_in):
- mock_is_debug_enabled.return_value = True
- mock_is_logged_in.return_value = True
- mock_is_superuser.return_value = True
- mock_client = MockClient()
- mock_websocket = Websocket(client=mock_client, endpoint_url="test", connection_id="test", ref="test")
- event = {"socket": mock_websocket, "debug": True, "body": '{"question": "Question?"}' }
- handler(event, MockContext())
- response = json.loads(mock_client.received_data)
- expected_keys = {"attributes", "azure_endpoint", "deployment_name"}
- received_keys = response.keys()
- self.assertTrue(expected_keys.issubset(received_keys))
-
- @patch.object(ApiToken, 'is_logged_in')
- @patch.object(ApiToken, 'is_superuser')
- def test_handler_debug_mode_for_superusers_only(self, mock_is_superuser, mock_is_logged_in):
- mock_is_logged_in.return_value = True
- mock_is_superuser.return_value = False
- mock_client = MockClient()
- mock_websocket = Websocket(client=mock_client, endpoint_url="test", connection_id="test", ref="test")
- event = {"socket": mock_websocket, "body": '{"question": "Question?", "debug": "true"}'}
- handler(event, MockContext())
- response = json.loads(mock_client.received_data)
- expected_keys = {"answer", "ref"}
- received_keys = set(response.keys())
- self.assertSetEqual(received_keys, expected_keys)
+ self.assertEqual(chat(event, MockContext()), {'statusCode': 401, 'body': 'Unauthorized'})
- @patch.object(ApiToken, 'is_logged_in')
- def test_handler_question_missing(self, mock_is_logged_in):
- mock_is_logged_in.return_value = True
+ @patch.object(ApiToken, 'is_logged_in', return_value=True)
+ @patch('agent.search_agent.checkpoint_saver', return_value=MemorySaver())
+ @patch('handlers.chat_model', return_value=FakeListChatModel(responses=["fake response"]))
+ def test_handler_success(self, mock_chat_model, mock_create_saver, mock_is_logged_in):
+ event = {
+ "socket": Websocket(client=MockClient(), endpoint_url="test", connection_id="test", ref="test"),
+ "body": '{"question": "Question?"}'
+ }
+ self.assertEqual(chat(event, MockContext()), {'statusCode': 200})
+
+ @patch.object(ApiToken, 'is_logged_in', return_value=True)
+ @patch('agent.search_agent.checkpoint_saver', return_value=MemorySaver())
+ def test_handler_question_missing(self, mock_create_saver, mock_is_logged_in):
mock_client = MockClient()
mock_websocket = Websocket(client=mock_client, endpoint_url="test", connection_id="test", ref="test")
event = {"socket": mock_websocket}
- handler(event, MockContext())
+ chat(event, MockContext())
response = json.loads(mock_client.received_data)
self.assertEqual(response["type"], "error")
self.assertEqual(response["message"], "Question cannot be blank")
- @patch.object(ApiToken, 'is_logged_in')
- def test_handler_question_blank(self, mock_is_logged_in):
- mock_is_logged_in.return_value = True
+ @patch.object(ApiToken, 'is_logged_in', return_value=True)
+ @patch('agent.search_agent.checkpoint_saver', return_value=MemorySaver())
+ def test_handler_question_typo(self, mock_create_saver, mock_is_logged_in):
mock_client = MockClient()
mock_websocket = Websocket(client=mock_client, endpoint_url="test", connection_id="test", ref="test")
event = {"socket": mock_websocket, "body": '{"quesion": ""}'}
- handler(event, MockContext())
+ chat(event, MockContext())
response = json.loads(mock_client.received_data)
self.assertEqual(response["type"], "error")
self.assertEqual(response["message"], "Question cannot be blank")
+
+ @patch.dict(os.environ, {"METRICS_LOG_GROUP": "/nul/test/metrics/log/group"})
+ @patch.object(ApiToken, 'is_logged_in', return_value=True)
+ @patch('agent.search_agent.checkpoint_saver', return_value=MemorySaver())
+ @patch('handlers.chat_model', return_value=FakeListChatModel(responses=["fake response"]))
+ def test_handler_with_metrics(self, mock_model, mock_create_saver, mock_is_logged_in):
+ client = boto3.client("logs", region_name="us-east-1")
+ client.create_log_group(logGroupName=os.getenv("METRICS_LOG_GROUP"))
+
+ event = {
+ "socket": Websocket(client=MockClient(), endpoint_url="test", connection_id="test", ref="test"),
+ "body": '{"question": "Question?", "ref": "test"}'
+ }
+ chat(event, MockContext())
+ chat(event, MockContext()) # Second call to test if log stream already exists
+
+ response = client.get_log_events(
+ logGroupName="/nul/test/metrics/log/group",
+ logStreamName="test_log_stream"
+ )
+ expected = {
+ "answer": ["fake response"],
+ "artifacts": [],
+ "is_dev_team": False,
+ "is_superuser": False,
+ "k": 40,
+ "model": "us.anthropic.claude-3-5-sonnet-20241022-v2:0",
+ "question": "Question?",
+ "ref": "test",
+ "token_counts": {}
+ }
+ log_events = response["events"]
+ self.assertEqual(len(log_events), 2)
+ self.assertEqual(json.loads(log_events[0]["message"]), expected)
diff --git a/chat/test/handlers/test_chat_sync.py b/chat/test/handlers/test_chat_sync.py
index 773ebfe0..31487be8 100644
--- a/chat/test/handlers/test_chat_sync.py
+++ b/chat/test/handlers/test_chat_sync.py
@@ -1,35 +1,48 @@
# ruff: noqa: E402
-import os
-import sys
+import json
+import pytest
+from unittest import TestCase
+from unittest.mock import patch
+from moto import mock_aws
-sys.path.append('./src')
+from handlers import chat_sync
+from core.apitoken import ApiToken
+from langchain_core.language_models.fake_chat_models import FakeListChatModel
+from langgraph.checkpoint.memory import MemorySaver
-from unittest import mock, TestCase
-from unittest.mock import patch
-from handlers.chat_sync import handler
-from helpers.apitoken import ApiToken
class MockContext:
def __init__(self):
self.log_stream_name = 'test'
-@mock.patch.dict(
- os.environ,
- {
- "AZURE_OPENAI_RESOURCE_NAME": "test",
- },
-)
+@mock_aws
+@pytest.mark.filterwarnings("ignore::DeprecationWarning")
class TestHandler(TestCase):
def test_handler_unauthorized(self):
- self.assertEqual(handler({"body": '{ "question": "Question?"}'}, MockContext()), {'body': 'Unauthorized', 'statusCode': 401})
+ self.assertEqual(chat_sync({"body": '{ "question": "Question?"}'}, MockContext()), {'body': 'Unauthorized', 'statusCode': 401})
- @patch.object(ApiToken, 'is_logged_in')
+ @patch.object(ApiToken, 'is_logged_in', return_value = True)
def test_no_question(self, mock_is_logged_in):
- mock_is_logged_in.return_value = True
- self.assertEqual(handler({"body": '{ "question": ""}'}, MockContext()), {'statusCode': 400, 'body': 'Question cannot be blank'})
+ self.assertEqual(chat_sync({"body": '{ "question": ""}'}, MockContext()), {'statusCode': 400, 'body': 'Question cannot be blank'})
- @patch.object(ApiToken, 'is_logged_in')
- def test_handler_success(self, mock_is_logged_in):
- mock_is_logged_in.return_value = True
- self.assertEqual(handler({"body": '{"question": "Question?"}'}, MockContext()), {'statusCode': 200})
+ @patch.object(ApiToken, 'is_logged_in', return_value = True)
+ @patch("agent.search_agent.checkpoint_saver", return_value=MemorySaver())
+ @patch('handlers.chat_model', return_value=FakeListChatModel(responses=["fake response"]))
+ def test_handler_success(self, mock_chat_model, mock_create_saver, mock_is_logged_in):
+ expected_body = {
+ "answer": ["fake response"],
+ "is_dev_team": False,
+ "is_superuser": False,
+ "k": 40,
+ "model": "us.anthropic.claude-3-5-sonnet-20241022-v2:0",
+ "question": "Question?",
+ "ref": "test_ref",
+ "artifacts": [],
+ "token_counts": {}
+ }
+ response = chat_sync({"body": '{"question": "Question?", "ref": "test_ref"}'}, MockContext())
+
+ self.assertEqual(json.loads(response.get("body")), expected_body)
+ self.assertEqual(response.get("statusCode"), 200)
+ self.assertEqual(response.get("headers", {}).get("Content-Type"), "application/json")
diff --git a/chat/test/handlers/test_opensearch_neural_search.py b/chat/test/handlers/test_opensearch_neural_search.py
deleted file mode 100644
index d7448679..00000000
--- a/chat/test/handlers/test_opensearch_neural_search.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# ruff: noqa: E402
-import sys
-sys.path.append('./src')
-
-from unittest import TestCase
-from handlers.opensearch_neural_search import OpenSearchNeuralSearch
-from langchain_core.documents import Document
-
-class MockClient():
- def search(self, index, body, params):
- return {
- "hits": {
- "hits": [
- {
- "_source": {
- "id": "test"
- },
- "_score": 0.12345
- }
- ]
- }
- }
-
-class TestOpenSearchNeuralSearch(TestCase):
- def test_similarity_search(self):
- docs = OpenSearchNeuralSearch(client=MockClient(), endpoint="test", index="test", model_id="test").similarity_search(query="test", subquery={"_source": {"excludes": ["embedding"]}}, size=10)
- self.assertEqual(docs, [Document(page_content='test', metadata={'id': 'test'})])
-
- def test_similarity_search_with_score(self):
- docs = OpenSearchNeuralSearch(client=MockClient(), endpoint="test", index="test", model_id="test").similarity_search_with_score(query="test")
- self.assertEqual(docs, [(Document(page_content='test', metadata={'id': 'test'}), 0.12345)])
-
- def test_add_texts(self):
- try:
- OpenSearchNeuralSearch(client=MockClient(), endpoint="test", index="test", model_id="test").add_texts(texts=["test"], metadatas=[{"id": "test"}])
- except Exception as e:
- self.fail(f"from_texts raised an exception: {e}")
-
- def test_from_texts(self):
- try:
- OpenSearchNeuralSearch.from_texts(clas="test", texts=["test"], metadatas=[{"id": "test"}])
- except Exception as e:
- self.fail(f"from_texts raised an exception: {e}")
\ No newline at end of file
diff --git a/chat/test/handlers/test_streaming_socket_callback_handler.py b/chat/test/handlers/test_streaming_socket_callback_handler.py
deleted file mode 100644
index 27d6cb2e..00000000
--- a/chat/test/handlers/test_streaming_socket_callback_handler.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# ruff: noqa: E402
-import sys
-sys.path.append('./src')
-
-from unittest import TestCase
-from handlers.streaming_socket_callback_handler import (
- StreamingSocketCallbackHandler,
-)
-from websocket import Websocket
-from langchain_core.outputs.llm_result import LLMResult
-from langchain_core.outputs import ChatGeneration
-from langchain_core.messages.ai import AIMessage
-
-
-class MockClient:
- def post_to_connection(self, Data, ConnectionId):
- return Data
-
-class TestMyStreamingSocketCallbackHandler(TestCase):
- def test_on_new_llm_token(self):
- handler = StreamingSocketCallbackHandler(Websocket(client=MockClient()))
- result = handler.on_llm_new_token(token="test")
- self.assertEqual(result, {'token': 'test', 'ref': {}})
- self.assertTrue(handler.stream)
-
- def test_on_llm_end(self):
- handler = StreamingSocketCallbackHandler(Websocket(client=MockClient()))
- payload = LLMResult(
- generations=[[
- ChatGeneration(
- text='LLM Response',
- generation_info={'finish_reason': 'stop', 'model_name': 'llm-model', 'system_fingerprint': 'fp_012345678'},
- message=AIMessage(
- content='LLM Response',
- response_metadata={'finish_reason': 'stop', 'model_name': 'llm-model', 'system_fingerprint': 'fp_012345678'},
- id='run-5da4fbbc-b663-4851-809d-fd11c27c5b76-0'
- )
- )
- ]],
- llm_output=None,
- run=None
- )
- result = handler.on_llm_end(payload)
- self.assertEqual(result, {'end': {'reason': 'stop'}, 'ref': {}})
- self.assertTrue(handler.stream)
-
- def test_debug_mode(self):
- handler = StreamingSocketCallbackHandler(Websocket(client=MockClient()), stream=False)
- self.assertFalse(handler.stream)
diff --git a/chat/test/helpers/test_http_event_config.py b/chat/test/helpers/test_http_event_config.py
deleted file mode 100644
index 3bc67075..00000000
--- a/chat/test/helpers/test_http_event_config.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# ruff: noqa: E402
-import json
-import os
-import sys
-sys.path.append('./src')
-
-from http_event_config import HTTPEventConfig
-from unittest import TestCase, mock
-
-
-class TestEventConfigWithoutAzureResource(TestCase):
- def test_requires_an_azure_resource(self):
- with self.assertRaises(EnvironmentError):
- HTTPEventConfig()
-
-
-@mock.patch.dict(
- os.environ,
- {
- "AZURE_OPENAI_RESOURCE_NAME": "test",
- },
-)
-class TestHTTPEventConfig(TestCase):
- def test_fetches_attributes_from_vector_database(self):
- os.environ.pop("AZURE_OPENAI_RESOURCE_NAME", None)
- with self.assertRaises(EnvironmentError):
- HTTPEventConfig()
-
- def test_defaults(self):
- actual = HTTPEventConfig(event={"body": json.dumps({"attributes": ["title"]})})
- expected_defaults = {"azure_endpoint": "https://test.openai.azure.com/"}
- self.assertEqual(actual.azure_endpoint, expected_defaults["azure_endpoint"])
-
- def test_attempt_override_without_superuser_status(self):
- actual = HTTPEventConfig(
- event={
- "body": json.dumps(
- {
- "azure_resource_name": "new_name_for_test",
- "attributes": ["title", "subject", "date_created"],
- "index": "testIndex",
- "k": 100,
- "openai_api_version": "2024-01-01",
- "question": "test question",
- "ref": "test ref",
- "size": 90,
- "temperature": 0.9,
- "text_key": "accession_number",
- }
- )
- }
- )
- expected_output = {
- "attributes": HTTPEventConfig.DEFAULT_ATTRIBUTES,
- "azure_endpoint": "https://test.openai.azure.com/",
- "k": 40,
- "openai_api_version": "2024-02-01",
- "question": "test question",
- "size": 5,
- "ref": "test ref",
- "temperature": 0.2,
- "text_key": "id",
- }
- self.assertEqual(actual.azure_endpoint, expected_output["azure_endpoint"])
- self.assertEqual(actual.attributes, expected_output["attributes"])
- self.assertEqual(actual.k, expected_output["k"])
- self.assertEqual(
- actual.openai_api_version, expected_output["openai_api_version"]
- )
- self.assertEqual(actual.question, expected_output["question"])
- self.assertEqual(actual.ref, expected_output["ref"])
- self.assertEqual(actual.temperature, expected_output["temperature"])
- self.assertEqual(actual.text_key, expected_output["text_key"])
-
- def test_debug_message(self):
- self.assertEqual(
- HTTPEventConfig(
- event={"body": json.dumps({"attributes": ["source"]})}
- ).debug_message()["type"],
- "debug",
- )
-
- def test_to_bool(self):
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool(""), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("0"), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("no"), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("false"), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("False"), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("FALSE"), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("no"), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("No"), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("NO"), False)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("true"), True)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool(True), True)
- self.assertEqual(HTTPEventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool(False), False)
diff --git a/chat/test/helpers/test_hybrid_query.py b/chat/test/helpers/test_hybrid_query.py
deleted file mode 100644
index 4e38861e..00000000
--- a/chat/test/helpers/test_hybrid_query.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-from helpers.hybrid_query import hybrid_query
-from unittest import TestCase
-
-sys.path.append('./src')
-
-class TestFunction(TestCase):
- def test_hybrid_query(self):
- dsl = hybrid_query("Question?", "MODEL_ID", k=10)
- subject = dsl["query"]["hybrid"]["queries"]
-
- checks = [
- (lambda x: x["query_string"]["query"], "Question?"),
- (lambda x: x["neural"]["embedding"]["model_id"], "MODEL_ID")
- ]
-
- self.assertEqual(len(subject), 2)
-
- for i in range(2):
- lookup, expected = checks[i]
- queries = subject[i]["bool"]["must"]
- self.assertEqual(lookup(queries[0]), expected)
- self.assertIn({ "terms": { "visibility": ["Public", "Institution"] } }, queries)
- self.assertIn({ "term": { "published": True } }, queries)
\ No newline at end of file
diff --git a/chat/test/helpers/test_metrics.py b/chat/test/helpers/test_metrics.py
deleted file mode 100644
index 84a9f9d0..00000000
--- a/chat/test/helpers/test_metrics.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# ruff: noqa: E402
-import json
-import os
-import sys
-sys.path.append('./src')
-
-from unittest import TestCase, mock
-from helpers.metrics import count_tokens, debug_response, token_usage
-from event_config import EventConfig
-
-
-
-class TestMetrics(TestCase):
- @mock.patch.dict(
- os.environ,
- {
- "AZURE_OPENAI_RESOURCE_NAME": "test",
- "WEAVIATE_URL": "http://test",
- "WEAVIATE_API_KEY": "test"
- },
- )
- def setUp(self):
- self.question = "What is your name?"
- self.original_question = {
- "question": self.question,
- "source_documents": self.generate_source_documents(20),
- }
- self.event = {
- "body": json.dumps({
- "deployment_name": "test",
- "index": "test",
- "k": 40,
- "openai_api_version": "2019-05-06",
- "prompt": "This is a test prompt.",
- "question": self.question,
- "ref": "test",
- "size": 5,
- "temperature": 0.5,
- "text_key": "text",
- "auth": "test123"
- })
- }
- self.config = EventConfig(event=self.event)
- self.response = {
- "output_text": "This is a test response.",
- }
-
- def generate_source_documents(self, count):
- return [
- {
- "accession_number": f"SourceDoc:{i+1}",
- "api_link": f"https://api.dc.library.northwestern.edu/api/v2/works/{i+1:0>32}",
- "canonical_link": f"https://dc.library.northwestern.edu/items/{i+1:0>32}",
- "title": f"Source Document {i+1}!"
- }
- for i in range(count)
- ]
-
- def test_debug_response(self):
- result = debug_response(self.config, self.response, self.original_question)
-
- self.assertEqual(result["k"], 40)
- self.assertEqual(result["question"], self.question)
- self.assertEqual(result["ref"], "test")
- self.assertEqual(result["size"], 20)
- self.assertEqual(len(result["source_documents"]), 20)
- self.assertEqual(
- result["source_documents"],
- [doc["api_link"] for doc in self.original_question["source_documents"]]
- )
-
- def test_token_usage(self):
- result = token_usage(self.config, self.response, self.original_question)
-
- expected_result = {
- "answer": 12,
- "prompt": 329,
- "question": 5,
- "source_documents": 1602,
- "total": 1948
- }
-
- self.assertEqual(result, expected_result)
-
- def test_count_tokens(self):
- val = "Hello, world!"
- expected_result = 4
-
- result = count_tokens(val)
-
- self.assertEqual(result, expected_result)
diff --git a/chat/test/persistence/__init__.py b/chat/test/persistence/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/chat/test/persistence/test_compressible_json_serializer.py b/chat/test/persistence/test_compressible_json_serializer.py
new file mode 100644
index 00000000..3fd09e12
--- /dev/null
+++ b/chat/test/persistence/test_compressible_json_serializer.py
@@ -0,0 +1,135 @@
+# ruff: noqa: E402
+import sys
+sys.path.append("./src")
+
+from unittest import TestCase
+
+from langchain_core.messages import HumanMessage
+from persistence.compressible_json_serializer import CompressibleJsonSerializer
+import warnings
+
+warnings.simplefilter("ignore", DeprecationWarning)
+class TestCompressibleJsonSerializer(TestCase):
+ def test_dumps_typed(self):
+ serializer = CompressibleJsonSerializer()
+ obj = {"key": "value"}
+ data = serializer.dumps_typed(obj)
+ self.assertEqual(data, ("json", '{"key": "value"}'))
+
+ def test_loads_typed(self):
+ serializer = CompressibleJsonSerializer()
+ data = ("json", '{"key": "value"}')
+ obj = serializer.loads_typed(data)
+ self.assertEqual(obj, {"key": "value"})
+
+ def test_dumps_typed_with_bz2_compression(self):
+ serializer = CompressibleJsonSerializer(compression="bz2")
+ obj = {"key": "value"}
+ data = serializer.dumps_typed(obj)
+ self.assertEqual(data[0], "bz2_json")
+
+ def test_loads_typed_with_bz2_compression(self):
+ serializer = CompressibleJsonSerializer(compression="bz2")
+ data = (
+ "bz2_json",
+ "QlpoOTFBWSZTWYByjU0AAAcZgFAAABAiDAMqIAAim0BkEDQNAFPUpFyhWjhdyRThQkIByjU0",
+ )
+ obj = serializer.loads_typed(data)
+ self.assertEqual(obj, {"key": "value"})
+
+ def test_dumps_typed_with_gzip_compression(self):
+ serializer = CompressibleJsonSerializer(compression="gzip")
+ obj = {"key": "value"}
+ data = serializer.dumps_typed(obj)
+ self.assertEqual(data[0], "gzip_json")
+
+ def test_loads_typed_with_gzip_compression(self):
+ serializer = CompressibleJsonSerializer(compression="gzip")
+ data = ("gzip_json", "H4sIALfEW2cC/6tWyk6tVLJSUCpLzClNVaoFABtINTMQAAAA")
+ obj = serializer.loads_typed(data)
+ self.assertEqual(obj, {"key": "value"})
+
+ def test_nested_complex_object(self):
+ serializer = CompressibleJsonSerializer(compression="gzip")
+ data = (
+ "gzip_json",
+ "H4sIAGwoW2cC/2WQMW/CMBCF/0rktU1lpyGELAxdunTrViHrjC8Q4Zyj2IEilP/enAtSpUoezv7e3Tu/mziLJlPPmYhhKUQhizJXxXI+Vd2oqlHFy6belFI+SdlIKRZlZ1mpsDW1WR\
+W5WRuTVxWa3LRtm5dQlXVRrlZr+8rq/RGI0OkzuAnZ4ya0DhHGqHW69RgCHBL6YhavAyYk3qce6OMX8ygLEVLL3lNEiqx5A8qufsoCwrg/Zq0fs4sfTyED46e43H004NyW+8HaLnaewOnTBcZD2mZewIhh8BRQ9xjh4c\
+KAd2GXI2/CIwh6fqDJuUcS9xq/oR8cwxZcwHnezfPf7+MYFuv/AShWPagOiHSXdDRM94zSpAHJdnRYJGRTWLv5B3RajDe+AQAA",
+ )
+ obj = serializer.loads_typed(data)
+ self.assertEqual(obj, {
+ "v": 1,
+ "ts": "2024-12-12T18:16:12.989400+00:00",
+ "id": "1efb8b52-b7bb-66eb-bfff-4a64824557d3",
+ "channel_values": {
+ "__start__": {
+ "messages": [
+ HumanMessage(
+ content="Can you search for works about football?",
+ additional_kwargs={},
+ response_metadata={},
+ )
+ ]
+ }
+ },
+ "channel_versions": {"__start__": 1},
+ "versions_seen": {"__input__": {}},
+ "pending_sends": [],
+ })
+
+ def test_dumps_typed_unsupported_compression(self):
+ serializer = CompressibleJsonSerializer(compression="unsupported")
+ with self.assertRaises(ValueError) as context:
+ serializer.dumps_typed({"key": "value"})
+
+ self.assertIn("Unsupported compression type", str(context.exception))
+
+ def test_loads_typed_unknown_type(self):
+ serializer = CompressibleJsonSerializer()
+ data = ("unknown_type", "payload")
+ with self.assertRaises(ValueError) as context:
+ serializer.loads_typed(data)
+
+ self.assertIn("Unknown data type", str(context.exception))
+
+ def test_object_hook_unknown_type(self):
+ serializer = CompressibleJsonSerializer()
+ invalid_json = '{"__type__": "UnknownType", "data": {}}'
+ data = ("json", invalid_json)
+ with self.assertRaises(ValueError) as context:
+ serializer.loads_typed(data)
+
+ self.assertIn("Unknown type", str(context.exception))
+
+ def test_loads_typed_empty_payload(self):
+ from json.decoder import JSONDecodeError
+ serializer = CompressibleJsonSerializer()
+ data = ("json", "")
+ with self.assertRaises(JSONDecodeError):
+ serializer.loads_typed(data)
+
+ def test_dumps_typed_base_message(self):
+ serializer = CompressibleJsonSerializer()
+ # Create a BaseMessage instance (HumanMessage is one)
+ message = HumanMessage(content="Hello")
+ data_type, data_str = serializer.dumps_typed(message)
+ # Verify it returns a JSON string with the correct type and data
+ self.assertEqual(data_type, "json")
+ # We know that it returns {"__type__": "HumanMessage", "data": {...}}
+ # Check if the resulting JSON contains the expected keys
+ self.assertIn('"__type__": "HumanMessage"', data_str)
+ self.assertIn('"data":', data_str)
+ self.assertIn('"content": "Hello"', data_str)
+
+ def test_dumps_typed_type_error(self):
+ serializer = CompressibleJsonSerializer()
+
+ # Define a class that is not a BaseMessage
+ class NotSerializable:
+ pass
+
+ with self.assertRaises(TypeError) as context:
+ serializer.dumps_typed(NotSerializable())
+
+ self.assertIn("is not JSON serializable", str(context.exception))
diff --git a/chat/test/persistence/test_s3_checkpointer.py b/chat/test/persistence/test_s3_checkpointer.py
new file mode 100644
index 00000000..20d1bbb6
--- /dev/null
+++ b/chat/test/persistence/test_s3_checkpointer.py
@@ -0,0 +1,656 @@
+# ruff: noqa: E402
+import sys
+sys.path.append("./src")
+
+import pytest
+from unittest import TestCase
+
+import boto3
+import json
+import time
+from moto import mock_aws
+from langchain_core.runnables import RunnableConfig
+from langgraph.checkpoint.base import (
+ Checkpoint,
+ CheckpointMetadata,
+)
+from typing import Optional
+from persistence.s3_checkpointer import S3Checkpointer
+
+import bz2
+import base64
+import gzip
+
+BUCKET_NAME = "mybucket"
+REGION = "us-east-1"
+THREAD_ID = "thread1"
+CHECKPOINT_NAMESPACE = ""
+CHECKPOINT_ID_1 = "checkpoint1"
+CHECKPOINT_ID_2 = "checkpoint2"
+
+CHECKPOINTS = [
+ {
+ "id": CHECKPOINT_ID_1,
+ "key": "checkpoints/thread1/__default__/checkpoint1/checkpoint.json",
+ "body": json.dumps(
+ {
+ "checkpoint_type": "json",
+ "checkpoint_data": "{}",
+ "metadata_data": "{}",
+ "parent_checkpoint_id": None,
+ "timestamp": int(time.time() * 1000),
+ }
+ ),
+ },
+ {
+ "id": CHECKPOINT_ID_2,
+ "key": "checkpoints/thread1/__default__/checkpoint2/checkpoint.json",
+ "body": json.dumps(
+ {
+ "checkpoint_type": "json",
+ "checkpoint_data": "{}",
+ "metadata_data": "{}",
+ "parent_checkpoint_id": CHECKPOINT_ID_1,
+ "timestamp": int(time.time() * 1000),
+ }
+ ),
+ },
+]
+
+
+@mock_aws
+@pytest.mark.filterwarnings("ignore::DeprecationWarning")
+class TestS3Checkpointer(TestCase):
+ def setUp(self):
+ """Initialize the mock S3 bucket and S3Checkpointer instance before each test."""
+ self.s3 = boto3.client("s3", region_name=REGION)
+ self.s3.create_bucket(Bucket=BUCKET_NAME)
+ self.checkpointer = S3Checkpointer(bucket_name=BUCKET_NAME, region_name=REGION)
+
+ def tearDown(self):
+ """Clean up after each test."""
+ self.checkpointer.delete_checkpoints(THREAD_ID)
+
+ def setup_s3_bucket(self):
+ """Upload sample checkpoints to the mock S3 bucket."""
+ for checkpoint in CHECKPOINTS:
+ self.s3.put_object(
+ Bucket=BUCKET_NAME,
+ Key=checkpoint["key"],
+ Body=checkpoint["body"],
+ )
+
+ def create_config(self, checkpoint_id: Optional[str] = None) -> RunnableConfig:
+ """Helper method to create RunnableConfig."""
+ config_data = {
+ "configurable": {
+ "thread_id": THREAD_ID,
+ "checkpoint_ns": CHECKPOINT_NAMESPACE,
+ }
+ }
+ if checkpoint_id:
+ config_data["configurable"]["checkpoint_id"] = checkpoint_id
+ return RunnableConfig(config_data)
+
+ #
+ # Basic Put and Get Checkpoints
+ #
+
+ def test_put_checkpoint(self):
+ """Test that S3Checkpointer.put correctly saves a checkpoint to S3."""
+ new_checkpoint = Checkpoint(id="checkpoint3")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+
+ returned_config = self.checkpointer.put(config, new_checkpoint, metadata, {})
+
+ self.assertEqual(returned_config["configurable"]["checkpoint_id"], "checkpoint3")
+ expected_key = (
+ f"checkpoints/{THREAD_ID}/__default__/checkpoint3/checkpoint.json"
+ )
+ response = self.s3.get_object(Bucket=BUCKET_NAME, Key=expected_key)
+ body = json.loads(response["Body"].read().decode("utf-8"))
+
+ self.assertEqual(body["checkpoint_type"], "json")
+ checkpoint_data = json.loads(body["checkpoint_data"])
+ self.assertEqual(checkpoint_data["id"], "checkpoint3")
+ self.assertEqual(body["metadata_data"], "{}")
+ assert body["parent_checkpoint_id"] is None
+ assert "timestamp" in body
+
+ def test_put_overwrite_checkpoint(self):
+ """Test that putting a checkpoint with an existing ID overwrites it."""
+ initial_checkpoint = Checkpoint(id="checkpoint6")
+ initial_metadata = CheckpointMetadata()
+ config = self.create_config()
+ self.checkpointer.put(config, initial_checkpoint, initial_metadata, {})
+
+ updated_checkpoint = Checkpoint(id="checkpoint6")
+ updated_metadata = CheckpointMetadata()
+ self.checkpointer.put(config, updated_checkpoint, updated_metadata, {})
+
+ checkpoint_tuple = self.checkpointer.get_tuple(config)
+ assert checkpoint_tuple is not None
+ self.assertEqual(checkpoint_tuple.config["configurable"]["checkpoint_id"], "checkpoint6")
+
+ def test_put_invalid_checkpoint(self):
+ """Test putting an invalid checkpoint raises appropriate errors."""
+ with self.assertRaises(KeyError):
+ invalid_checkpoint = {}
+ config = self.create_config()
+ self.checkpointer.put(config, invalid_checkpoint, CheckpointMetadata(), {})
+
+ def test_get_tuple(self):
+ """Test that S3Checkpointer.get_tuple correctly retrieves a checkpoint tuple."""
+ self.setup_s3_bucket()
+ config = self.create_config(checkpoint_id=CHECKPOINT_ID_2)
+ checkpoint_tuple = self.checkpointer.get_tuple(config)
+
+ assert checkpoint_tuple is not None
+ assert (
+ checkpoint_tuple.config["configurable"]["checkpoint_id"] == CHECKPOINT_ID_2
+ )
+ self.assertEqual(checkpoint_tuple.checkpoint, {})
+ self.assertEqual(checkpoint_tuple.metadata, {})
+ assert checkpoint_tuple.parent_config is not None
+ assert (
+ checkpoint_tuple.parent_config["configurable"]["checkpoint_id"]
+ == CHECKPOINT_ID_1
+ )
+ self.assertEqual(checkpoint_tuple.pending_writes, [])
+
+ def test_get_tuple_nonexistent_checkpoint(self):
+ """Test retrieving a checkpoint tuple that does not exist."""
+ config = self.create_config(checkpoint_id="nonexistent")
+ checkpoint_tuple = self.checkpointer.get_tuple(config)
+ assert checkpoint_tuple is None
+
+ def test_get_tuple_no_checkpoint_id_no_existing_checkpoints(self):
+ """Test get_tuple with no checkpoint_id and no existing checkpoints."""
+ config = self.create_config()
+ result = self.checkpointer.get_tuple(config)
+ assert result is None
+
+ def test_get_tuple_missing_metadata(self):
+ """Test get_tuple when metadata is missing."""
+ key = f"checkpoints/{THREAD_ID}/__default__/missing_meta/checkpoint.json"
+ checkpoint_body = json.dumps(
+ {
+ "checkpoint_type": "json",
+ "checkpoint_data": "{}",
+ # "metadata_data": "{}" is intentionally omitted
+ "parent_checkpoint_id": None,
+ "timestamp": int(time.time() * 1000),
+ }
+ )
+ self.s3.put_object(Bucket=BUCKET_NAME, Key=key, Body=checkpoint_body)
+
+ config = self.create_config(checkpoint_id="missing_meta")
+ with self.assertRaises(ValueError) as context:
+ self.checkpointer.get_tuple(config)
+
+ self.assertIn("Metadata is missing", str(context.exception))
+ #
+ # Writes (Pending Writes) Tests
+ #
+
+ def test_put_writes(self):
+ """Test that S3Checkpointer.put_writes correctly saves writes to S3."""
+ checkpoint = Checkpoint(id="checkpoint4")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ returned_config = self.checkpointer.put(config, checkpoint, metadata, {})
+
+ writes = [
+ ("channel1", {"data": "value1"}),
+ ("channel2", {"data": "value2"}),
+ ]
+ task_id = "task123"
+ self.checkpointer.put_writes(returned_config, writes, task_id)
+
+ for idx, (channel, value) in enumerate(writes):
+ write_key = f"checkpoints/{THREAD_ID}/__default__/checkpoint4/writes/{task_id}/{idx}.json"
+ response = self.s3.get_object(Bucket=BUCKET_NAME, Key=write_key)
+ body = json.loads(response["Body"].read().decode("utf-8"))
+ self.assertEqual(body["channel"], channel)
+ self.assertEqual(body["type"], "json")
+ self.assertEqual(body["value"], json.dumps(value))
+ assert "timestamp" in body
+
+ def test_put_writes_empty(self):
+ """Test putting an empty list of writes."""
+ checkpoint = Checkpoint(id="checkpoint_empty_writes")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ returned_config = self.checkpointer.put(config, checkpoint, metadata, {})
+ self.checkpointer.put_writes(returned_config, [], "task_empty")
+
+ checkpoint_tuple = self.checkpointer.get_tuple(returned_config)
+ assert checkpoint_tuple is not None
+ self.assertEqual(checkpoint_tuple.pending_writes, [])
+
+ def test_put_writes_multiple_tasks(self):
+ """Test putting writes from multiple tasks."""
+ checkpoint = Checkpoint(id="checkpoint_multi_tasks")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ returned_config = self.checkpointer.put(config, checkpoint, metadata, {})
+
+ writes_task1 = [
+ ("channel1", {"data": "task1_value1"}),
+ ("channel2", {"data": "task1_value2"}),
+ ]
+ writes_task2 = [
+ ("channel1", {"data": "task2_value1"}),
+ ]
+
+ self.checkpointer.put_writes(returned_config, writes_task1, "task1")
+ self.checkpointer.put_writes(returned_config, writes_task2, "task2")
+
+ checkpoint_tuple = self.checkpointer.get_tuple(returned_config)
+ assert checkpoint_tuple is not None
+ self.assertEqual(len(checkpoint_tuple.pending_writes), 3)
+
+ task1_writes = [w for w in checkpoint_tuple.pending_writes if w[0] == "task1"]
+ self.assertEqual(len(task1_writes), 2)
+ self.assertEqual(task1_writes[0][1], "channel1")
+ self.assertEqual(task1_writes[0][2], {"data": "task1_value1"})
+ self.assertEqual(task1_writes[1][1], "channel2")
+ self.assertEqual(task1_writes[1][2], {"data": "task1_value2"})
+
+ task2_writes = [w for w in checkpoint_tuple.pending_writes if w[0] == "task2"]
+ self.assertEqual(len(task2_writes), 1)
+ self.assertEqual(task2_writes[0][1], "channel1")
+ self.assertEqual(task2_writes[0][2], {"data": "task2_value1"})
+
+ def test_get_tuple_with_writes(self):
+ """Test retrieving a checkpoint tuple that includes pending writes."""
+ checkpoint = Checkpoint(id="checkpoint5")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ returned_config = self.checkpointer.put(config, checkpoint, metadata, {})
+
+ writes = [
+ ("channelA", {"info": "dataA"}),
+ ("channelB", {"info": "dataB"}),
+ ]
+ task_id = "task456"
+ self.checkpointer.put_writes(returned_config, writes, task_id)
+
+ checkpoint_tuple = self.checkpointer.get_tuple(returned_config)
+ assert checkpoint_tuple is not None
+ self.assertEqual(checkpoint_tuple.config["configurable"]["checkpoint_id"], "checkpoint5")
+ self.assertEqual(checkpoint_tuple.checkpoint["id"], "checkpoint5")
+ self.assertEqual(checkpoint_tuple.metadata, {})
+ assert checkpoint_tuple.parent_config is None
+ self.assertEqual(len(checkpoint_tuple.pending_writes), 2)
+ for i, (task, channel, value) in enumerate(checkpoint_tuple.pending_writes):
+ self.assertEqual(task, task_id)
+ self.assertEqual(channel, writes[i][0])
+ self.assertEqual(value, writes[i][1])
+
+ #
+ # Listing Checkpoints and Filters
+ #
+
+ def test_list_checkpoints_with_filters(self):
+ """Test listing checkpoints with filters like 'before' and 'limit'."""
+ self.setup_s3_bucket()
+ saver = self.checkpointer
+ config = self.create_config()
+
+ all_checkpoints = list(saver.list(config))
+ self.assertEqual(len(all_checkpoints), len(CHECKPOINTS))
+
+ limited_checkpoints = list(saver.list(config, limit=1))
+ self.assertEqual(len(limited_checkpoints), 1)
+ assert (
+ limited_checkpoints[0].config["configurable"]["checkpoint_id"]
+ == CHECKPOINT_ID_2
+ )
+
+ before_config = self.create_config(checkpoint_id=CHECKPOINT_ID_2)
+ before_checkpoints = list(saver.list(config, before=before_config))
+ self.assertEqual(len(before_checkpoints), 1)
+ assert (
+ before_checkpoints[0].config["configurable"]["checkpoint_id"]
+ == CHECKPOINT_ID_1
+ )
+
+ def test_list_no_checkpoints(self):
+ """Test listing checkpoints when none exist."""
+ config = self.create_config()
+ retrieved_checkpoints = list(self.checkpointer.list(config))
+ self.assertEqual(len(retrieved_checkpoints), 0)
+
+ def test_list_with_limit(self):
+ """Test listing with a limit."""
+ self.setup_s3_bucket()
+ config = self.create_config()
+ results = list(self.checkpointer.list(config, limit=1))
+ self.assertEqual(len(results), 1)
+
+ def test_list_no_config(self):
+ """Test listing when no config is provided."""
+ with self.assertRaises(ValueError) as context:
+ list(self.checkpointer.list(None))
+
+ self.assertIn("config must be provided", str(context.exception))
+
+ def test_list_before_removes_all(self):
+ """Test listing with a 'before' config that removes all results."""
+ self.setup_s3_bucket()
+ config = self.create_config()
+ before_config = self.create_config(checkpoint_id="checkpoint0")
+ results = list(self.checkpointer.list(config, before=before_config))
+ self.assertEqual(len(results), 0)
+
+ #
+ # Parent-Child Checkpoint Relationship
+ #
+
+ def test_put_and_get_with_parent_checkpoint(self):
+ """Test putting a checkpoint with a parent and retrieving the parent config."""
+ parent_checkpoint = Checkpoint(id="parent_checkpoint")
+ parent_metadata = CheckpointMetadata()
+ parent_config = self.create_config()
+ self.checkpointer.put(parent_config, parent_checkpoint, parent_metadata, {})
+
+ child_checkpoint = Checkpoint(id="child_checkpoint")
+ child_metadata = CheckpointMetadata()
+ child_config = RunnableConfig(
+ {
+ "configurable": {
+ "thread_id": THREAD_ID,
+ "checkpoint_ns": CHECKPOINT_NAMESPACE,
+ "checkpoint_id": "parent_checkpoint",
+ }
+ }
+ )
+ self.checkpointer.put(child_config, child_checkpoint, child_metadata, {})
+
+ child_tuple = self.checkpointer.get_tuple(
+ RunnableConfig(
+ {
+ "configurable": {
+ "thread_id": THREAD_ID,
+ "checkpoint_ns": CHECKPOINT_NAMESPACE,
+ "checkpoint_id": "child_checkpoint",
+ }
+ }
+ )
+ )
+ assert child_tuple is not None
+ self.assertEqual(child_tuple.config["configurable"]["checkpoint_id"], "child_checkpoint")
+ assert child_tuple.parent_config is not None
+ assert (
+ child_tuple.parent_config["configurable"]["checkpoint_id"]
+ == "parent_checkpoint"
+ )
+
+ #
+ # Namespaces
+ #
+
+ def test_put_with_namespace(self):
+ """Test putting and retrieving a checkpoint within a specific namespace."""
+ namespace = "custom_ns"
+ config = RunnableConfig(
+ {
+ "configurable": {
+ "thread_id": THREAD_ID,
+ "checkpoint_ns": namespace,
+ }
+ }
+ )
+ checkpoint = Checkpoint(id="checkpoint_ns1")
+ metadata = CheckpointMetadata()
+ returned_config = self.checkpointer.put(config, checkpoint, metadata, {})
+
+ retrieved_tuple = self.checkpointer.get_tuple(returned_config)
+ assert retrieved_tuple is not None
+ self.assertEqual(retrieved_tuple.config["configurable"]["checkpoint_ns"], namespace)
+ assert (
+ retrieved_tuple.config["configurable"]["checkpoint_id"] == "checkpoint_ns1"
+ )
+
+ retrieved_checkpoints = list(self.checkpointer.list(config))
+ self.assertEqual(len(retrieved_checkpoints), 1)
+ assert (
+ retrieved_checkpoints[0].config["configurable"]["checkpoint_id"]
+ == "checkpoint_ns1"
+ )
+
+ def test_list_with_non_default_namespace(self):
+ """Test listing checkpoints in a non-default namespace."""
+ namespace = "ns1"
+ config = RunnableConfig(
+ {
+ "configurable": {
+ "thread_id": THREAD_ID,
+ "checkpoint_ns": namespace,
+ }
+ }
+ )
+
+ checkpoint_ns1 = Checkpoint(id="ns1_ckpt1")
+ checkpoint_ns2 = Checkpoint(id="ns2_ckpt1")
+ metadata = CheckpointMetadata()
+
+ self.checkpointer.put(config, checkpoint_ns1, metadata, {})
+
+ config_ns2 = RunnableConfig(
+ {
+ "configurable": {
+ "thread_id": THREAD_ID,
+ "checkpoint_ns": "ns2",
+ }
+ }
+ )
+ self.checkpointer.put(config_ns2, checkpoint_ns2, metadata, {})
+
+ retrieved_ns1 = list(self.checkpointer.list(config))
+ self.assertEqual(len(retrieved_ns1), 1)
+ self.assertEqual(retrieved_ns1[0].config["configurable"]["checkpoint_id"], "ns1_ckpt1")
+
+ retrieved_ns2 = list(self.checkpointer.list(config_ns2))
+ self.assertEqual(len(retrieved_ns2), 1)
+ self.assertEqual(retrieved_ns2[0].config["configurable"]["checkpoint_id"], "ns2_ckpt1")
+
+ #
+ # Compression
+ #
+
+ def test_put_with_compression(self):
+ """Test putting a checkpoint with compression enabled."""
+ import base64
+
+ saver_compressed = S3Checkpointer(
+ bucket_name=BUCKET_NAME, region_name=REGION, compression="gzip"
+ )
+
+ checkpoint = Checkpoint(id="checkpoint_compressed")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ saver_compressed.put(config, checkpoint, metadata, {})
+
+ expected_key = (
+ f"checkpoints/{THREAD_ID}/__default__/checkpoint_compressed/checkpoint.json"
+ )
+ response = self.s3.get_object(Bucket=BUCKET_NAME, Key=expected_key)
+ body = json.loads(response["Body"].read().decode("utf-8"))
+
+ checkpoint_data_encoded = body["checkpoint_data"]
+ checkpoint_data = base64.b64decode(checkpoint_data_encoded)
+ assert checkpoint_data.startswith(b"\x1f\x8b") # Gzip magic number
+
+ def test_list_bz2_checkpoints(self):
+ """Test listing a checkpoint where checkpoint_type starts with 'bz2'."""
+ compressed_data = base64.b64encode(bz2.compress(b"{}")).decode("utf-8")
+ key = f"checkpoints/{THREAD_ID}/__default__/bz2_ckpt/checkpoint.json"
+ data = {
+ "checkpoint_type": "bz2_json",
+ "checkpoint_data": compressed_data,
+ "metadata_data": compressed_data,
+ "parent_checkpoint_id": None,
+ "timestamp": int(time.time() * 1000),
+ }
+ self.s3.put_object(Bucket=BUCKET_NAME, Key=key, Body=json.dumps(data))
+
+ config = self.create_config(checkpoint_id="bz2_ckpt")
+ retrieved_checkpoints = list(self.checkpointer.list(config))
+ self.assertEqual(len(retrieved_checkpoints), 1)
+ self.assertEqual(retrieved_checkpoints[0].checkpoint, {})
+ self.assertEqual(retrieved_checkpoints[0].metadata, {})
+
+ def test_list_gzip_checkpoints(self):
+ """Test listing a checkpoint where checkpoint_type starts with 'gzip'."""
+ compressed_data = base64.b64encode(gzip.compress(b"{}")).decode("utf-8")
+ key = f"checkpoints/{THREAD_ID}/__default__/gzip_ckpt/checkpoint.json"
+ data = {
+ "checkpoint_type": "gzip_json",
+ "checkpoint_data": compressed_data,
+ "metadata_data": compressed_data,
+ "parent_checkpoint_id": None,
+ "timestamp": int(time.time() * 1000),
+ }
+ self.s3.put_object(Bucket=BUCKET_NAME, Key=key, Body=json.dumps(data))
+
+ config = self.create_config(checkpoint_id="gzip_ckpt")
+ retrieved_checkpoints = list(self.checkpointer.list(config))
+ self.assertEqual(len(retrieved_checkpoints), 1)
+ self.assertEqual(retrieved_checkpoints[0].checkpoint, {})
+ self.assertEqual(retrieved_checkpoints[0].metadata, {})
+
+ #
+ # Concurrency
+ #
+
+ def test_concurrent_puts(self):
+ """Test concurrent puts to ensure thread safety (basic simulation)."""
+ import threading
+
+ def put_checkpoint(id_suffix):
+ checkpoint = Checkpoint(id=f"checkpoint_concurrent_{id_suffix}")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ self.checkpointer.put(config, checkpoint, metadata, {})
+
+ threads = []
+ for i in range(5):
+ t = threading.Thread(target=put_checkpoint, args=(i,))
+ threads.append(t)
+ t.start()
+
+ for t in threads:
+ t.join()
+
+ config = self.create_config()
+ retrieved_checkpoints = list(self.checkpointer.list(config))
+ expected_ids = {f"checkpoint_concurrent_{i}" for i in range(5)}
+ retrieved_ids = {
+ ck.config["configurable"]["checkpoint_id"] for ck in retrieved_checkpoints
+ }
+ assert expected_ids.issubset(retrieved_ids)
+
+ #
+ # Latest Checkpoint ID
+ #
+
+ def test_get_latest_checkpoint_id(self):
+ """Test the internal method to get the latest checkpoint ID."""
+ self.setup_s3_bucket()
+ latest_id = self.checkpointer._get_latest_checkpoint_id(
+ THREAD_ID, CHECKPOINT_NAMESPACE
+ )
+ self.assertEqual(latest_id, CHECKPOINT_ID_2)
+
+ def test_get_latest_checkpoint_id_no_keys(self):
+ """Test getting the latest checkpoint ID when none exist."""
+ latest_id = self.checkpointer._get_latest_checkpoint_id(
+ THREAD_ID, CHECKPOINT_NAMESPACE
+ )
+ assert latest_id is None
+
+ #
+ # Deleting Checkpoints
+ #
+
+ def test_delete_checkpoints(self):
+ """Test that delete_checkpoints correctly removes all checkpoints for a thread."""
+ self.setup_s3_bucket()
+ config = self.create_config()
+ retrieved_checkpoints = list(self.checkpointer.list(config))
+ self.assertEqual(len(retrieved_checkpoints), len(CHECKPOINTS))
+
+ self.checkpointer.delete_checkpoints(THREAD_ID)
+ retrieved_after_delete = list(self.checkpointer.list(config))
+ self.assertEqual(len(retrieved_after_delete), 0)
+
+ def test_delete_checkpoints_many(self):
+ """Test deleting multiple checkpoints in batches."""
+ for i in range(3):
+ ckpt = Checkpoint(id=f"ckpt_del_{i}")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ self.checkpointer.put(config, ckpt, metadata, {})
+
+ self.checkpointer.delete_checkpoints(THREAD_ID)
+ retrieved_after_delete = list(self.checkpointer.list(self.create_config()))
+ self.assertEqual(len(retrieved_after_delete), 0)
+
+ #
+ # Invalid Key Formats and Other Edge Cases
+ #
+
+ def test_load_pending_writes_invalid_key_format(self):
+ """Test _load_pending_writes handling invalid write key formats."""
+ checkpoint = Checkpoint(id="ckpt_invalid_write")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ returned_config = self.checkpointer.put(config, checkpoint, metadata, {})
+
+ invalid_write_key = f"checkpoints/{THREAD_ID}/__default__/ckpt_invalid_write/writes/invalid.json"
+ self.s3.put_object(Bucket=BUCKET_NAME, Key=invalid_write_key, Body="{}")
+
+ tuple_result = self.checkpointer.get_tuple(returned_config)
+ assert tuple_result is not None
+ # No valid writes parsed due to invalid format
+ self.assertEqual(tuple_result.pending_writes, [])
+
+ def test_invalid_checkpoint_key_format(self):
+ """Test handling of invalid checkpoint key formats."""
+ invalid_key = "checkpoints/thread1/__default__/invalid_format.json"
+ self.s3.put_object(
+ Bucket=BUCKET_NAME,
+ Key=invalid_key,
+ Body='{"invalid": "data"}',
+ )
+
+ config = self.create_config()
+ with self.assertRaises(ValueError) as context:
+ list(self.checkpointer.list(config))
+
+ self.assertIn("Invalid checkpoint key format", str(context.exception))
+
+ @pytest.mark.slow
+ def test_delete_checkpoints_large_batch(self):
+ """Test deleting more than 1000 checkpoints to verify batch deletion logic."""
+ # Create 1001 objects to force batch deletion
+ for i in range(1001):
+ ckpt = Checkpoint(id=f"ckpt_del_{i}")
+ metadata = CheckpointMetadata()
+ config = self.create_config()
+ self.checkpointer.put(config, ckpt, metadata, {})
+
+ # Verify objects were created
+ config = self.create_config()
+ retrieved_before_delete = list(self.checkpointer.list(config))
+ self.assertEqual(len(retrieved_before_delete), 1001)
+
+ # Delete all checkpoints
+ self.checkpointer.delete_checkpoints(THREAD_ID)
+
+ # Verify all objects were deleted
+ retrieved_after_delete = list(self.checkpointer.list(config))
+        self.assertEqual(len(retrieved_after_delete), 0)
diff --git a/chat/test/search/__init__.py b/chat/test/search/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/chat/test/search/test_hybrid_query.py b/chat/test/search/test_hybrid_query.py
new file mode 100644
index 00000000..b208cfaa
--- /dev/null
+++ b/chat/test/search/test_hybrid_query.py
@@ -0,0 +1,28 @@
+from search.hybrid_query import hybrid_query, filter
+
+class TestFunction:
+ def test_hybrid_query(self):
+ dsl = hybrid_query("Question?", "MODEL_ID", k=10)
+ subject = dsl["query"]["hybrid"]["queries"]
+
+ assert len(subject) == 2
+
+ queries_first = subject[0]["bool"]["must"]
+ assert queries_first[0]["query_string"]["query"] == "Question?"
+ assert {"terms": {"visibility": ["Public", "Institution"]}} in queries_first
+ assert {"term": {"published": True}} in queries_first
+
+ queries_second = subject[1]["bool"]["must"]
+ assert queries_second[0]["neural"]["embedding"]["model_id"] == "MODEL_ID"
+ assert {"terms": {"visibility": ["Public", "Institution"]}} in queries_second
+ assert {"term": {"published": True}} in queries_second
+
+ def test_filter(self):
+ dummy_query = {"match": {"title": "Hello World"}}
+ result = filter(dummy_query)
+ assert "bool" in result
+ assert "must" in result["bool"]
+ must_clause = result["bool"]["must"]
+ assert must_clause[0] == dummy_query
+ assert {"terms": {"visibility": ["Public", "Institution"]}} in must_clause
+    assert {"term": {"published": True}} in must_clause
diff --git a/chat/test/search/test_opensearch_neural_search.py b/chat/test/search/test_opensearch_neural_search.py
new file mode 100644
index 00000000..ceb7ee72
--- /dev/null
+++ b/chat/test/search/test_opensearch_neural_search.py
@@ -0,0 +1,161 @@
+# ruff: noqa: E402
+from unittest import TestCase
+from unittest.mock import Mock, patch
+from opensearchpy import ConnectionError, AuthenticationException, NotFoundError
+from search.opensearch_neural_search import OpenSearchNeuralSearch
+from langchain_core.documents import Document
+
+class MockClient():
+ def search(self, index, body, params):
+ return {
+ "hits": {
+ "hits": [
+ {
+ "_source": {
+ "id": "test"
+ },
+ "_score": 0.12345
+ }
+ ]
+ }
+ }
+
+class MockErrorClient():
+ def search(self, index, body, params):
+ raise ConnectionError("Failed to connect to OpenSearch")
+
+class TestOpenSearchNeuralSearch(TestCase):
+ def setUp(self):
+ self.search = OpenSearchNeuralSearch(
+ client=MockClient(),
+ endpoint="test",
+ index="test",
+ model_id="test"
+ )
+
+ self.error_search = OpenSearchNeuralSearch(
+ client=MockErrorClient(),
+ endpoint="test",
+ index="test",
+ model_id="test"
+ )
+
+ def test_similarity_search(self):
+ docs = self.search.similarity_search(
+ query="test",
+ subquery={"_source": {"excludes": ["embedding"]}},
+ size=10
+ )
+ self.assertEqual(
+ docs,
+ [Document(page_content='test', metadata={'id': 'test'})]
+ )
+
+ def test_similarity_search_connection_error(self):
+ with self.assertRaises(ConnectionError):
+ self.error_search.similarity_search(query="test")
+
+ @patch('opensearchpy.OpenSearch')
+ def test_similarity_search_auth_error(self, mock_opensearch):
+ mock_opensearch.return_value.search.side_effect = AuthenticationException(
+ "Authentication failed"
+ )
+ search = OpenSearchNeuralSearch(
+ client=mock_opensearch.return_value,
+ endpoint="test",
+ index="test",
+ model_id="test"
+ )
+ with self.assertRaises(AuthenticationException):
+ search.similarity_search(query="test")
+
+ def test_similarity_search_with_score(self):
+ docs = self.search.similarity_search_with_score(query="test")
+ self.assertEqual(
+ docs,
+ [(Document(page_content='test', metadata={'id': 'test'}), 0.12345)]
+ )
+
+ def test_similarity_search_with_score_connection_error(self):
+ with self.assertRaises(ConnectionError):
+ self.error_search.similarity_search_with_score(query="test")
+
+ @patch('opensearchpy.OpenSearch')
+ def test_aggregations_search_index_not_found(self, mock_opensearch):
+ mock_opensearch.return_value.search.side_effect = NotFoundError(
+ 404,
+ "index_not_found_exception",
+ {"error": "index not found"}
+ )
+ search = OpenSearchNeuralSearch(
+ client=mock_opensearch.return_value,
+ endpoint="test",
+ index="test",
+ model_id="test"
+ )
+ with self.assertRaises(NotFoundError):
+ search.aggregations_search(agg_field="test_field")
+
+ def test_aggregations_search_connection_error(self):
+ with self.assertRaises(ConnectionError):
+ self.error_search.aggregations_search(agg_field="test_field")
+
+ def test_add_texts_exception(self):
+ # Test to ensure the exception handler works
+ with self.assertRaises(AssertionError) as context:
+ search = self.search
+ search.add_texts = Mock(side_effect=Exception("Test exception"))
+ try:
+ search.add_texts(texts=["test"], metadatas=[{"id": "test"}])
+ except Exception as e:
+ self.fail(f"add_texts raised an exception: {e}")
+
+ self.assertTrue("add_texts raised an exception: Test exception" in str(context.exception))
+
+ def test_from_texts_exception(self):
+ with self.assertRaises(AssertionError) as context:
+ OpenSearchNeuralSearch.from_texts = Mock(side_effect=Exception("Test exception"))
+ try:
+ OpenSearchNeuralSearch.from_texts(texts=["test"], metadatas=[{"id": "test"}])
+ except Exception as e:
+ self.fail(f"from_texts raised an exception: {e}")
+
+ self.assertTrue("from_texts raised an exception: Test exception" in str(context.exception))
+
+ def test_client_initialization_error(self):
+ with self.assertRaises(ValueError):
+ OpenSearchNeuralSearch(
+ endpoint="", # Empty endpoint should raise ValueError
+ index="test",
+ model_id="test",
+ client=None
+ )
+
+ def test_add_texts_does_nothing(self):
+ """Test that add_texts method exists but does nothing."""
+ try:
+ # Call add_texts with some sample data
+ result = self.search.add_texts(
+ texts=["test1", "test2"],
+ metadatas=[{"id": "1"}, {"id": "2"}]
+ )
+ # Method should return None
+ self.assertIsNone(result)
+ except Exception as e:
+ self.fail(f"add_texts raised an unexpected exception: {e}")
+
+ def test_from_texts_does_nothing(self):
+ """Test that from_texts classmethod exists but does nothing."""
+ try:
+ # Call from_texts with some sample data
+ result = OpenSearchNeuralSearch.from_texts(
+ texts=["test1", "test2"],
+ metadatas=[{"id": "1"}, {"id": "2"}],
+ endpoint="test",
+ index="test",
+ model_id="test"
+ )
+ # Method should return None
+ self.assertIsNone(result)
+ except Exception as e:
+            self.fail(f"from_texts raised an unexpected exception: {e}")
diff --git a/chat/test/test_event_config.py b/chat/test/test_event_config.py
deleted file mode 100644
index 0d1a4654..00000000
--- a/chat/test/test_event_config.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# ruff: noqa: E402
-import json
-import os
-import sys
-sys.path.append('./src')
-
-from event_config import EventConfig
-from unittest import TestCase, mock
-
-
-class TestEventConfigWithoutAzureResource(TestCase):
- def test_requires_an_azure_resource(self):
- with self.assertRaises(EnvironmentError):
- EventConfig()
-
-
-@mock.patch.dict(
- os.environ,
- {
- "AZURE_OPENAI_RESOURCE_NAME": "test",
- },
-)
-class TestEventConfig(TestCase):
- def test_fetches_attributes_from_vector_database(self):
- os.environ.pop("AZURE_OPENAI_RESOURCE_NAME", None)
- with self.assertRaises(EnvironmentError):
- EventConfig()
-
- def test_defaults(self):
- actual = EventConfig(event={"body": json.dumps({"attributes": ["title"]})})
- expected_defaults = {"azure_endpoint": "https://test.openai.azure.com/"}
- self.assertEqual(actual.azure_endpoint, expected_defaults["azure_endpoint"])
-
- def test_attempt_override_without_superuser_status(self):
- actual = EventConfig(
- event={
- "body": json.dumps(
- {
- "azure_resource_name": "new_name_for_test",
- "attributes": ["title", "subject", "date_created"],
- "index": "testIndex",
- "k": 100,
- "openai_api_version": "2024-01-01",
- "question": "test question",
- "ref": "test ref",
- "size": 90,
- "temperature": 0.9,
- "text_key": "accession_number",
- }
- )
- }
- )
- expected_output = {
- "attributes": EventConfig.DEFAULT_ATTRIBUTES,
- "azure_endpoint": "https://test.openai.azure.com/",
- "k": 40,
- "openai_api_version": "2024-02-01",
- "question": "test question",
- "size": 20,
- "ref": "test ref",
- "temperature": 0.2,
- "text_key": "id",
- }
- self.assertEqual(actual.azure_endpoint, expected_output["azure_endpoint"])
- self.assertEqual(actual.attributes, expected_output["attributes"])
- self.assertEqual(actual.k, expected_output["k"])
- self.assertEqual(
- actual.openai_api_version, expected_output["openai_api_version"]
- )
- self.assertEqual(actual.question, expected_output["question"])
- self.assertEqual(actual.ref, expected_output["ref"])
- self.assertEqual(actual.temperature, expected_output["temperature"])
- self.assertEqual(actual.text_key, expected_output["text_key"])
-
- def test_debug_message(self):
- self.assertEqual(
- EventConfig(
- event={"body": json.dumps({"attributes": ["source"]})}
- ).debug_message()["type"],
- "debug",
- )
-
- def test_to_bool(self):
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool(""), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("0"), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("no"), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("false"), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("False"), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("FALSE"), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("no"), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("No"), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("NO"), False)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool("true"), True)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool(True), True)
- self.assertEqual(EventConfig(event={"body": json.dumps({"attributes": ["source"]})})._to_bool(False), False)
diff --git a/chat/test/test_websocket.py b/chat/test/test_websocket.py
deleted file mode 100644
index 4d4d8b76..00000000
--- a/chat/test/test_websocket.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# ruff: noqa: E402
-import sys
-sys.path.append('./src')
-
-from unittest import TestCase
-from websocket import Websocket
-
-
-class MockClient:
- def post_to_connection(self, Data, ConnectionId):
- return Data
-
-class TestWebsocket(TestCase):
- def test_post_to_connection(self):
- websocket = Websocket(client=MockClient(), connection_id="test_connection_id", ref="test_ref")
- message = "test_message"
- expected = {"message": "test_message", "ref": "test_ref"}
- self.assertEqual(websocket.send(message), expected)
\ No newline at end of file
diff --git a/node/redirect/index.js b/docs/redirect/index.js
similarity index 97%
rename from node/redirect/index.js
rename to docs/redirect/index.js
index ca667b76..66335872 100644
--- a/node/redirect/index.js
+++ b/docs/redirect/index.js
@@ -8,5 +8,5 @@ exports.handler = async () => {
location: target
},
body: `Redirecting to API Documentation
`
- }
-}
+ };
+};
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 63ccd01b..4ce6a575 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -3,4 +3,5 @@ mkdocs-macros-plugin @ git+https://github.com/fralau/mkdocs_macros_plugin.git@v0
Pygments>=2.7.3,<3.0.0
diagrams>=0.21.1,<1.0.0
mkdocs-material>=9.0.0
-mkdocs-render-swagger-plugin>=0.1.2
\ No newline at end of file
+mkdocs-render-swagger-plugin>=0.1.2
+setuptools>=78.1.0
\ No newline at end of file
diff --git a/docs/template.yaml b/docs/template.yaml
new file mode 100644
index 00000000..fbe23bf4
--- /dev/null
+++ b/docs/template.yaml
@@ -0,0 +1,100 @@
+# Build and Deploy Template for DC API
+#
+# Note: Any comment starting with `#*` will be removed
+# at build time. This allows us to run without the
+# dependency layer in development without removing the
+# layer from the build.
+
+AWSTemplateFormatVersion: "2010-09-09"
+Transform:
+ - AWS::Serverless-2016-10-31
+ - AWS::LanguageExtensions
+Description: dc-api-v2 Docs
+Parameters:
+ CustomDomainHost:
+ Type: String
+ Description: Hostname within Custom Domain Zone
+ CustomDomainZone:
+ Type: String
+ Description: Hosted Zone Name for Custom Domain
+ RootApiID:
+ Type: String
+ Description: ID of the root API
+Resources:
+ rootRedirect:
+ Type: AWS::Serverless::Function
+ Properties:
+ Runtime: nodejs20.x
+ CodeUri: ./redirect
+ Handler: index.handler
+ Timeout: 1
+ Description: Redirects to latest version of docs
+ Environment:
+ Variables:
+ REDIRECT_TO: /docs/v2/index.html
+ rootRedirectIntegration:
+ Type: AWS::ApiGatewayV2::Integration
+ Properties:
+ ApiId: !Ref RootApiID
+ IntegrationType: AWS_PROXY
+ IntegrationUri: !GetAtt rootRedirect.Arn
+ PayloadFormatVersion: "2.0"
+ rootRedirectRouteGet:
+ Type: AWS::ApiGatewayV2::Route
+ Properties:
+ ApiId: !Ref RootApiID
+ RouteKey: GET /
+ Target: !Sub integrations/${rootRedirectIntegration}
+ rootRedirectRouteHead:
+ Type: AWS::ApiGatewayV2::Route
+ Properties:
+ ApiId: !Ref RootApiID
+ RouteKey: HEAD /
+ Target: !Sub integrations/${rootRedirectIntegration}
+ rootRedirectPermission:
+ Type: AWS::Lambda::Permission
+ Properties:
+ Action: lambda:InvokeFunction
+ FunctionName: !Ref rootRedirect
+ Principal: apigateway.amazonaws.com
+ SourceArn: !Sub arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${RootApiID}/*/*/
+ docsBucket:
+ Type: AWS::S3::Bucket
+ Properties:
+ BucketName: !Sub "${CustomDomainHost}-docs.${CustomDomainZone}"
+ PublicAccessBlockConfiguration:
+ BlockPublicAcls: false
+ BlockPublicPolicy: false
+ IgnorePublicAcls: false
+ RestrictPublicBuckets: false
+ WebsiteConfiguration:
+ IndexDocument: index.html
+ ErrorDocument: index.html
+ docsBucketPolicy:
+ Type: AWS::S3::BucketPolicy
+ Properties:
+ PolicyDocument:
+ Id: MyPolicy
+        Version: "2012-10-17"
+ Statement:
+ - Sid: PublicReadForGetBucketObjects
+ Effect: Allow
+ Principal: "*"
+ Action: "s3:GetObject"
+ Resource: !Sub "arn:aws:s3:::${docsBucket}/*"
+ Bucket: !Ref docsBucket
+ docsIntegration:
+ Type: AWS::ApiGatewayV2::Integration
+ Properties:
+ ApiId: !Ref RootApiID
+ IntegrationMethod: GET
+ IntegrationType: HTTP_PROXY
+      IntegrationUri: !Sub "http://${docsBucket}.s3-website-${AWS::Region}.amazonaws.com/{proxy}"
+ PayloadFormatVersion: "1.0"
+ docsRoute:
+ Type: AWS::ApiGatewayV2::Route
+ Properties:
+ ApiId: !Ref RootApiID
+ AuthorizationType: NONE
+ RouteKey: GET /docs/v2/{proxy+}
+ Target: !Sub "integrations/${docsIntegration}"
diff --git a/node/src/handlers/default-request.js b/node/src/handlers/default-request.js
deleted file mode 100644
index 4d7e7c55..00000000
--- a/node/src/handlers/default-request.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const { transformError } = require("../api/response/error");
-const { wrap } = require("./middleware");
-
-module.exports.handler = wrap(async () => {
- return transformError({ statusCode: 404 });
-});
diff --git a/node/test/integration/default-handler.test.js b/node/test/integration/default-handler.test.js
deleted file mode 100644
index a8568301..00000000
--- a/node/test/integration/default-handler.test.js
+++ /dev/null
@@ -1,22 +0,0 @@
-"use strict";
-
-const chai = require("chai");
-const expect = chai.expect;
-
-const defaultHandler = requireSource("handlers/default-request");
-
-describe("$default handler", async () => {
- const event = helpers
- .mockEvent("GET", "/blah")
- .headers({
- Origin: "https://dc.library.northwestern.edu/origin-test-path",
- })
- .render();
-
- it("returns a 404 response", async () => {
- const response = await defaultHandler.handler(event);
- expect(response.statusCode).to.eq(404);
- expect(JSON.parse(response.body).status).to.eq(404);
- expect(JSON.parse(response.body).error).to.eq("Not Found");
- });
-});
diff --git a/node/test/integration/post-chat-feedback.test.js b/node/test/integration/post-chat-feedback.test.js
deleted file mode 100644
index 74cb90e0..00000000
--- a/node/test/integration/post-chat-feedback.test.js
+++ /dev/null
@@ -1,140 +0,0 @@
-const chai = require("chai");
-const expect = chai.expect;
-chai.use(require("chai-http"));
-const ApiToken = requireSource("api/api-token");
-const { mockClient } = require("aws-sdk-client-mock");
-const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
-const { SNSClient, PublishCommand } = require("@aws-sdk/client-sns");
-
-const { handler } = requireSource("handlers/post-chat-feedback");
-
-describe("Chat feedback route", () => {
- helpers.saveEnvironment();
- // Pass in the S3 and SNS clients to the handler
- // to workound an issue with the mocking library
- // https://github.com/m-radzikowski/aws-sdk-client-mock
- const s3Mock = mockClient(S3Client);
- const s3Client = new S3Client({});
- const snsMock = mockClient(SNSClient);
- const snsClient = new SNSClient({});
-
- beforeEach(() => {
- s3Mock.reset();
- snsMock.reset();
- });
-
- describe("Form POST submission", () => {
- beforeEach(() => {
- s3Mock.on(PutObjectCommand).resolves({});
- snsMock.on(PublishCommand).resolves({});
- });
-
- it("should return 401 if user is not logged in", async () => {
- let requestBody = JSON.stringify({
- sentiment: "positive",
- context: {
- ref: "5a6e1d76-0d4c-43c5-ab2c-4687112ba102",
- question: "What is the capital of France?",
- answer: "Paris",
- source_documents: ["https://doc1", "https://doc2"],
- },
- feedback: {
- options: ["option1"],
- text: "Great answer!",
- email: "user@example.com",
- },
- });
-
- const event = helpers
- .mockEvent("POST", "/chat-feedback")
- .body(requestBody)
- .render();
- const response = await handler(event);
- expect(response.statusCode).to.equal(401);
- expect(response.body).to.equal("Authorization Required");
- });
-
- it("should fail if request body is invalid", async () => {
- const token = new ApiToken().user({ uid: "abc123" }).sign();
-
- let requestBody = JSON.stringify({
- sentiment: "neutral",
- context: {
- ref: "3fc98004-995b-4491-94fd-aea48a0363ba",
- question: "What is the capital of France?",
- answer: "Paris",
- source_documents: ["https://doc1", "https://doc2"],
- },
- feedback: {
- options: ["option1"],
- text: "Great answer!",
- email: "user@example.com",
- },
- });
-
- const event = helpers
- .mockEvent("POST", "/chat-feedback")
- .body(requestBody)
- .headers({
- Cookie: `${process.env.API_TOKEN_NAME}=${token}`,
- })
- .render();
- const response = await handler(event);
- expect(response.statusCode).to.equal(400);
- expect(response.body).to.equal(
- `"sentiment is not one of enum values: positive,negative"`
- );
- });
-
- describe("Saving feedback", () => {
- it("should upload the response to S3 and return 200", async () => {
- const token = new ApiToken().user({ uid: "abc123" }).sign();
-
- const requestBody = {
- sentiment: "negative",
- context: {
- ref: "e6005d7c-e03b-43f7-94a3-e327b4b5a538",
- question: "What is the capital of France?",
- answer: "Rome",
- source_documents: ["https://doc1", "https://doc2"],
- },
- feedback: {
- options: ["option1"],
- text: "Bad answer!",
- email: "example@example.com",
- },
- };
-
- const event = helpers
- .mockEvent("POST", "/chat-feedback")
- .body(JSON.stringify(requestBody))
- .headers({
- Cookie: `${process.env.API_TOKEN_NAME}=${token}`,
- })
- .render();
-
- const response = await handler(event, {
- injections: { s3Client, snsClient },
- });
-
- expect(response.statusCode).to.equal(200);
- expect(response.body).to.equal(
- '{"message":"Feedback received. Thank you."}'
- );
- expect(s3Mock.calls(PutObjectCommand).length).to.equal(1);
- expect(s3Mock.call(0).args[0].input.Bucket).eq(
- process.env.CHAT_FEEDBACK_BUCKET
- );
- expect(s3Mock.call(0).args[0].input.Key).eq(
- "negative/e6005d7c-e03b-43f7-94a3-e327b4b5a538.json"
- );
- expect(s3Mock.call(0).args[0].input.ContentType).eq("application/json");
- expect(JSON.parse(s3Mock.call(0).args[0].input.Body)).to.deep.equal(
- requestBody
- );
-
- expect(snsMock.calls(PublishCommand).length).to.equal(1);
- });
- });
- });
-});
diff --git a/node/test/unit/redirect.test.js b/node/test/unit/redirect.test.js
deleted file mode 100644
index 2a6ab0ce..00000000
--- a/node/test/unit/redirect.test.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-
-const { handler } = require("../../redirect");
-
-const chai = require("chai");
-const expect = chai.expect;
-
-describe("redirect", function () {
- helpers.saveEnvironment();
-
- it("redirects unrecognized routes to the configured path", async () => {
- process.env.REDIRECT_TO = "/redirect/target";
- const event = helpers.mockEvent("GET", "/").render();
- const result = await handler(event);
- expect(result.statusCode).to.eq(302);
- expect(result.headers.location).to.eq("/redirect/target");
- });
-});
diff --git a/state_machines/av_download.json b/state_machines/av_download.json
deleted file mode 100644
index 7e8de941..00000000
--- a/state_machines/av_download.json
+++ /dev/null
@@ -1,110 +0,0 @@
-{
- "Comment": "HLS stiching and save as file in s3 and email download link",
- "StartAt": "audioOrVideo",
- "States": {
- "audioOrVideo": {
- "Type": "Choice",
- "Choices": [
- {
- "Variable": "$.transcodeInput.type",
- "StringEquals": "audio",
- "Next": "startAudioTranscode"
- }
- ],
- "Default": "startTranscode"
- },
- "startAudioTranscode": {
- "Type": "Task",
- "Resource": "arn:aws:states:::lambda:invoke",
- "Parameters": {
- "Payload.$": "$.transcodeInput",
- "FunctionName.$": "$.configuration.startAudioTranscodeFunction"
- },
- "Next": "getDownloadLink",
- "InputPath": "$",
- "ResultPath": "$.audioTranscodeOutput"
- },
- "startTranscode": {
- "Type": "Task",
- "Resource": "arn:aws:states:::lambda:invoke",
- "Parameters": {
- "Payload.$": "$.transcodeInput",
- "FunctionName.$": "$.configuration.startTranscodeFunction"
- },
- "Next": "transcodeStatus",
- "InputPath": "$",
- "ResultPath": "$.transcodeOutput"
- },
- "transcodeStatus": {
- "Type": "Task",
- "Resource": "arn:aws:states:::lambda:invoke",
- "Parameters": {
- "Payload.$": "$.transcodeOutput.Payload",
- "FunctionName.$": "$.configuration.transcodeStatusFunction"
- },
- "InputPath": "$",
- "ResultPath": "$.transcodeOutput",
- "Next": "transcodeCompleted?"
- },
- "transcodeCompleted?": {
- "Type": "Choice",
- "Choices": [
- {
- "Variable": "$.transcodeOutput.Payload.status",
- "StringEquals": "COMPLETE",
- "Next": "getDownloadLink"
- },
- {
- "Variable": "$.transcodeOutput.Payload.status",
- "StringEquals": "ERROR",
- "Next": "failWorkflow"
- },
- {
- "Variable": "$.transcodeOutput.Payload.status",
- "StringEquals": "CANCELED",
- "Next": "failWorkflow"
- }
- ],
- "Default": "Wait 10 seconds"
- },
- "Wait 10 seconds": {
- "Type": "Wait",
- "Seconds": 10,
- "Next": "transcodeStatus"
- },
- "getDownloadLink": {
- "Type": "Task",
- "Resource": "arn:aws:states:::lambda:invoke",
- "Parameters": {
- "Payload.$": "$.presignedUrlInput",
- "FunctionName.$": "$.configuration.getDownloadLinkFunction"
- },
- "InputPath": "$",
- "ResultPath": "$.downloadLinkOutput",
- "Next": "sendTemplatedEmail"
- },
- "sendTemplatedEmail": {
- "Type": "Task",
- "Resource": "arn:aws:states:::lambda:invoke",
- "Parameters": {
- "Payload": {
- "to.$": "$.sendEmailInput.to",
- "from.$": "$.sendEmailInput.from",
- "template.$": "$.sendEmailInput.template",
- "params": {
- "downloadLink.$": "$.downloadLinkOutput.Payload.downloadLink",
- "fileSetId.$": "$.sendEmailInput.params.fileSetId",
- "fileSetLabel.$": "$.sendEmailInput.params.fileSetLabel",
- "workId.$": "$.sendEmailInput.params.workId",
- "fileType.$": "$.sendEmailInput.params.fileType"
- }
- },
- "FunctionName.$": "$.configuration.sendTemplatedEmailFunction"
- },
- "End": true
- },
- "failWorkflow": {
- "Type": "Fail"
- }
- }
-}
\ No newline at end of file
diff --git a/template.yaml b/template.yaml
index 396f6978..64237dfc 100644
--- a/template.yaml
+++ b/template.yaml
@@ -9,33 +9,12 @@ AWSTemplateFormatVersion: "2010-09-09"
Transform:
- AWS::Serverless-2016-10-31
- AWS::LanguageExtensions
-Description: >
- dc-api-v2
-
- SAM Template for dc-api-v2
-# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
-Globals:
- Function:
- CodeUri: ./node/src
- Runtime: nodejs20.x
- Architectures:
- - x86_64
- MemorySize: 128
- Timeout: 10
- Environment:
- Variables:
- API_TOKEN_NAME: !Ref ApiTokenName
- DC_API_ENDPOINT: !Ref DcApiEndpoint
- DC_URL: !Ref DcUrl
- DEFAULT_SEARCH_SIZE: "100"
- DEV_TEAM_NET_IDS: !Ref DevTeamNetIds
- ENV_PREFIX: !Ref EnvironmentPrefix
- HONEYBADGER_API_KEY: !Ref HoneybadgerApiKey
- HONEYBADGER_ENV: !Ref HoneybadgerEnv
- HONEYBADGER_REVISION: !Ref HoneybadgerRevision
- READING_ROOM_IPS: !Ref ReadingRoomIPs
- SECRETS_PATH: !Ref SecretsPath
+Description: dc-api-v2
Parameters:
+ ApiConfigPrefix:
+ Type: String
+ Description: Secret Name for API Configuration (if not provided, will use SecretsPath)
+ Default: ""
ApiTokenName:
Type: String
Description: Name of the jwt that DC API issues
@@ -57,6 +36,22 @@ Parameters:
DcUrl:
Type: String
Description: URL of Digital Collections website
+ DeployAPI:
+ Type: String
+ Description: Set to true to deploy API
+ Default: "true"
+ DeployAVDownload:
+ Type: String
+ Description: Set to true to deploy AVDownload
+ Default: "true"
+ DeployChat:
+ Type: String
+ Description: Set to true to deploy Chat
+ Default: "true"
+ DeployDocs:
+ Type: String
+ Description: Set to true to deploy Docs
+ Default: "false"
DevTeamNetIds:
Type: String
Description: Northwestern NetIDs of the development team
@@ -112,32 +107,17 @@ Parameters:
Description: Set to something other than "true" to _not_ write configuration secrets
Default: "true"
Conditions:
- WriteSecret:
- Fn::Equals:
- - !Ref WriteConfigSecret
- - true
+ CustomConfigSecret:
+ Fn::Not: [!Equals [!Ref ApiConfigPrefix, ""]]
+ DeployAPI:
+ Fn::Equals: [!Ref DeployAPI, "true"]
+ DeployAVDownload:
+ Fn::Equals: [!Ref DeployAVDownload, "true"]
+ DeployChat:
+ Fn::Equals: [!Ref DeployChat, "true"]
+ DeployDocs:
+ Fn::Equals: [!Ref DeployDocs, "true"]
Resources:
- #* apiDependencies:
- #* Type: AWS::Serverless::LayerVersion
- #* Properties:
- #* LayerName: !Sub "${AWS::StackName}-api-dependencies"
- #* Description: Dependencies for API handlers
- #* ContentUri: ./layers/api_dependencies
- #* CompatibleRuntimes:
- #* - nodejs20.x
- #* LicenseInfo: Apache-2.0
- #* Metadata:
- #* BuildMethod: nodejs20.x
- # Configuration
- apiConfiguration:
- Type: AWS::SecretsManager::Secret
- Condition: WriteSecret
- Properties:
- Name: !Sub "${SecretsPath}/config/dcapi"
- SecretString:
- Fn::ToJsonString:
- api_token_secret: !Ref ApiTokenSecret
- base_url: !Sub "https://${CustomDomainHost}.${CustomDomainZone}/api/v2"
readSecretsPolicy:
Type: AWS::IAM::ManagedPolicy
Properties:
@@ -148,513 +128,71 @@ Resources:
Effect: Allow
Action:
- secretsmanager:GetSecretValue
- Resource: !Sub "arn:aws:secretsmanager:${AWS::Region}:${AWS::AccountId}:secret:${SecretsPath}/*"
+ Resource:
+ - !Sub "arn:aws:secretsmanager:${AWS::Region}:${AWS::AccountId}:secret:${SecretsPath}/*"
+ - Fn::If:
+ - CustomConfigSecret
+ - !Sub "arn:aws:secretsmanager:${AWS::Region}:${AWS::AccountId}:secret:${ApiConfigPrefix}/*"
+ - !Ref AWS::NoValue
- Sid: BatchGetSecrets
Effect: Allow
Action:
- secretsmanager:BatchGetSecretValue
Resource: "*"
- readIndexPolicy:
- Type: AWS::IAM::ManagedPolicy
- Properties:
- PolicyDocument:
- Version: 2012-10-17
- Statement:
- - Sid: ESHTTPPolicy
- Effect: Allow
- Action:
- - es:ESHttp*
- Resource: "*"
- # V2 API
- getAuthCallbackFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-auth-callback.handler
- Description: NUSSO callback function.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /auth/callback
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /auth/callback
- Method: HEAD
- getAuthLoginFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-auth-login.handler
- Description: Performs NUSSO login.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /auth/login
- Method: GET
- getAuthLogoutFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-auth-logout.handler
- Description: Performs NUSSO logout.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /auth/logout
- Method: GET
- getAuthTokenFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-auth-token.handler
- Description: Function to retrieve raw JWT.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /auth/token
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /auth/token
- Method: HEAD
- getAuthWhoAmIFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-auth-whoami.handler
- Description: Exchanges valid JWT token for user information.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /auth/whoami
- Method: GET
- getCollectionsFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-collections.handler
- Description: Gets Collections.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /collections
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /collections
- Method: HEAD
- getCollectionByIdFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-collection-by-id.handler
- Description: Gets a Collection by id.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /collections/{id}
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /collections/{id}
- Method: HEAD
- getFileSetByIdFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-file-set-by-id.handler
- Description: Gets a FileSet by id.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /file-sets/{id}
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /file-sets/{id}
- Method: HEAD
- getFileSetAuthFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-file-set-auth.handler
- Description: Authorizes access to a file set.
- #* Layers:
- #* - !Ref apiDependencies
- Environment:
- Variables:
- USE_PROXIED_IP: true
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /file-sets/{id}/authorization
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /file-sets/{id}/authorization
- Method: HEAD
- getFileSetDownloadFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-file-set-download.handler
- Description: Downloads a file set.
- #* Layers:
- #* - !Ref apiDependencies
- Environment:
- Variables:
- STEP_FUNCTION_ENDPOINT: !Ref AWS::NoValue
- AV_DOWNLOAD_STATE_MACHINE_ARN: !Ref avDownloadStateMachine
- AV_DOWNLOAD_EMAIL_TEMPLATE: !Ref avDownloadEmailTemplate
- USE_PROXIED_IP: true
- STREAMING_BUCKET: !Ref StreamingBucket
- MEDIA_CONVERT_DESTINATION_BUCKET: !Ref MediaConvertDestinationBucket
- MEDIA_CONVERT_ENDPOINT: !Ref MediaConvertEndpoint
- MEDIA_CONVERT_JOB_QUEUE_ARN: !Ref MediaConvertJobQueueArn
- MEDIA_CONVERT_ROLE_ARN: !Ref MediaConvertRoleArn
- PYRAMID_BUCKET: !Ref PyramidBucket
- REPOSITORY_EMAIL: !Ref RepositoryEmail
- START_AUDIO_TRANSCODE_FUNCTION: !GetAtt startAudioTranscodeFunction.Arn
- START_TRANSCODE_FUNCTION: !GetAtt startTranscodeFunction.Arn
- TRANSCODE_STATUS_FUNCTION: !GetAtt transcodeStatusFunction.Arn
- GET_DOWNLOAD_LINK_FUNCTION: !GetAtt getDownloadLinkFunction.Arn
- SEND_TEMPLATED_EMAIL_FUNCTION: !GetAtt sendTemplatedEmailFunction.Arn
- Policies:
- - !Ref readSecretsPolicy
- - Version: 2012-10-17
- Statement:
- - Sid: ExecuteAVDownloadStepFunction
- Effect: Allow
- Action:
- - states:StartExecution
- Resource:
- - !Ref avDownloadStateMachine
- - Sid: BucketAccess
- Effect: Allow
- Action:
- - s3:GetObject
- Resource: !Sub "arn:aws:s3:::${PyramidBucket}/*"
- - Sid: ESHTTPPolicy
- Effect: Allow
- Action:
- - es:ESHttp*
- Resource: "*"
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /file-sets/{id}/download
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /file-sets/{id}/download
- Method: HEAD
- getWorkAuthFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-work-auth.handler
- Description: Authorizes access to a work.
- #* Layers:
- #* - !Ref apiDependencies
- Environment:
- Variables:
- USE_PROXIED_IP: true
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /works/{id}/authorization
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /works/{id}/authorization
- Method: HEAD
- getWorkByIdFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-work-by-id.handler
- Description: Gets a Work by id.
- #* Layers:
- #* - !Ref apiDependencies
- Environment:
- Variables:
- USE_PROXIED_IP: true
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /works/{id}
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /works/{id}
- Method: HEAD
- getThumbnailFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-thumbnail.handler
- Description: Gets a Work's representative thumbnail.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- CollectionApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /collections/{id}/thumbnail
- Method: GET
- CollectionApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /collections/{id}/thumbnail
- Method: HEAD
- WorkApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /works/{id}/thumbnail
- Method: GET
- WorkApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /works/{id}/thumbnail
- Method: HEAD
- getSimilarFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-similar.handler
- Timeout: 100
- Description: Gets works similar to a specific work.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- WorkApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /works/{id}/similar
- Method: GET
- WorkApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /works/{id}/similar
- Method: HEAD
- searchPostFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/search.postSearch
- Description: Handles OpenSearch search requests, Works only by default.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- SearchApi:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /search
- Method: POST
- SearchWithModelsApi:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /search/{models}
- Method: POST
- searchGetFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/search.getSearch
- Description: Handles paging requests
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- SearchApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /search
- Method: GET
- SearchApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /search
- Method: HEAD
- SearchWithModelsApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /search/{models}
- Method: GET
- SearchWithModelsApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /search/{models}
- Method: HEAD
- optionsFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/options-request.handler
- Timeout: 3
- Description: Handles all OPTIONS requests
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- Events:
- Everything:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /{proxy+}
- Method: OPTIONS
- getSharedLinkByIdFunction:
- Type: AWS::Serverless::Function
+ api:
+ Type: AWS::Serverless::Application
+ DependsOn: rootApi
Properties:
- Handler: handlers/get-shared-link-by-id.handler
- Description: Gets a shared link document by id.
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- ApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /shared-links/{id}
- Method: GET
- ApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /shared-links/{id}
- Method: HEAD
- oaiFunction:
- Type: AWS::Serverless::Function
+ Location: ./api/template.yaml
+ Parameters:
+ ApiConfigPrefix: !Ref ApiConfigPrefix
+ ApiTokenName: !Ref ApiTokenName
+ ApiTokenSecret: !Ref ApiTokenSecret
+ ChatWebSocketURI: !If [DeployChat, !GetAtt [chatWebsocket, Outputs.WebSocketURI], ""]
+ CustomDomainCertificateArn: !Ref CustomDomainCertificateArn
+ CustomDomainZone: !Ref CustomDomainZone
+ CustomDomainHost: !Ref CustomDomainHost
+ DcApiEndpoint: !Ref DcApiEndpoint
+ DcUrl: !Ref DcUrl
+ DeployAPI: !Ref DeployAPI
+ DeployAVDownload: !Ref DeployAVDownload
+ DeployChat: !Ref DeployChat
+ DevTeamNetIds: !Ref DevTeamNetIds
+ EnvironmentPrefix: !Ref EnvironmentPrefix
+ HoneybadgerApiKey: !Ref HoneybadgerApiKey
+ HoneybadgerEnv: !Ref HoneybadgerEnv
+ HoneybadgerRevision: !Ref HoneybadgerRevision
+ PyramidBucket: !Ref PyramidBucket
+ ReadingRoomIPs: !Ref ReadingRoomIPs
+ SecretsPath: !Ref SecretsPath
+ SecretsPolicy: !Ref readSecretsPolicy
+ WriteConfigSecret: !Ref WriteConfigSecret
+ avDownload:
+ Type: AWS::Serverless::Application
+ Condition: DeployAVDownload
Properties:
- Handler: handlers/oai.handler
- Description: Transforms works into OAI Records.
- #* Layers:
- #* - !Ref apiDependencies
- Timeout: 60
- Policies:
- - !Ref readSecretsPolicy
- - !Ref readIndexPolicy
- Events:
- GetApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /oai
- Method: GET
- GetApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /oai
- Method: HEAD
- PostApi:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /oai
- Method: POST
+ Location: ./av-download/template.yaml
+ Parameters:
+ MediaConvertDestinationBucket: !Ref MediaConvertDestinationBucket
+ MediaConvertEndpoint: !Ref MediaConvertEndpoint
+ MediaConvertJobQueueArn: !Ref MediaConvertJobQueueArn
+ MediaConvertRoleArn: !Ref MediaConvertRoleArn
+ PyramidBucket: !Ref PyramidBucket
+ RepositoryEmail: !Ref RepositoryEmail
+ SecretsPath:
+ Fn::If:
+ - CustomConfigSecret
+ - !Ref ApiConfigPrefix
+ - !Ref SecretsPath
+ SecretsPolicy: !Ref readSecretsPolicy
+ StreamingBucket: !Ref StreamingBucket
chatWebsocket:
Type: AWS::Serverless::Application
+ Condition: DeployChat
Properties:
Location: ./chat/template.yaml
Parameters:
+ ApiConfigPrefix: !Ref ApiConfigPrefix
ApiTokenName: !Ref ApiTokenName
EnvironmentPrefix: !Ref EnvironmentPrefix
HoneybadgerApiKey: !Ref HoneybadgerApiKey
@@ -662,505 +200,31 @@ Resources:
HoneybadgerRevision: !Ref HoneybadgerRevision
SecretsPath: !Ref SecretsPath
SecretsPolicy: !Ref readSecretsPolicy
- chatWebsocketEndpoint:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/get-chat-endpoint.handler
- Description: Returns the URI of the chat websocket API.
- #* Layers:
- #* - !Ref apiDependencies
- Environment:
- Variables:
- WEBSOCKET_URI: !GetAtt chatWebsocket.Outputs.WebSocketURI
- Policies:
- - !Ref readSecretsPolicy
- Events:
- GetApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /chat/endpoint
- Method: GET
- chatFeedback:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/post-chat-feedback.handler
- Description: Handles feedback from the chat.
- #* Layers:
- #* - !Ref apiDependencies
- Environment:
- Variables:
- CHAT_FEEDBACK_BUCKET: !Ref chatFeedbackBucket
- CHAT_FEEDBACK_TOPIC_ARN: !Ref chatFeedbackTopic
- Policies:
- - !Ref readSecretsPolicy
- - Version: 2012-10-17
- Statement:
- - Sid: BucketAccess
- Effect: Allow
- Action:
- - s3:PutObject
- Resource: !Sub "arn:aws:s3:::${chatFeedbackBucket}/*"
- - Sid: TopicAccess
- Effect: Allow
- Action:
- - sns:Publish
- Resource: !Ref chatFeedbackTopic
- Events:
- PostApi:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: /chat/feedback
- Method: POST
- chatFeedbackBucket:
- Type: 'AWS::S3::Bucket'
- Properties:
- BucketName:
- Fn::Join:
- - "-"
- - - !Sub "${AWS::StackName}-chat-feedback"
- - !Select [2, !Split ['/', !Ref AWS::StackId]]
- chatFeedbackTopic:
- Type: AWS::SNS::Topic
- Properties:
- DisplayName: DC Chat Feedback
- TopicName: !Sub "${AWS::StackName}-chat-feedback"
- defaultFunction:
- Type: AWS::Serverless::Function
- Properties:
- Handler: handlers/default-request.handler
- Timeout: 3
- Description: Handles all other requests
- #* Layers:
- #* - !Ref apiDependencies
- Policies:
- - !Ref readSecretsPolicy
- Events:
- Everything:
- Type: HttpApi
- Properties:
- ApiId: !Ref dcApi
- Path: $default
- Method: ANY
-
- # Resources for AV Download state machine
- avDownloadStateMachine:
- Type: AWS::Serverless::StateMachine
- Properties:
- DefinitionUri: ./state_machines/av_download.json
- Name: !Sub "${AWS::StackName}-av-download-state-machine"
- Policies:
- - !Ref readSecretsPolicy
- - Version: 2012-10-17
- Statement:
- - Sid: LambaInvokePermissions
- Effect: Allow
- Action:
- - lambda:InvokeFunction
- Resource:
- - !GetAtt startAudioTranscodeFunction.Arn
- - !GetAtt startTranscodeFunction.Arn
- - !GetAtt transcodeStatusFunction.Arn
- - !GetAtt getDownloadLinkFunction.Arn
- - !GetAtt sendTemplatedEmailFunction.Arn
- avDownloadEmailTemplate:
- Type: AWS::SES::Template
- Properties:
- Template:
- TemplateName: !Sub "${AWS::StackName}-av-download-template"
- SubjectPart: Download of {{fileSetLabel}} is ready!
- TextPart: |
- Hello,
- Your request for {{fileType}} download of {{fileSetLabel}} (file set id: {{fileSetId}}) has been fulfilled. Your download will be available for 3 days.
- The {{fileType}} file can be downloaded from {{downloadLink}}
- HtmlPart: |
-
-
-
-
- NUL Meadow Download
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- |
-
-
- |
-
-
-
- |
-
-
-
-
-
-
-
-
- Hello,
-
-
- Your request for {{fileType}} download of {{fileSetLabel}} (file set id: {{fileSetId}}) has been
- fulfilled. Click below to download your file:
-
-
-
-
-
-
-
-
- (Your download will be available for 3 days)
-
- |
-
-
-
- |
-
-
-
- |
-
- |
-
-
-
-
-
-
- ffmpegLayer:
- Type: AWS::Serverless::LayerVersion
- Properties:
- Description: "FFMPEG Lambda Layer"
- ContentUri: ./layers/ffmpeg
- CompatibleRuntimes:
- - nodejs18.x
- startAudioTranscodeFunction:
- Type: AWS::Serverless::Function
- Properties:
- Runtime: nodejs18.x
- CodeUri: ./lambdas
- Handler: start-audio-transcode.handler
- Description: Performs audio transcode job with ffmpeg
- Timeout: 900
- MemorySize: 10240
- Layers:
- - !Ref ffmpegLayer
- Policies:
- - !Ref readSecretsPolicy
- - Version: 2012-10-17
- Statement:
- - Sid: BucketAccess
- Effect: Allow
- Action:
- - s3:PutObject
- Resource: !Sub "arn:aws:s3:::${MediaConvertDestinationBucket}/*"
- Environment:
- Variables:
- MEDIA_CONVERT_DESTINATION_BUCKET: !Ref MediaConvertDestinationBucket
- startTranscodeFunction:
- Type: AWS::Serverless::Function
- Properties:
- Runtime: nodejs20.x
- CodeUri: ./lambdas
- Handler: start-transcode.handler
- Description: Creates MediaConvert Job to transcode HLS stream
- Environment:
- Variables:
- MEDIA_CONVERT_ENDPOINT: !Ref MediaConvertEndpoint
- MEDIA_CONVERT_JOB_QUEUE_ARN: !Ref MediaConvertJobQueueArn
- MEDIA_CONVERT_ROLE_ARN: !Ref MediaConvertRoleArn
- Policies:
- - !Ref readSecretsPolicy
- - Version: 2012-10-17
- Statement:
- - Sid: PassMediaConvertRole
- Effect: Allow
- Action:
- - iam:PassRole
- Resource:
- - !Ref MediaConvertRoleArn
- - Sid: StartTranscodeJob
- Effect: Allow
- Action:
- - mediaconvert:CreateJob
- Resource: "*"
- transcodeStatusFunction:
- Type: AWS::Serverless::Function
- Properties:
- Runtime: nodejs20.x
- CodeUri: ./lambdas
- Handler: transcode-status.handler
- Description: Determines when transcode job has completed or errored
- Environment:
- Variables:
- MEDIA_CONVERT_ENDPOINT: !Ref MediaConvertEndpoint
- Policies:
- - !Ref readSecretsPolicy
- - Version: 2012-10-17
- Statement:
- - Sid: TranscodeJobStatus
- Effect: Allow
- Action:
- - mediaconvert:GetJob
- - mediaconvert:ListJobs
- Resource: "*"
- getDownloadLinkFunction:
- Type: AWS::Serverless::Function
- Properties:
- Runtime: nodejs20.x
- CodeUri: ./lambdas
- Handler: get-download-link.handler
- Description: Creates presigned url
- Policies:
- - !Ref readSecretsPolicy
- - Version: 2012-10-17
- Statement:
- - Sid: BucketAccess
- Effect: Allow
- Action:
- - s3:GetObject
- Resource: !Sub "arn:aws:s3:::${MediaConvertDestinationBucket}/*"
- sendTemplatedEmailFunction:
- Type: AWS::Serverless::Function
+ docs:
+ Type: AWS::Serverless::Application
+ Condition: DeployDocs
Properties:
- Runtime: nodejs20.x
- CodeUri: ./lambdas
- Handler: send-templated-email.handler
- Description: Sends email
- Policies:
- - !Ref readSecretsPolicy
- - Version: 2012-10-17
- Statement:
- - Sid: SESSendEmail
- Effect: Allow
- Action:
- - ses:SendTemplatedEmail
- Resource: "*"
-
+ Location: ./docs/template.yaml
+ Parameters:
+ CustomDomainHost: !Ref CustomDomainHost
+ CustomDomainZone: !Ref CustomDomainZone
+ RootApiID: !Ref rootApi
# API Gateway Resources
- dcApi:
+ # root API
+ rootApi:
Type: AWS::Serverless::HttpApi
Properties:
- StageName: v2
- StageVariables:
- basePath: api/v2
+ StageName: latest
Domain:
DomainName: !Sub "${CustomDomainHost}.${CustomDomainZone}"
- BasePath: api/v2
+ BasePath: ["/"]
CertificateArn: !Ref CustomDomainCertificateArn
Route53:
HostedZoneName: !Sub "${CustomDomainZone}."
-
- # root API
- rootApi:
- Type: AWS::Serverless::HttpApi
- Properties:
- StageName: latest
- rootRedirect:
- Type: AWS::Serverless::Function
- Properties:
- CodeUri: ./node/redirect
- Handler: index.handler
- Timeout: 1
- Description: Redirects to latest version of docs
- Environment:
- Variables:
- REDIRECT_TO: /docs/v2/index.html
- Events:
- RedirectApiGet:
- Type: HttpApi
- Properties:
- ApiId: !Ref rootApi
- Path: /
- Method: GET
- RedirectApiHead:
- Type: HttpApi
- Properties:
- ApiId: !Ref rootApi
- Path: /
- Method: HEAD
-
- # Documentation
- docsMapping:
- Type: AWS::ApiGatewayV2::ApiMapping
- Properties:
- DomainName: !Sub "${CustomDomainHost}.${CustomDomainZone}"
- ApiId: !Ref rootApi
- Stage: !Ref rootApilatestStage
- DependsOn: dcApi
- docsBucket:
- Type: AWS::S3::Bucket
- Properties:
- BucketName: !Sub "${CustomDomainHost}-docs.${CustomDomainZone}"
- PublicAccessBlockConfiguration:
- BlockPublicAcls: false
- BlockPublicPolicy: false
- IgnorePublicAcls: false
- RestrictPublicBuckets: false
- WebsiteConfiguration:
- IndexDocument: index.html
- ErrorDocument: index.html
- docsBucketPolicy:
- Type: AWS::S3::BucketPolicy
- Properties:
- PolicyDocument:
- Id: MyPolicy
- Version: 2012-10-17
- Statement:
- - Sid: PublicReadForGetBucketObjects
- Effect: Allow
- Principal: "*"
- Action: "s3:GetObject"
- Resource: !Sub "arn:aws:s3:::${docsBucket}/*"
- Bucket: !Ref docsBucket
- docsIntegration:
- Type: AWS::ApiGatewayV2::Integration
- Properties:
- ApiId: !Ref rootApi
- IntegrationMethod: GET
- IntegrationType: HTTP_PROXY
- IntegrationUri: !Sub "http://${docsBucket}.s3-website-us-east-1.amazonaws.com/{proxy}"
- PayloadFormatVersion: "1.0"
- docsRoute:
- Type: AWS::ApiGatewayV2::Route
- Properties:
- ApiId: !Ref rootApi
- AuthorizationType: NONE
- RouteKey: GET /docs/v2/{proxy+}
- Target: !Sub "integrations/${docsIntegration}"
Outputs:
Endpoint:
Description: "The base API endpoint for the stack"
- Value: !Sub "https://${CustomDomainHost}.${CustomDomainZone}/api/v2"
\ No newline at end of file
+ Value: !Sub "https://${CustomDomainHost}.${CustomDomainZone}/api/v2"
+ WebSocketURI:
+ Description: "The WebSocket URI for the chat application"
+ Value: !If [DeployChat, !GetAtt [chatWebsocket, Outputs.WebSocketURI], ""]