WIP 1
parent c095fa7042
commit 43bfdaa1e3
186 changed files with 276 additions and 37155 deletions
78 .gitpod.dockerfile
@@ -1,78 +0,0 @@
FROM gitpod/workspace-full

# install tools we need
RUN set -ex; \
    pyenv global system; \
    sudo add-apt-repository ppa:ansible/ansible; \
    sudo add-apt-repository ppa:maxmind/ppa; \
    curl -s https://helm.baltorepo.com/organization/signing.asc | sudo apt-key add - ; \
    curl -L "https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh" | sudo bash; \
    echo "deb https://baltocdn.com/helm/stable/debian/ all main" | sudo tee /etc/apt/sources.list.d/helm-stable-debian.list ; \
    sudo apt-get update; \
    sudo apt-get -y upgrade ; \
    sudo apt-get install -y \
        ansible \
        build-essential \
        httpie \
        fd-find \
        ffmpeg \
        geoipupdate \
        gitlab-runner \
        helm \
        htop \
        iotop \
        iptraf \
        jq \
        kitty-terminfo \
        libolm-dev \
        ncdu \
        postgresql \
        pwgen \
        python3-wheel \
        ripgrep \
        rsync \
        scdaemon \
        socat \
        tmux \
        unrar \
        unzip \
        vifm \
        vim \
        yamllint \
        zsh \
        zsh-syntax-highlighting \
    ; sudo rm -rf /var/lib/apt/lists/*

RUN set -ex; \
    brew install \
        zoxide \
        fzf;

# needed for tailscale
RUN sudo update-alternatives --set ip6tables /usr/sbin/ip6tables-nft

# install npm global packages we need
RUN set -ex; \
    npm install -g \
        standard-version \
        turbo \
    ;

# make a place for all our warez
RUN sudo mkdir -p /usr/local/bin

# install AWS' kubectl
# from https://docs.aws.amazon.com/eks/latest/userguide/install-kubectl.html
ARG KUBECTL_URL="https://amazon-eks.s3.us-west-2.amazonaws.com/1.21.2/2021-07-05/bin/linux/amd64/kubectl"
RUN set -ex; \
    curl -o kubectl "${KUBECTL_URL}"; \
    chmod +x kubectl; \
    sudo mv kubectl /usr/local/bin

# install cloudflared
# from https://github.com/cloudflare/cloudflared/releases
ARG CLOUDFLARED_VERSION="2023.2.1"
RUN set -ex; \
    wget --progress=dot:mega https://github.com/cloudflare/cloudflared/releases/download/${CLOUDFLARED_VERSION}/cloudflared-linux-amd64.deb; \
    sudo dpkg -i cloudflared-linux-amd64.deb; \
    cloudflared --version
63 .gitpod.yml
@@ -1,63 +0,0 @@
---
# build the docker image for our gitpod from this dockerfile
image:
  file: .gitpod.dockerfile
# all init+before are run in prebuilds, and on workspace startup
tasks:
  - name: npm install
    init: |
      npm install
# extra extensions we share
vscode:
  extensions:
    - redhat.vscode-yaml
    - ms-azuretools.vscode-docker
    - ms-kubernetes-tools.vscode-kubernetes-tools
    - ms-vscode.makefile-tools
    - bungcip.better-toml
    - sleistner.vscode-fileutils
    - esbenp.prettier-vscode
    - darkriszty.markdown-table-prettify
    - VisualStudioExptTeam.vscodeintellicode

ports:
  - name: Zammad
    port: 8001
    onOpen: notify

  - name: Leafcutter Local
    port: 3001
    onOpen: notify

  - name: Leafcutter
    port: 8004
    onOpen: notify

  - name: Link
    port: 8003
    onOpen: notify

  - name: Link Local
    port: 3000
    onOpen: notify

  - name: Metamigo
    port: 8002
    onOpen: notify

  - name: Metamigo Local
    port: 2999
    onOpen: notify

  - name: Metamigo API
    port: 8004
    onOpen: notify

  - name: Zammad Postgres
    port: 5432
    onOpen: notify

  - name: Metamigo Postgres
    port: 5433
    onOpen: notify
3 .vscode/settings.json (vendored)
@@ -1,3 +0,0 @@
{
  "prettier.prettierPath": ""
}
83 Makefile
@@ -1,83 +0,0 @@
CURRENT_UID := $(shell id -u):$(shell id -g)
PACKAGE_NAME ?= $(shell jq -r '.name' package.json)
PACKAGE_VERSION ?= $(shell jq -r '.version' package.json)
BUILD_DATE ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
DOCKER_ARGS ?=
DOCKER_NS ?= registry.gitlab.com/digiresilience/link/${PACKAGE_NAME}
DOCKER_TAG ?= test
DOCKER_BUILD := docker build ${DOCKER_ARGS} --build-arg BUILD_DATE=${BUILD_DATE}
DOCKER_BUILD_FRESH := ${DOCKER_BUILD} --pull --no-cache
DOCKER_BUILD_ARGS := --build-arg VCS_REF=${CI_COMMIT_SHORT_SHA}
DOCKER_PUSH := docker push
DOCKER_BUILD_TAG := ${DOCKER_NS}:${DOCKER_TAG}

.PHONY: .npmrc
.EXPORT_ALL_VARIABLES:

.npmrc:
ifdef CI_JOB_TOKEN
	echo '@guardianproject-ops:registry=https://gitlab.com/api/v4/packages/npm/' > .npmrc
	echo '@digiresilience:registry=https://gitlab.com/api/v4/packages/npm/' >> .npmrc
	echo '//gitlab.com/api/v4/packages/npm/:_authToken=${CI_JOB_TOKEN}' >> .npmrc
	echo '//gitlab.com/api/v4/projects/:_authToken=${CI_JOB_TOKEN}' >> .npmrc
	echo '//gitlab.com/api/v4/projects/${CI_PROJECT_ID}/packages/npm/:_authToken=${CI_JOB_TOKEN}' >> .npmrc
endif

docker/build: .npmrc
	DOCKER_BUILDKIT=1 ${DOCKER_BUILD} ${DOCKER_BUILD_ARGS} -t ${DOCKER_BUILD_TAG} ${PWD}

docker/build-fresh: .npmrc
	DOCKER_BUILDKIT=1 ${DOCKER_BUILD_FRESH} ${DOCKER_BUILD_ARGS} -t ${DOCKER_BUILD_TAG} ${PWD}

docker/add-tag:
	docker pull ${DOCKER_NS}:${DOCKER_TAG}
	docker tag ${DOCKER_NS}:${DOCKER_TAG} ${DOCKER_NS}:${DOCKER_TAG_NEW}
	docker push ${DOCKER_NS}:${DOCKER_TAG_NEW}

docker/push:
	${DOCKER_PUSH} ${DOCKER_BUILD_TAG}

docker/build-push: docker/build docker/push
docker/build-fresh-push: docker/build-fresh docker/push

# don't use this to generate passwords for production
generate-secrets:
	ZAMMAD_DATABASE_PASSWORD=$(shell openssl rand -hex 16)
	METAMIGO_DATABASE_ROOT_PASSWORD=$(shell openssl rand -hex 16)
	METAMIGO_DATABASE_PASSWORD=$(shell openssl rand -hex 16)
	METAMIGO_DATABASE_AUTHENTICATOR_PASSWORD=$(shell openssl rand -hex 16)
	NEXTAUTH_AUDIENCE=$(shell openssl rand -hex 16)
	NEXTAUTH_SECRET=$(shell openssl rand -hex 16)

generate-keys:
	docker exec -i $(shell docker ps -aqf "name=metamigo-frontend") bash -c "/opt/metamigo/cli gen-jwks"

setup-signal:
	mkdir -p signald

create-admin-user:
	docker exec -i $(shell docker ps -aqf "name=metamigo-postgresql") bash < ./scripts/create-admin-user.sh

.env:
	@test -f .env || (echo "You must create .env, please refer to the README" && exit 1)

start: .env
	CURRENT_UID=$(CURRENT_UID) docker compose -f docker-compose.link.yml up -d

start-dev: .env
	CURRENT_UID=$(CURRENT_UID) docker compose up --build -d

restart: .env
	CURRENT_UID=$(CURRENT_UID) docker restart $(shell docker ps -a -q)

stop:
	CURRENT_UID=$(CURRENT_UID) docker compose down

destroy:
	docker compose down
	docker volume prune

dev-metamigo:
	CURRENT_UID=$(CURRENT_UID) docker compose up -d metamigo-postgresql signald
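When CI_JOB_TOKEN is set, the .npmrc target above ends up writing a file along these lines; the token and project id stay as GitLab CI variables, so this rendering is only for reference:

@guardianproject-ops:registry=https://gitlab.com/api/v4/packages/npm/
@digiresilience:registry=https://gitlab.com/api/v4/packages/npm/
//gitlab.com/api/v4/packages/npm/:_authToken=${CI_JOB_TOKEN}
//gitlab.com/api/v4/projects/:_authToken=${CI_JOB_TOKEN}
//gitlab.com/api/v4/projects/${CI_PROJECT_ID}/packages/npm/:_authToken=${CI_JOB_TOKEN}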
@@ -17,24 +17,24 @@
"@emotion/react": "^11.11.4",
|
||||
"@emotion/server": "^11.11.0",
|
||||
"@emotion/styled": "^11.11.0",
|
||||
"@fontsource/playfair-display": "^5.0.21",
|
||||
"@fontsource/playfair-display": "^5.0.23",
|
||||
"@fontsource/poppins": "^5.0.12",
|
||||
"@fontsource/roboto": "^5.0.12",
|
||||
"@mui/icons-material": "^5",
|
||||
"@mui/lab": "^5.0.0-alpha.167",
|
||||
"@mui/lab": "^5.0.0-alpha.168",
|
||||
"@mui/material": "^5",
|
||||
"@mui/x-data-grid-pro": "^6.19.6",
|
||||
"@mui/x-date-pickers-pro": "^6.19.6",
|
||||
"@opensearch-project/opensearch": "^2.5.0",
|
||||
"@mui/x-date-pickers-pro": "^6.19.7",
|
||||
"@opensearch-project/opensearch": "^2.6.0",
|
||||
"cryptr": "^6.3.0",
|
||||
"date-fns": "^3.3.1",
|
||||
"date-fns": "^3.5.0",
|
||||
"http-proxy-middleware": "^2.0.6",
|
||||
"leafcutter-common": "*",
|
||||
"material-ui-popup-state": "^5.0.10",
|
||||
"next": "14.1.2",
|
||||
"next-auth": "^4.24.6",
|
||||
"next": "14.1.3",
|
||||
"next-auth": "^4.24.7",
|
||||
"next-http-proxy-middleware": "^1.2.6",
|
||||
"nodemailer": "^6.9.11",
|
||||
"nodemailer": "^6.9.12",
|
||||
"react": "18.2.0",
|
||||
"react-cookie": "^7.1.0",
|
||||
"react-cookie-consent": "^9.0.0",
|
||||
@@ -49,18 +49,18 @@
  },
  "devDependencies": {
    "@babel/core": "^7.24.0",
    "@types/node": "^20.11.24",
    "@types/react": "18.2.63",
    "@types/node": "^20.11.28",
    "@types/react": "18.2.66",
    "@types/uuid": "^9.0.8",
    "babel-loader": "^9.1.3",
    "eslint": "^8.57.0",
    "eslint-config-airbnb": "^19.0.4",
    "eslint-config-next": "^14.1.2",
    "eslint-config-next": "^14.1.3",
    "eslint-config-prettier": "^9.1.0",
    "eslint-plugin-import": "^2.29.1",
    "eslint-plugin-jsx-a11y": "^6.8.0",
    "eslint-plugin-prettier": "^5.1.3",
    "eslint-plugin-react": "^7.34.0",
    "typescript": "5.3.3"
    "eslint-plugin-react": "^7.34.1",
    "typescript": "5.4.2"
  }
}
@@ -6,5 +6,10 @@ export const metadata: Metadata = {
};

export default function Page() {
  return <Home />;
  return (
    <iframe src="/opensearch/app/dashboards?security_tenant=global#/view/722b74f0-b882-11e8-a6d9-e546fe2bba5f?embed=true&_g=(filters%3A!()%2CrefreshInterval%3A(pause%3A!f%2Cvalue%3A900000)%2Ctime%3A(from%3Anow-7d%2Cto%3Anow))&hide-filter-bar=true"
      height="1000"
      width="1200"
    ></iframe>
  );
}
66 apps/link/app/api/proxy/[...path]/route.ts
@@ -0,0 +1,66 @@
import { createProxyMiddleware } from "http-proxy-middleware";
import { NextApiRequest, NextApiResponse } from "next";
import { getToken } from "next-auth/jwt";

/*

if (validDomains.includes(domain)) {
  res.headers.set("Access-Control-Allow-Origin", origin);
  res.headers.set("Access-Control-Allow-Methods", "GET, POST, OPTIONS");
  res.headers.set("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
}

*/

const withAuthInfo =
  (handler: any) => async (req: NextApiRequest, res: NextApiResponse) => {
    const session: any = await getToken({
      req,
      secret: process.env.NEXTAUTH_SECRET,
    });
    let email = session?.email?.toLowerCase();

    const requestSignature = req.query.signature;
    const url = new URL(req.headers.referer as string);
    const referrerSignature = url.searchParams.get("signature");

    console.log({ requestSignature, referrerSignature });
    const isAppPath = !!req.url?.startsWith("/app");
    const isResourcePath = !!req.url?.match(/\/(api|app|bootstrap|3961|ui|translations|internal|login|node_modules)/);

    if (requestSignature && isAppPath) {
      console.log("Has Signature");
    }

    if (referrerSignature && isResourcePath) {
      console.log("Has Signature");
    }

    if (!email) {
      return res.status(401).json({ error: "Not authorized" });
    }

    req.headers["x-proxy-user"] = email;
    req.headers["x-proxy-roles"] = "leafcutter_user";
    const auth = `${email}:${process.env.OPENSEARCH_USER_PASSWORD}`;
    const buff = Buffer.from(auth);
    const base64data = buff.toString("base64");
    req.headers.Authorization = `Basic ${base64data}`;
    return handler(req, res);
  };

const proxy = createProxyMiddleware({
  target: process.env.OPENSEARCH_DASHBOARDS_URL,
  changeOrigin: true,
  xfwd: true,
});

export default withAuthInfo(proxy);

export const config = {
  api: {
    bodyParser: false,
    externalResolver: true,
  },
};
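For orientation, a minimal sketch of how a browser client could call this proxy once a next-auth session cookie exists. The /api/proxy/<path> URL shape is an assumption taken from the [...path] file location, and loadDashboardHtml is an illustrative name, not part of this change:

// Illustrative sketch only: fetch an OpenSearch Dashboards page through the
// authenticated proxy; the browser attaches the next-auth session cookie itself.
async function loadDashboardHtml(path: string): Promise<string> {
  const res = await fetch(`/api/proxy/${path}`, { credentials: "include" });
  if (res.status === 401) throw new Error("Not authorized");
  return res.text();
}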
@@ -24,12 +24,37 @@ const rewriteURL = (request: NextRequestWithAuth, originBaseURL: string, destina
const checkRewrites = async (request: NextRequestWithAuth) => {
  const linkBaseURL = process.env.LINK_URL ?? "http://localhost:3000";
  const zammadURL = process.env.ZAMMAD_URL ?? "http://zammad-nginx:8080";
  const opensearchURL = process.env.OPENSEARCH_URL ?? "http://macmini:5601";
  const metamigoURL = process.env.METAMIGO_URL ?? "http://metamigo-api:3000";
  const labelStudioURL = process.env.LABEL_STUDIO_URL ?? "http://label-studio:8080";
  const { token } = request.nextauth;
  const headers = { 'X-Forwarded-User': token?.email?.toLowerCase() };
  console.log({ pathname: request.nextUrl.pathname });

  if (request.nextUrl.pathname.startsWith('/metamigo')) {
    if (request.nextUrl.pathname.startsWith('/api/v1/configuration/account') ||
      request.nextUrl.pathname.startsWith('/api/v1/restapiinfo') ||
      request.nextUrl.pathname.startsWith('/api/v1/auth') ||
      request.nextUrl.pathname.startsWith('/api/core') ||
      request.nextUrl.pathname.startsWith('/api/dataconnections') ||
      request.nextUrl.pathname.startsWith('/api/v1/multitenancy') ||
      request.nextUrl.pathname.startsWith('/api/ism') ||
      request.nextUrl.pathname.startsWith('/node_modules') ||
      request.nextUrl.pathname.startsWith('/translations') || request.nextUrl.pathname.startsWith('/6867') || request.nextUrl.pathname.startsWith('/ui') || request.nextUrl.pathname.startsWith('/bootstrap')) {
      const headers = {
        'x-proxy-user': "admin",
        'x-proxy-roles': "all_access",
        // 'X-Forwarded-For': "link"
      };
      return rewriteURL(request, `${linkBaseURL}`, opensearchURL, headers);
    }
    else if (request.nextUrl.pathname.startsWith('/opensearch')) {
      const headers = {
        'x-proxy-user': "admin",
        'x-proxy-roles': "all_access",
        // 'X-Forwarded-For': "link"
      };
      return rewriteURL(request, `${linkBaseURL}/opensearch`, opensearchURL, headers);
    } else if (request.nextUrl.pathname.startsWith('/metamigo')) {
      return rewriteURL(request, `${linkBaseURL}/metamigo`, metamigoURL);
    } else if (request.nextUrl.pathname.startsWith('/label-studio')) {
      return rewriteURL(request, `${linkBaseURL}/label-studio`, labelStudioURL);
@@ -83,4 +108,3 @@ export const config = {
    '/((?!ws|wss|_next/static|_next/image|favicon.ico).*)',
  ],
};
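The rewriteURL helper used throughout checkRewrites is defined just above this hunk and is not part of the diff. Purely as an assumption, a helper with that call signature might look roughly like the sketch below (Next.js NextResponse.rewrite with overridden request headers); this is not necessarily the project's actual implementation:

import { NextResponse } from "next/server";
import type { NextRequestWithAuth } from "next-auth/middleware";

// Hypothetical sketch: swap the origin base URL for the destination service URL
// and forward any extra proxy headers on the rewritten request.
const rewriteURL = (
  request: NextRequestWithAuth,
  originBaseURL: string,
  destinationBaseURL: string,
  extraHeaders: Record<string, string | undefined> = {},
) => {
  const target = request.nextUrl.href.replace(originBaseURL, destinationBaseURL);
  const headers = new Headers(request.headers);
  for (const [name, value] of Object.entries(extraHeaders)) {
    if (value) headers.set(name, value);
  }
  return NextResponse.rewrite(new URL(target), { request: { headers } });
};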
@@ -15,29 +15,29 @@
"@emotion/react": "^11.11.4",
|
||||
"@emotion/server": "^11.11.0",
|
||||
"@emotion/styled": "^11.11.0",
|
||||
"@fontsource/playfair-display": "^5.0.21",
|
||||
"@fontsource/playfair-display": "^5.0.23",
|
||||
"@fontsource/poppins": "^5.0.12",
|
||||
"@fontsource/roboto": "^5.0.12",
|
||||
"@mui/icons-material": "^5",
|
||||
"@mui/lab": "^5.0.0-alpha.167",
|
||||
"@mui/lab": "^5.0.0-alpha.168",
|
||||
"@mui/material": "^5",
|
||||
"@mui/x-data-grid-pro": "^6.19.6",
|
||||
"@mui/x-date-pickers-pro": "^6.19.6",
|
||||
"@mui/x-date-pickers-pro": "^6.19.7",
|
||||
"cryptr": "^6.3.0",
|
||||
"date-fns": "^3.3.1",
|
||||
"date-fns": "^3.5.0",
|
||||
"graphql-request": "^6.1.0",
|
||||
"leafcutter-common": "*",
|
||||
"material-ui-popup-state": "^5.0.10",
|
||||
"mui-chips-input": "^2.1.4",
|
||||
"next": "14.1.2",
|
||||
"next-auth": "^4.24.6",
|
||||
"next": "14.1.3",
|
||||
"next-auth": "^4.24.7",
|
||||
"ra-data-graphql": "^4.16.12",
|
||||
"ra-i18n-polyglot": "^4.16.12",
|
||||
"ra-input-rich-text": "^4.16.12",
|
||||
"ra-input-rich-text": "^4.16.13",
|
||||
"ra-language-english": "^4.16.12",
|
||||
"ra-postgraphile": "^6.1.2",
|
||||
"react": "18.2.0",
|
||||
"react-admin": "^4.16.12",
|
||||
"react-admin": "^4.16.13",
|
||||
"react-cookie": "^7.1.0",
|
||||
"react-digit-input": "^2.1.0",
|
||||
"react-dom": "18.2.0",
|
||||
@@ -52,18 +52,18 @@
  },
  "devDependencies": {
    "@babel/core": "^7.24.0",
    "@types/node": "^20.11.24",
    "@types/react": "18.2.63",
    "@types/node": "^20.11.28",
    "@types/react": "18.2.66",
    "@types/uuid": "^9.0.8",
    "babel-loader": "^9.1.3",
    "eslint": "^8.57.0",
    "eslint-config-airbnb": "^19.0.4",
    "eslint-config-next": "^14.1.2",
    "eslint-config-next": "^14.1.3",
    "eslint-config-prettier": "^9.1.0",
    "eslint-plugin-import": "^2.29.1",
    "eslint-plugin-jsx-a11y": "^6.8.0",
    "eslint-plugin-prettier": "^5.1.3",
    "eslint-plugin-react": "^7.34.0",
    "typescript": "5.3.3"
    "eslint-plugin-react": "^7.34.1",
    "typescript": "5.4.2"
  }
}
@@ -1,13 +0,0 @@
.git
.idea
**/node_modules
!/node_modules
**/build
**/dist
**/tmp
**/.env*
**/coverage
**/.next
**/amigo.*.json
**/cypress/videos
**/cypress/screenshots
@@ -1,12 +0,0 @@
require("eslint-config-link/patch/modern-module-resolution");
|
||||
module.exports = {
|
||||
extends: [
|
||||
"eslint-config-link/profile/node",
|
||||
"eslint-config-link/profile/typescript",
|
||||
"eslint-config-link/profile/jest",
|
||||
],
|
||||
parserOptions: { tsconfigRootDir: __dirname },
|
||||
rules: {
|
||||
"new-cap": "off"
|
||||
},
|
||||
};
|
||||
@@ -1,5 +0,0 @@
{
  "presets": [
    "babel-preset-link"
  ]
}
@@ -1,4 +0,0 @@
{
  "preset": "jest-config-link",
  "setupFiles": ["<rootDir>/src/setup.test.ts"]
}
@@ -1,85 +0,0 @@
{
  "name": "@digiresilience/metamigo-api",
  "version": "0.2.0",
  "type": "module",
  "main": "build/main/main.js",
  "author": "Abel Luck <abel@guardianproject.info>",
  "license": "AGPL-3.0-or-later",
  "dependencies": {
    "@adiwajshing/keyed-db": "0.2.4",
    "@digiresilience/hapi-nextauth": "*",
    "@digiresilience/hapi-pg-promise": "*",
    "@digiresilience/metamigo-common": "*",
    "@digiresilience/metamigo-config": "*",
    "@digiresilience/metamigo-db": "*",
    "@digiresilience/montar": "*",
    "@digiresilience/node-signald": "*",
    "@graphile-contrib/pg-simplify-inflector": "^6.1.0",
    "@hapi/basic": "^7.0.2",
    "@hapi/boom": "^10.0.1",
    "@hapi/vision": "^7.0.3",
    "@hapi/wreck": "^18.0.1",
    "@hapipal/schmervice": "^3.0.0",
    "@hapipal/toys": "^4.0.0",
    "blipp": "^4.0.2",
    "camelcase-keys": "^9.1.3",
    "expiry-map": "^2.0.0",
    "fluent-ffmpeg": "^2.1.2",
    "graphile-migrate": "^1.4.1",
    "graphile-worker": "^0.13.0",
    "hapi-auth-bearer-token": "^8.0.0",
    "hapi-auth-jwt2": "^10.5.1",
    "hapi-swagger": "^17.2.1",
    "joi": "^17.12.2",
    "jsonwebtoken": "^9.0.2",
    "jwks-rsa": "^3.1.0",
    "long": "^5.2.3",
    "p-memoize": "^7.1.1",
    "pg": "^8.11.3",
    "pg-monitor": "^2.0.0",
    "pg-promise": "^11.5.4",
    "postgraphile": "4.12.3",
    "postgraphile-plugin-connection-filter": "^2.3.0",
    "remeda": "^1.46.2",
    "twilio": "^4.23.0",
    "typeorm": "^0.3.20",
    "@whiskeysockets/baileys": "^6.6.0"
  },
  "devDependencies": {
    "@types/long": "^4.0.2",
    "@types/node": "*",
    "babel-preset-link": "*",
    "camelcase-keys": "^9.1.3",
    "eslint-config-link": "*",
    "jest-config-link": "*",
    "nodemon": "^3.1.0",
    "pg-monitor": "^2.0.0",
    "pino-pretty": "^10.3.1",
    "ts-node": "^10.9.2",
    "tsc-watch": "^6.0.4",
    "tsconfig-link": "*",
    "typedoc": "^0.25.11",
    "typescript": "^5.3.3"
  },
  "nodemonConfig": {
    "ignore": [
      "docs/*"
    ],
    "ext": "ts,json,js"
  },
  "scripts": {
    "build": "tsc -p tsconfig.json",
    "test": "JEST_CIRCUS=1 jest --coverage --forceExit --detectOpenHandles --reporters=default --reporters=jest-junit",
    "fmt": "prettier \"src/**/*.ts\" --write",
    "lint": "eslint src --ext .ts",
    "lint-fmt": "prettier \"src/**/*.ts\" --list-different",
    "fix:lint": "eslint src --ext .ts --fix",
    "cli": "NODE_ENV=development nodemon --unhandled-rejections=strict build/main/cli/index.js",
    "serve": "NODE_ENV=development npm run cli server",
    "serve:prod": "NODE_ENV=production npm run cli server",
    "worker": "NODE_ENV=development npm run cli worker",
    "worker:prod": "NODE_ENV=production npm run cli worker",
    "watch:build": "tsc -p tsconfig.json -w",
    "dev": "tsc-watch --build --noClear --onSuccess \"node ./build/main/main.js\""
  }
}
@@ -1,26 +0,0 @@
import type * as Hapi from "@hapi/hapi";
import Joi from "joi";
import type { IAppConfig } from "../config.js";
import * as Services from "./services/index.js";
import * as Routes from "./routes/index.js";
import * as Plugins from "./plugins/index.js";

const AppPlugin = {
  name: "App",
  async register(
    server: Hapi.Server,
    options: { config: IAppConfig },
  ): Promise<void> {
    // declare our **run-time** plugin dependencies
    // these are runtime only deps, not registration time
    // ref: https://hapipal.com/best-practices/handling-plugin-dependencies
    server.dependency(["config", "hapi-pino"]);

    server.validator(Joi as any);
    await Plugins.register(server, options.config);
    await Services.register(server);
    await Routes.register(server);
  },
};

export default AppPlugin;
@@ -1,28 +0,0 @@
import type * as Hapi from "@hapi/hapi";
import AuthBearer from "hapi-auth-bearer-token";
import { IAppConfig } from "@digiresilience/metamigo-config";
import { IMetamigoRepositories } from "@digiresilience/metamigo-common";

export const registerAuthBearer = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  await server.register(AuthBearer);

  server.auth.strategy("session-id-bearer-token", "bearer-access-token", {
    allowQueryToken: false,
    validate: async (
      request: Hapi.Request,
      token: string,
      h: Hapi.ResponseToolkit
    ) => {
      const repos = request.db() as IMetamigoRepositories;
      const session = await repos.sessions.findBy({ sessionToken: token });
      const isValid = !!session;
      if (!isValid) return { isValid, credentials: {} };
      const user = await repos.users.findById({ id: session.userId });
      const credentials = { sessionToken: token, user };
      return { isValid, credentials };
    },
  });
};
@@ -1,125 +0,0 @@
import * as Boom from "@hapi/boom";
import * as Hoek from "@hapi/hoek";
import * as Hapi from "@hapi/hapi";
import { promisify } from "node:util";
import jwt from "jsonwebtoken";
import jwksClient, { hapiJwt2KeyAsync } from "jwks-rsa";
import type { IAppConfig } from "../../config";

const CF_JWT_HEADER_NAME = "cf-access-jwt-assertion";
const CF_JWT_ALGOS = ["RS256"];

type VerifyFn = (token: string) => Promise<void>;

const verifyToken = (settings) => {
  const { audience, issuer } = settings;
  const client = jwksClient({
    jwksUri: `${issuer}/cdn-cgi/access/certs`,
  });

  return async (token: string) => {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const getKey = (header: any, callback: any) => {
      client.getSigningKey(header.kid, (err, key) => {
        if (err)
          throw Boom.serverUnavailable(
            "failed to fetch cloudflare access jwks"
          );
        callback(undefined, key?.getPublicKey());
      });
    };

    const opts = {
      algorithms: CF_JWT_ALGOS,
      audience,
      issuer,
    };
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    return (promisify(jwt.verify) as any)(token, getKey, opts);
  };
};

const handleCfJwt =
  (verify: VerifyFn) =>
  async (request: Hapi.Request, h: Hapi.ResponseToolkit) => {
    const token = request.headers[CF_JWT_HEADER_NAME];
    if (token) {
      try {
        await verify(token);
      } catch (error) {
        console.error(error);
        return Boom.unauthorized("invalid cloudflare access token");
      }
    }

    return h.continue;
  };

const defaultOpts = {
  issuer: undefined,
  audience: undefined,
  strategyName: "clouflareaccess",
  validate: undefined,
};

const cfJwtRegister = async (
  server: Hapi.Server,
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  options: any
): Promise<void> => {
  server.dependency(["hapi-auth-jwt2"]);
  const settings = Hoek.applyToDefaults(defaultOpts, options);
  const verify = verifyToken(settings);

  const { validate, strategyName, audience, issuer } = settings;
  server.ext("onPreAuth", handleCfJwt(verify));

  if (!strategyName) {
    throw new Error("Missing strategyName for cloudflare-jwt hapi plugin!");
  }

  server.auth.strategy(strategyName, "jwt", {
    key: hapiJwt2KeyAsync({
      jwksUri: `${issuer}/cdn-cgi/access/certs`,
    }),
    cookieKey: false,
    urlKey: false,
    headerKey: CF_JWT_HEADER_NAME,
    validate,
    verifyOptions: {
      audience,
      issuer,
      algorithms: ["RS256"],
    },
  });
};

export const registerCloudflareAccessJwt = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  const { audience, domain } = config.cfaccess;
  // only enable this plugin if cloudflare access config is configured
  if (audience && domain) {
    server.log(["auth"], "cloudflare access authorization enabled");
    await server.register({
      plugin: {
        name: "cloudflare-jwt",
        version: "0.0.1",
        register: cfJwtRegister,
      },
      options: {
        issuer: `https://${domain}`,
        audience,
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        validate(decoded: any, _request: any) {
          const { email, name } = decoded;
          return {
            isValid: true,
            credentials: { user: { email, name } },
          };
        },
      },
    });
  }
};
@@ -1,20 +0,0 @@
import type * as Hapi from "@hapi/hapi";
import NextAuthPlugin from "@digiresilience/hapi-nextauth";
import { NextAuthAdapter } from "@digiresilience/metamigo-common";
import { IAppConfig } from "@digiresilience/metamigo-config";

export const registerNextAuth = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  const nextAuthAdapterFactory: any = (request: Hapi.Request) =>
    new NextAuthAdapter(request.db());

  await server.register({
    plugin: NextAuthPlugin,
    options: {
      nextAuthAdapterFactory,
      sharedSecret: config.nextAuth.secret,
    },
  });
};
@@ -1,71 +0,0 @@
import type * as Hapi from "@hapi/hapi";
import { IAppConfig } from "@digiresilience/metamigo-config";
import { postgraphile, HttpRequestHandler } from "postgraphile";
import { getPostGraphileOptions } from "@digiresilience/metamigo-db";

export interface HapiPostgraphileOptions {}

const PostgraphilePlugin: Hapi.Plugin<HapiPostgraphileOptions> = {
  name: "postgraphilePlugin",
  version: "1.0.0",
  register: async function (server, options: HapiPostgraphileOptions) {
    const config = server.config();
    const postgraphileMiddleware: HttpRequestHandler = postgraphile(
      config.postgraphile.authConnection,
      "app_public",
      {
        ...getPostGraphileOptions(),
        jwtSecret: "",
        pgSettings: async (req) => {
          const auth = (req as any).hapiAuth;
          if (auth.isAuthenticated && auth.credentials.user.userRole) {
            return {
              role: `app_${auth.credentials.user.userRole}`,
              "jwt.claims.session_id": auth.credentials.sessionToken,
            };
          } else {
            return {
              role: "app_anonymous",
            };
          }
        },
      } as any
    );

    server.route({
      method: ["POST"],
      path: "/graphql",
      options: {
        auth: "session-id-bearer-token",
        payload: {
          parse: false, // this disables payload parsing
          output: "stream", // ensures the payload is a readable stream which postgraphile expects
        },
      },
      handler: (request: Hapi.Request, h: Hapi.ResponseToolkit) => {
        return new Promise((resolve, reject) => {
          const rawReq = request.raw.req as any;
          rawReq.hapiAuth = request.auth;
          postgraphileMiddleware(rawReq, request.raw.res, (error) => {
            if (error) {
              reject(error);
            } else {
              // PostGraphile responds directly to the request
              resolve(h.abandon);
            }
          });
        });
      },
    });
  },
};

export const registerPostgraphile = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  await server.register({
    plugin: PostgraphilePlugin,
    options: {},
  });
};
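Because the /graphql route above is guarded by the session-id-bearer-token strategy and hands PostGraphile the raw request stream, clients post ordinary GraphQL bodies with a session token as a bearer token. A hedged sketch of such a call; the host/port and the trivial query are placeholders, not something this file defines:

// Illustrative sketch only: POST a GraphQL query to the Hapi-mounted PostGraphile
// endpoint, authenticating via the bearer-access-token session strategy.
async function runQuery(sessionToken: string): Promise<unknown> {
  const res = await fetch("http://localhost:3000/graphql", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${sessionToken}`,
    },
    body: JSON.stringify({ query: "{ __typename }" }),
  });
  return res.json();
}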
@@ -1,42 +0,0 @@
import type * as Hapi from "@hapi/hapi";
import type { IInitOptions } from "pg-promise";
import Schmervice from "@hapipal/schmervice";
import { makePlugin } from "@digiresilience/hapi-pg-promise";

import type { IAppConfig } from "../../config";
import { dbInitOptions, IRepositories } from "@digiresilience/metamigo-db";
import { registerNextAuth } from "./hapi-nextauth.js";
import { registerSwagger } from "./swagger.js";
import { registerCloudflareAccessJwt } from "./cloudflare-jwt.js";
import { registerAuthBearer } from "./auth-bearer.js";
import pg from "pg-promise/typescript/pg-subset";

import { registerPostgraphile } from "./hapi-postgraphile.js";

export const register = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  await server.register(Schmervice);

  const pgpInit = dbInitOptions(config);
  const options = {
    // the only required parameter is the connection string
    connection: config.db.connection,
    // ... and the pg-promise initialization options
    pgpInit,
  };

  await server.register([
    {
      plugin: makePlugin<IInitOptions<IRepositories, pg.IClient>>(),
      options,
    },
  ]);

  // await registerNextAuth(server, config);
  await registerSwagger(server);
  // await registerCloudflareAccessJwt(server, config);
  // await registerAuthBearer(server, config);
  await registerPostgraphile(server, config);
};
@@ -1,32 +0,0 @@
import * as Inert from "@hapi/inert";
import * as Vision from "@hapi/vision";
import type * as Hapi from "@hapi/hapi";
import * as HapiSwagger from "hapi-swagger";

export const registerSwagger = async (server: Hapi.Server): Promise<void> => {
  const swaggerOptions: HapiSwagger.RegisterOptions = {
    info: {
      title: "Metamigo API Docs",
      description: "part of CDR Link",
      version: "0.1",
    },
    // group sets of endpoints by tag
    tags: [
      {
        name: "users",
        description: "API for Users",
      },
    ],
    documentationRouteTags: ["swagger"],
    documentationPath: "/api-docs",
  };

  await server.register([
    { plugin: Inert },
    { plugin: Vision },
    {
      plugin: HapiSwagger,
      options: swaggerOptions,
    },
  ]);
};
@@ -1,21 +0,0 @@
import * as Metamigo from "@digiresilience/metamigo-common";
import Toys from "@hapipal/toys";

export const withDefaults = Toys.withRouteDefaults({
  options: {
    cors: true,
    auth: "session-id-bearer-token",
    validate: {
      failAction: Metamigo.validatingFailAction,
    },
  },
});

export const noAuth = Toys.withRouteDefaults({
  options: {
    cors: true,
    validate: {
      failAction: Metamigo.validatingFailAction,
    },
  },
});
@@ -1,31 +0,0 @@
import isFunction from "lodash/isFunction.js";
import type * as Hapi from "@hapi/hapi";
import * as UserRoutes from "./users/index.js";
import * as VoiceRoutes from "./voice/index.js";
import * as WhatsappRoutes from "./whatsapp/index.js";
import * as SignalRoutes from "./signal/index.js";

const loadRouteIndex = async (server, index) => {
  const routes = [];
  for (const exported in index) {
    if (Object.prototype.hasOwnProperty.call(index, exported)) {
      const route = index[exported];
      routes.push(route);
    }
  }

  routes.forEach(async (route) => {
    if (isFunction(route)) server.route(await route(server));
    else server.route(route);
  });
};

export const register = async (server: Hapi.Server): Promise<void> => {
  // Load your routes here.
  // routes are loaded from the list of exported vars
  // a route file should export routes directly or an async function that returns the routes.
  loadRouteIndex(server, UserRoutes);
  loadRouteIndex(server, VoiceRoutes);
  loadRouteIndex(server, WhatsappRoutes);
  loadRouteIndex(server, SignalRoutes);
};
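The comments above describe the convention loadRouteIndex relies on: every module under ./routes exports either route objects or async factories that receive the server. A minimal sketch of a module that would satisfy it; HealthRoute and BuildInfoRoute are made-up names used only for illustration:

import type * as Hapi from "@hapi/hapi";

// Exported directly: loadRouteIndex passes this object straight to server.route().
export const HealthRoute: Hapi.ServerRoute = {
  method: "get",
  path: "/api/health",
  handler: () => ({ ok: true }),
};

// Exported as an async factory: loadRouteIndex awaits it with the server instance.
export const BuildInfoRoute = async (server: Hapi.Server): Promise<Hapi.ServerRoute> => ({
  method: "get",
  path: "/api/build-info",
  handler: () => ({ version: server.version }),
});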
@@ -1,250 +0,0 @@
import * as Hapi from "@hapi/hapi";
import Joi from "joi";
import * as Helpers from "../helpers/index.js";
import Boom from "@hapi/boom";

const getSignalService = (request) => request.services("app").signaldService;

export const GetAllSignalBotsRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/signal/bots",
  options: {
    description: "Get all bots",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const signalService = getSignalService(request);
      const bots = await signalService.findAll();

      if (bots) {
        // with the pino logger the first arg is an object of data to log
        // the second arg is a message
        // all other args are formatted args for the msg
        request.logger.info({ bots }, "Retrieved bot(s) at %s", new Date());

        return { bots };
      }

      return _h.response().code(204);
    },
  },
});

export const GetBotsRoute = Helpers.noAuth({
  method: "get",
  path: "/api/signal/bots/{token}",
  options: {
    description: "Get one bot",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { token } = request.params;
      const signalService = getSignalService(request);

      const bot = await signalService.findByToken(token);

      if (bot) {
        // with the pino logger the first arg is an object of data to log
        // the second arg is a message
        // all other args are formatted args for the msg
        request.logger.info({ bot }, "Retrieved bot(s) at %s", new Date());

        return bot;
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

interface MessageRequest {
  phoneNumber: string;
  message: string;
}

export const SendBotRoute = Helpers.noAuth({
  method: "post",
  path: "/api/signal/bots/{token}/send",
  options: {
    description: "Send a message",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { token } = request.params;
      const { phoneNumber, message } = request.payload as MessageRequest;
      const signalService = getSignalService(request);

      const bot = await signalService.findByToken(token);

      if (bot) {
        request.logger.info({ bot }, "Sent a message at %s", new Date());

        await signalService.send(bot, phoneNumber, message as string);
        return _h
          .response({
            result: {
              recipient: phoneNumber,
              timestamp: new Date().toISOString(),
              source: bot.phoneNumber,
            },
          })
          .code(200); // temp
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

interface ResetSessionRequest {
  phoneNumber: string;
}

export const ResetSessionBotRoute = Helpers.noAuth({
  method: "post",
  path: "/api/signal/bots/{token}/resetSession",
  options: {
    description: "Reset a session with another user",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { token } = request.params;
      const { phoneNumber } = request.payload as ResetSessionRequest;
      const signalService = getSignalService(request);

      const bot = await signalService.findByToken(token);

      if (bot) {
        await signalService.resetSession(bot, phoneNumber);
        return _h
          .response({
            result: {
              recipient: phoneNumber,
              timestamp: new Date().toISOString(),
              source: bot.phoneNumber,
            },
          })
          .code(200); // temp
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

export const ReceiveBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/signal/bots/{token}/receive",
  options: {
    description: "Receive messages",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { token } = request.params;
      const signalService = getSignalService(request);

      const bot = await signalService.findByToken(token);

      if (bot) {
        request.logger.info({ bot }, "Received messages at %s", new Date());

        return signalService.receive(bot);
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

export const RegisterBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/signal/bots/{id}/register",
  options: {
    description: "Register a bot",
    async handler(request: Hapi.Request, h: Hapi.ResponseToolkit) {
      const { id } = request.params;
      const signalService = getSignalService(request);
      const { code } = request.query;

      const bot = await signalService.findById(id);
      if (!bot) throw Boom.notFound("Bot not found");

      try {
        request.logger.info({ bot }, "Create bot at %s", new Date());
        await signalService.register(bot, code);
        return h.response(bot).code(200);
      } catch (error) {
        return h.response().code(error.code);
      }
    },
  },
});

interface BotRequest {
  phoneNumber: string;
  description: string;
}

export const CreateBotRoute = Helpers.withDefaults({
  method: "post",
  path: "/api/signal/bots",
  options: {
    description: "Register a bot",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { phoneNumber, description } = request.payload as BotRequest;
      const signalService = getSignalService(request);
      console.log("request.auth.credentials:", request.auth.credentials);

      const bot = await signalService.create(
        phoneNumber,
        description,
        request.auth.credentials.email as string
      );
      if (bot) {
        request.logger.info({ bot }, "Create bot at %s", new Date());
        return bot;
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

export const RequestCodeRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/signal/bots/{id}/requestCode",
  options: {
    description: "Register a bot",
    validate: {
      params: Joi.object({
        id: Joi.string().uuid().required(),
      }),
      query: Joi.object({
        mode: Joi.string().valid("sms", "voice").required(),
        captcha: Joi.string(),
      }),
    },
    async handler(request: Hapi.Request, h: Hapi.ResponseToolkit) {
      const { id } = request.params;
      const { mode, captcha } = request.query;
      const signalService = getSignalService(request);

      const bot = await signalService.findById(id);

      if (!bot) {
        throw Boom.notFound("Bot not found");
      }

      try {
        if (mode === "sms") {
          await signalService.requestSMSVerification(bot, captcha);
        } else if (mode === "voice") {
          await signalService.requestVoiceVerification(bot, captcha);
        }

        return h.response().code(200);
      } catch (error) {
        console.log(error);
        if (error.name === "CaptchaRequiredException") {
          return h.response().code(402);
        }

        if (error.code) {
          return h.response().code(error.code);
        }

        return h.response().code(500);
      }
    },
  },
});
@@ -1,64 +0,0 @@
import Joi from "joi";
import * as Hapi from "@hapi/hapi";
import {
  UserRecord,
  crudRoutesFor,
  CrudControllerBase,
} from "@digiresilience/metamigo-common";
import * as RouteHelpers from "../helpers/index.js";

class UserRecordController extends CrudControllerBase(UserRecord) {}

const validator = (): Record<string, Hapi.RouteOptionsValidate> => ({
  create: {
    payload: Joi.object({
      name: Joi.string().required(),
      email: Joi.string().email().required(),
      emailVerified: Joi.string().isoDate().required(),
      createdBy: Joi.string().required(),
      avatar: Joi.string()
        .uri({ scheme: ["http", "https"] })
        .optional(),
      userRole: Joi.string().optional(),
      isActive: Joi.boolean().optional(),
    }).label("UserCreate"),
  },
  updateById: {
    params: {
      userId: Joi.string().uuid().required(),
    },
    payload: Joi.object({
      name: Joi.string().optional(),
      email: Joi.string().email().optional(),
      emailVerified: Joi.string().isoDate().optional(),
      createdBy: Joi.boolean().optional(),
      avatar: Joi.string()
        .uri({ scheme: ["http", "https"] })
        .optional(),
      userRole: Joi.string().optional(),
      isActive: Joi.boolean().optional(),
      createdAt: Joi.string().isoDate().optional(),
      updatedAt: Joi.string().isoDate().optional(),
    }).label("UserUpdate"),
  },
  deleteById: {
    params: {
      userId: Joi.string().uuid().required(),
    },
  },
  getById: {
    params: {
      userId: Joi.string().uuid().required(),
    },
  },
});

export const UserRoutes = RouteHelpers.withDefaults(
  crudRoutesFor(
    "user",
    "/api/users",
    new UserRecordController("users", "userId"),
    "userId",
    validator()
  )
);
@@ -1,125 +0,0 @@
import * as Hapi from "@hapi/hapi";
import Joi from "joi";
import * as Boom from "@hapi/boom";
import * as R from "remeda";
import * as Helpers from "../helpers/index.js";
import Twilio from "twilio";
import {
  crudRoutesFor,
  CrudControllerBase,
} from "@digiresilience/metamigo-common";
import { VoiceLineRecord, SavedVoiceLine } from "@digiresilience/metamigo-db";

const TwilioHandlers = {
  async freeNumbers(provider, request: Hapi.Request) {
    const { accountSid, apiKeySid, apiKeySecret } = provider.credentials;
    const client = Twilio(apiKeySid, apiKeySecret, {
      accountSid,
    });
    const numbers = R.pipe(
      await client.incomingPhoneNumbers.list({ limit: 100 }),
      R.filter((n) => n.capabilities.voice),
      R.map(R.pick(["sid", "phoneNumber"]))
    );
    const numberSids: any = R.map(numbers, R.prop("sid"));
    const voiceLineRepo = request.db().voiceLines;
    const voiceLines: SavedVoiceLine[] =
      await voiceLineRepo.findAllByProviderLineSids(numberSids);
    const voiceLineSids = new Set(R.map(voiceLines, R.prop("providerLineSid")));

    return R.pipe(
      numbers,
      R.reject((n) => voiceLineSids.has(n.sid as any)),
      R.map((n) => ({ id: n.sid, name: n.phoneNumber }))
    );
  },
};

export const VoiceProviderRoutes = Helpers.withDefaults([
  {
    method: "GET",
    path: "/api/voice/providers/{providerId}/freeNumbers",
    options: {
      description:
        "get a list of the incoming numbers for a provider account that aren't assigned to a voice line",
      validate: {
        params: {
          providerId: Joi.string().uuid().required(),
        },
      },
      async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
        const { providerId } = request.params;
        const voiceProvidersRepo = request.db().voiceProviders;
        const provider = await voiceProvidersRepo.findById(providerId);
        if (!provider) return Boom.notFound();
        switch (provider.kind) {
          case "TWILIO": {
            return TwilioHandlers.freeNumbers(provider, request);
          }

          default: {
            return Boom.badImplementation();
          }
        }
      },
    },
  },
]);

class VoiceLineRecordController extends CrudControllerBase(VoiceLineRecord) {}

const validator = (): Record<string, Hapi.RouteOptionsValidate> => ({
  create: {
    payload: Joi.object({
      providerType: Joi.string().required(),
      providerId: Joi.string().required(),
      number: Joi.string().required(),
      language: Joi.string().required(),
      voice: Joi.string().required(),
      promptText: Joi.string().optional(),
      promptRecording: Joi.binary()
        .encoding("base64")
        .max(50 * 1000 * 1000)
        .optional(),
    }).label("VoiceLineCreate"),
  },
  updateById: {
    params: {
      id: Joi.string().uuid().required(),
    },
    payload: Joi.object({
      providerType: Joi.string().optional(),
      providerId: Joi.string().optional(),
      number: Joi.string().optional(),
      language: Joi.string().optional(),
      voice: Joi.string().optional(),
      promptText: Joi.string().optional(),
      promptRecording: Joi.binary()
        .encoding("base64")
        .max(50 * 1000 * 1000)
        .optional(),
    }).label("VoiceLineUpdate"),
  },
  deleteById: {
    params: {
      id: Joi.string().uuid().required(),
    },
  },
  getById: {
    params: {
      id: Joi.string().uuid().required(),
    },
  },
});

export const VoiceLineRoutes = Helpers.withDefaults(
  crudRoutesFor(
    "voice-line",
    "/api/voice/voice-line",
    new VoiceLineRecordController("voiceLines", "id"),
    "id",
    validator()
  )
);

export * from "./twilio/index.js";
@@ -1,230 +0,0 @@
import * as Hapi from "@hapi/hapi";
import Joi from "joi";
import * as Boom from "@hapi/boom";
import Twilio from "twilio";
import { SavedVoiceProvider } from "@digiresilience/metamigo-db";
import pMemoize from "p-memoize";
import ExpiryMap from "expiry-map";
import ms from "ms";
import * as Helpers from "../../helpers/index.js";
import workerUtils from "../../../../worker-utils.js";

const queueRecording = async (meta) =>
  workerUtils.addJob("twilio-recording", meta, { jobKey: meta.callSid });

const twilioClientFor = (provider: SavedVoiceProvider): Twilio.Twilio => {
  const { accountSid, apiKeySid, apiKeySecret } = provider.credentials;
  if (!accountSid || !apiKeySid || !apiKeySecret)
    throw new Error(
      `twilio provider ${provider.name} does not have credentials`
    );

  return Twilio(apiKeySid, apiKeySecret, {
    accountSid,
  });
};

const _getOrCreateTTSTestApplication = async (
  url,
  name,
  client: Twilio.Twilio
) => {
  const application = await client.applications.list({ friendlyName: name });

  if (application[0] && application[0].voiceUrl === url) {
    return application[0];
  }

  return client.applications.create({
    voiceMethod: "POST",
    voiceUrl: url,
    friendlyName: name,
  });
};

const cache = new ExpiryMap(ms("1h"));
const getOrCreateTTSTestApplication = pMemoize(_getOrCreateTTSTestApplication, {
  cache,
});

export const TwilioRoutes = Helpers.noAuth([
  {
    method: "get",
    path: "/api/voice/twilio/prompt/{voiceLineId}",
    options: {
      description: "download the mp3 file to play as a prompt for the user",
      validate: {
        params: {
          voiceLineId: Joi.string().uuid().required(),
        },
      },
      async handler(request: Hapi.Request, h: Hapi.ResponseToolkit) {
        const { voiceLineId } = request.params;
        const voiceLine = await request
          .db()
          .voiceLines.findById({ id: voiceLineId });

        if (!voiceLine) return Boom.notFound();
        if (!voiceLine.audioPromptEnabled) return Boom.badRequest();

        const mp3 = voiceLine.promptAudio["audio/mpeg"];
        if (!mp3) {
          return Boom.serverUnavailable();
        }

        return h
          .response(Buffer.from(mp3, "base64"))
          .header("Content-Type", "audio/mpeg")
          .header("Content-Disposition", "attachment; filename=prompt.mp3");
      },
    },
  },
  {
    method: "post",
    path: "/api/voice/twilio/record/{voiceLineId}",
    options: {
      description: "webhook for twilio to handle an incoming call",
      validate: {
        params: {
          voiceLineId: Joi.string().uuid().required(),
        },
      },
      async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
        const { voiceLineId } = request.params;
        const { To } = request.payload as { To: string; };
        const voiceLine = await request.db().voiceLines.findBy({ number: To });
        if (!voiceLine) return Boom.notFound();
        if (voiceLine.id !== voiceLineId) return Boom.badRequest();

        const frontendUrl = request.server.config().frontend.url;
        const useTextPrompt = !voiceLine.audioPromptEnabled;

        const twiml = new Twilio.twiml.VoiceResponse();
        if (useTextPrompt) {
          let prompt = voiceLine.promptText;
          if (!prompt || prompt.length === 0)
            prompt =
              "The grabadora text prompt is unconfigured. Please set a prompt in the administration screen.";
          twiml.say(
            {
              language: voiceLine.language as any,
              voice: voiceLine.voice as any,
            },
            prompt
          );
        } else {
          const promptUrl = `${frontendUrl}/api/v1/voice/twilio/prompt/${voiceLineId}`;
          twiml.play({ loop: 1 }, promptUrl);
        }

        twiml.record({
          playBeep: true,
          finishOnKey: "1",
          recordingStatusCallback: `${frontendUrl}/api/v1/voice/twilio/recording-ready/${voiceLineId}`,
        });
        return twiml.toString();
      },
    },
  },
  {
    method: "post",
    path: "/api/voice/twilio/recording-ready/{voiceLineId}",
    options: {
      description: "webhook for twilio to handle a recording",
      validate: {
        params: {
          voiceLineId: Joi.string().uuid().required(),
        },
      },
      async handler(request: Hapi.Request, h: Hapi.ResponseToolkit) {
        const { voiceLineId } = request.params;
        const voiceLine = await request
          .db()
          .voiceLines.findById({ id: voiceLineId });
        if (!voiceLine) return Boom.notFound();

        const { AccountSid, RecordingSid, CallSid } = request.payload as {
          AccountSid: string;
          RecordingSid: string;
          CallSid: string;
        };

        await queueRecording({
          voiceLineId,
          accountSid: AccountSid,
          callSid: CallSid,
          recordingSid: RecordingSid,
        });
        return h.response().code(203);
      },
    },
  },
  {
    method: "post",
    path: "/api/voice/twilio/text-to-speech/{providerId}",
    options: {
      description: "webhook for twilio to test the twilio text-to-speech",
      validate: {
        params: {
          providerId: Joi.string().uuid().required(),
        },
      },
      async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
        const { language, voice, prompt } = request.payload as {
          language: any;
          voice: any;
          prompt: string;
        };
        const twiml = new Twilio.twiml.VoiceResponse();
        twiml.say({ language, voice }, prompt);
        return twiml.toString();
      },
    },
  },
  {
    method: "get",
    path: "/api/voice/twilio/text-to-speech-token/{providerId}",
    options: {
      description:
        "generates a one time token to test the twilio text-to-speech",
      validate: {
        params: {
          providerId: Joi.string().uuid().required(),
        },
      },
      async handler(request: Hapi.Request, h: Hapi.ResponseToolkit) {
        const { providerId } = request.params as { providerId: string; };
        const provider: SavedVoiceProvider = await request
          .db()
          .voiceProviders.findById({ id: providerId });
        if (!provider) return Boom.notFound();

        const frontendUrl = request.server.config().frontend.url;
        const url = `${frontendUrl}/api/v1/voice/twilio/text-to-speech/${providerId}`;
        const name = `Grabadora text-to-speech tester: ${providerId}`;
        const app = await getOrCreateTTSTestApplication(
          url,
          name,
          twilioClientFor(provider)
        );

        const { accountSid, apiKeySecret, apiKeySid } = provider.credentials;
        const token = new Twilio.jwt.AccessToken(
          accountSid,
          apiKeySid,
          apiKeySecret,
          { identity: "tts-test" }
        );

        const grant = new Twilio.jwt.AccessToken.VoiceGrant({
          outgoingApplicationSid: app.sid,
          incomingAllow: true,
        });
        token.addGrant(grant);
        return h.response({
          token: token.toJwt(),
        });
      },
    },
  },
]);
@@ -1,215 +0,0 @@
import * as Hapi from "@hapi/hapi";
import * as Helpers from "../helpers/index.js";
import Boom from "@hapi/boom";

export const GetAllWhatsappBotsRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/whatsapp/bots",
  options: {
    description: "Get all bots",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { whatsappService } = request.services("app");

      const bots = await whatsappService.findAll();

      if (bots) {
        // with the pino logger the first arg is an object of data to log
        // the second arg is a message
        // all other args are formatted args for the msg
        request.logger.info({ bots }, "Retrieved bot(s) at %s", new Date());

        return { bots };
      }

      return _h.response().code(204);
    },
  },
});

export const GetBotsRoute = Helpers.noAuth({
  method: "get",
  path: "/api/whatsapp/bots/{token}",
  options: {
    description: "Get one bot",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { token } = request.params;
      const { whatsappService } = request.services("app");

      const bot = await whatsappService.findByToken(token);

      if (bot) {
        // with the pino logger the first arg is an object of data to log
        // the second arg is a message
        // all other args are formatted args for the msg
        request.logger.info({ bot }, "Retrieved bot(s) at %s", new Date());

        return bot;
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

interface MessageRequest {
  phoneNumber: string;
  message: string;
}

export const SendBotRoute = Helpers.noAuth({
  method: "post",
  path: "/api/whatsapp/bots/{token}/send",
  options: {
    description: "Send a message",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { token } = request.params;
      const { phoneNumber, message } = request.payload as MessageRequest;
      const { whatsappService } = request.services("app");

      const bot = await whatsappService.findByToken(token);

      if (bot) {
        request.logger.info({ bot }, "Sent a message at %s", new Date());

        await whatsappService.send(bot, phoneNumber, message as string);
        return _h
          .response({
            result: {
              recipient: phoneNumber,
              timestamp: new Date().toISOString(),
              source: bot.phoneNumber,
            },
          })
          .code(200); // temp
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

export const ReceiveBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/whatsapp/bots/{token}/receive",
  options: {
    description: "Receive messages",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { token } = request.params;
      const { whatsappService } = request.services("app");

      const bot = await whatsappService.findByToken(token);

      if (bot) {
        request.logger.info({ bot }, "Received messages at %s", new Date());

        // temp
        const date = new Date();
        const twoDaysAgo = new Date(date.getTime());
        twoDaysAgo.setDate(date.getDate() - 2);
        return whatsappService.receive(bot, twoDaysAgo);
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

export const RegisterBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/whatsapp/bots/{id}/register",
  options: {
    description: "Register a bot",
    async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
      const { id } = request.params;
      const { whatsappService } = request.services("app");

      const bot = await whatsappService.findById(id);

      if (bot) {
        await whatsappService.register(bot, (error: string) => {
          if (error) {
            return _h.response(error).code(500);
          }

          request.logger.info({ bot }, "Register bot at %s", new Date());
          return _h.response().code(200);
        });
      }

      throw Boom.notFound("Bot not found");
    },
  },
});

export const UnverifyBotRoute = Helpers.withDefaults({
  method: "post",
|
||||
path: "/api/whatsapp/bots/{id}/unverify",
|
||||
options: {
|
||||
description: "Unverify bot",
|
||||
async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
|
||||
const { id } = request.params;
|
||||
const { whatsappService } = request.services("app");
|
||||
|
||||
const bot = await whatsappService.findById(id);
|
||||
|
||||
if (bot) {
|
||||
return whatsappService.unverify(bot);
|
||||
}
|
||||
|
||||
throw Boom.notFound("Bot not found");
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
export const RefreshBotRoute = Helpers.withDefaults({
|
||||
method: "get",
|
||||
path: "/api/whatsapp/bots/{id}/refresh",
|
||||
options: {
|
||||
description: "Refresh messages",
|
||||
async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
|
||||
const { id } = request.params;
|
||||
const { whatsappService } = request.services("app");
|
||||
|
||||
const bot = await whatsappService.findById(id);
|
||||
|
||||
if (bot) {
|
||||
request.logger.info({ bot }, "Refreshed messages at %s", new Date());
|
||||
|
||||
// await whatsappService.refresh(bot);
|
||||
return;
|
||||
}
|
||||
|
||||
throw Boom.notFound("Bot not found");
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
interface BotRequest {
|
||||
phoneNumber: string;
|
||||
description: string;
|
||||
}
|
||||
|
||||
export const CreateBotRoute = Helpers.withDefaults({
|
||||
method: "post",
|
||||
path: "/api/whatsapp/bots",
|
||||
options: {
|
||||
description: "Register a bot",
|
||||
async handler(request: Hapi.Request, _h: Hapi.ResponseToolkit) {
|
||||
const { phoneNumber, description } = request.payload as BotRequest;
|
||||
const { whatsappService } = request.services("app");
|
||||
console.log("request.auth.credentials:", request.auth.credentials);
|
||||
|
||||
const bot = await whatsappService.create(
|
||||
phoneNumber,
|
||||
description,
|
||||
request.auth.credentials.email as string
|
||||
);
|
||||
if (bot) {
|
||||
request.logger.info({ bot }, "Register bot at %s", new Date());
|
||||
return bot;
|
||||
}
|
||||
|
||||
throw Boom.notFound("Bot not found");
|
||||
},
|
||||
},
|
||||
});
|
||||
|
|
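For reference, the token-authenticated send route above can be exercised with a plain HTTP call. A minimal sketch; the host, port, bot token, and phone number are placeholders:

// Hypothetical caller of POST /api/whatsapp/bots/{token}/send (ESM top-level await).
const response = await fetch(
  "http://localhost:3000/api/whatsapp/bots/<bot-token>/send",
  {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify({ phoneNumber: "+15551234567", message: "hello" }),
  }
);
console.log(response.status, await response.json());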
@ -1,12 +0,0 @@
import type * as Hapi from "@hapi/hapi";
import SettingsService from "./settings.js";
import WhatsappService from "./whatsapp.js";
import SignaldService from "./signald.js";

export const register = async (server: Hapi.Server): Promise<void> => {
  // register your services here
  // don't forget to add them to the AppServices interface in ../types/index.ts
  server.registerService(SettingsService);
  server.registerService(WhatsappService);
  server.registerService(SignaldService);
};
@ -1,15 +0,0 @@
import * as Hapi from "@hapi/hapi";
import * as Schmervice from "@hapipal/schmervice";
import { settingInfo, SettingsService } from "@digiresilience/metamigo-db";

export const VoicemailPrompt = settingInfo<string>("voicemail-prompt");
export const VoicemailMinLength = settingInfo<number>("voicemail-min-length");
export const VoicemailUseTextPrompt = settingInfo<boolean>(
  "voicemail-use-text-prompt"
);

export { ISettingsService } from "@digiresilience/metamigo-db";
const service = (server: Hapi.Server): Schmervice.ServiceFunctionalInterface =>
  SettingsService(server.db().settings);

export default service;
@ -1,231 +0,0 @@
import { Server } from "@hapi/hapi";
import { Service } from "@hapipal/schmervice";
import { promises as fs } from "node:fs";
import {
  SignaldAPI,
  SendResponsev1,
  IncomingMessagev1,
  ClientMessageWrapperv1,
} from "@digiresilience/node-signald";
import { SavedSignalBot as Bot } from "@digiresilience/metamigo-db";
import workerUtils from "../../worker-utils.js";

export default class SignaldService extends Service {
  signald: SignaldAPI;
  subscriptions: Set<string>;

  constructor(server: Server, options: never) {
    super(server, options);

    if (this.server.config().signald.enabled) {
      this.signald = new SignaldAPI();
      this.signald.setLogger((level, msg, extra?) => {
        this.server.logger[level]({ extra }, msg);
      });
      this.subscriptions = new Set();
    }
  }

  async initialize(): Promise<void> {
    if (this.server.config().signald.enabled && this.signald) {
      this.setupListeners();
      this.connect();
    }
  }

  async teardown(): Promise<void> {
    if (this.server.config().signald.enabled && this.signald)
      this.signald.disconnect();
  }

  private connect() {
    const { enabled, socket } = this.server.config().signald;
    if (!enabled) return;
    this.signald.connectWithBackoff(socket);
  }

  private async onConnected() {
    await this.subscribeAll();
  }

  private setupListeners() {
    this.signald.on("transport_error", async (error) => {
      this.server.logger.info({ error }, "signald transport error");
    });
    this.signald.on("transport_connected", async () => {
      this.onConnected();
    });
    this.signald.on(
      "transport_received_payload",
      async (payload: ClientMessageWrapperv1) => {
        this.server.logger.debug({ payload }, "signald payload received");
        if (payload.type === "IncomingMessage") {
          this.receiveMessage(payload.data);
        }
      }
    );
    this.signald.on("transport_sent_payload", async (payload) => {
      this.server.logger.debug({ payload }, "signald payload sent");
    });
  }

  private async subscribeAll() {
    const result = await this.signald.listAccounts();
    const accounts = result.accounts.map((account) => account.address.number);
    await Promise.all(
      accounts.map(async (account) => {
        await this.signald.subscribe(account);
        this.subscriptions.add(account);
      })
    );
  }

  private async unsubscribeAll() {
    await Promise.all(
      [...this.subscriptions].map(async (account) => {
        await this.signald.unsubscribe(account);
        this.subscriptions.delete(account);
      })
    );
  }

  async create(
    phoneNumber: string,
    description: string,
    email: string
  ): Promise<Bot> {
    const db = this.server.db();
    const user = await db.users.findBy({ email });
    const row = await db.signalBots.insert({
      phoneNumber,
      description,
      userId: user.id,
    });
    return row;
  }

  async findAll(): Promise<Bot[]> {
    const db = this.server.db();
    return db.signalBots.findAll();
  }

  async findById(id: string): Promise<Bot> {
    const db = this.server.db();
    return db.signalBots.findById({ id });
  }

  async findByToken(token: string): Promise<Bot> {
    const db = this.server.db();
    return db.signalBots.findBy({ token });
  }

  async register(bot: Bot, code: string): Promise<void> {
    const address = await this.signald.verify(bot.phoneNumber, code);
    this.server.db().signalBots.updateAuthInfo(bot, address.address.uuid);
  }

  async send(
    bot: Bot,
    phoneNumber: string,
    message: string
  ): Promise<SendResponsev1> {
    this.server.logger.debug(
      { us: bot.phoneNumber, them: phoneNumber, message },
      "signald send"
    );
    return this.signald.send(
      bot.phoneNumber,
      { number: phoneNumber },
      undefined,
      message
    );
  }

  async resetSession(bot: Bot, phoneNumber: string): Promise<SendResponsev1> {
    return this.signald.resetSession(bot.phoneNumber, {
      number: phoneNumber,
    });
  }

  async requestVoiceVerification(bot: Bot, captcha?: string): Promise<void> {
    this.server.logger.debug(
      { number: bot.phoneNumber, captcha },
      "requesting voice verification for"
    );

    await this.signald.register(bot.phoneNumber, true, captcha);
  }

  async requestSMSVerification(bot: Bot, captcha?: string): Promise<void> {
    this.server.logger.debug(
      { number: bot.phoneNumber, captcha },
      "requesting sms verification for"
    );
    await this.signald.register(bot.phoneNumber, false, captcha);
  }

  private async receiveMessage(message: IncomingMessagev1) {
    const { account } = message;
    if (!account) {
      this.server.logger.debug({ message }, "invalid message received");
      this.server.logger.error("invalid message received");
    }

    const bot = await this.server
      .db()
      .signalBots.findBy({ phoneNumber: account });
    if (!bot) {
      this.server.logger.info("message received for unknown bot", {
        account,
        message,
      });
      return;
    }

    await this.queueMessage(bot, message);
  }

  private async getAttachmentInfo(dataMessage: IncomingMessagev1) {
    if (dataMessage.attachments?.length > 0) {
      const attachmentInfo = dataMessage.attachments[0];
      const buffer = await fs.readFile(attachmentInfo.storedFilename);
      const attachment = buffer.toString("base64");
      const mimetype = attachmentInfo.contentType ?? "application/octet-stream";
      const filename = attachmentInfo.customFilename ?? "unknown-filename";

      return { attachment, mimetype, filename };
    }

    return { attachment: undefined, mimetype: undefined, filename: undefined };
  }

  private async queueMessage(bot: Bot, message: IncomingMessagev1) {
    const { timestamp, account, data_message: dataMessage } = message;
    if (!dataMessage?.body && !dataMessage?.attachments) {
      this.server.logger.info({ message }, "message received with no content");
      return;
    }

    if (!timestamp || !account) {
      this.server.logger.debug({ message }, "invalid message received");
    }

    const { attachment, mimetype, filename } = await this.getAttachmentInfo(
      dataMessage
    );

    const receivedMessage = {
      message,
      botId: bot.id,
      botPhoneNumber: bot.phoneNumber,
      attachment,
      mimetype,
      filename,
    };

    workerUtils.addJob("signald-message", receivedMessage, {
      jobKey: `signal-bot-${bot.id}-${timestamp}`,
      queueName: `signal-bot-${bot.id}`,
    });
  }
}
@ -1,297 +0,0 @@
/* eslint-disable unicorn/no-abusive-eslint-disable */
/* eslint-disable */
import { Server } from "@hapi/hapi";
import { Service } from "@hapipal/schmervice";
import { SavedWhatsappBot as Bot } from "@digiresilience/metamigo-db";
import makeWASocket, {
  DisconnectReason,
  proto,
  downloadContentFromMessage,
  MediaType,
  fetchLatestBaileysVersion,
  isJidBroadcast,
  isJidStatusBroadcast,
  useMultiFileAuthState,
} from "@whiskeysockets/baileys";
import fs from "fs";
import workerUtils from "../../worker-utils.js";

export type AuthCompleteCallback = (error?: string) => void;

export default class WhatsappService extends Service {
  connections: { [key: string]: any; } = {};
  loginConnections: { [key: string]: any; } = {};

  static browserDescription: [string, string, string] = [
    "Metamigo",
    "Chrome",
    "2.0",
  ];

  constructor(server: Server, options: never) {
    super(server, options);
  }

  getAuthDirectory(bot: Bot): string {
    return `/baileys/${bot.id}`;
  }

  async initialize(): Promise<void> {
    this.updateConnections();
  }

  async teardown(): Promise<void> {
    this.resetConnections();
  }

  private async sleep(ms: number): Promise<void> {
    console.log(`pausing ${ms}`);
    return new Promise((resolve) => setTimeout(resolve, ms));
  }

  private async resetConnections() {
    for (const connection of Object.values(this.connections)) {
      try {
        connection.end(null);
      } catch (error) {
        console.log(error);
      }
    }
    this.connections = {};
  }

  private async createConnection(
    bot: Bot,
    server: Server,
    options: any,
    authCompleteCallback?: any
  ) {
    const directory = this.getAuthDirectory(bot);
    const { state, saveCreds } = await useMultiFileAuthState(directory);
    const msgRetryCounterMap: any = {};
    const socket = makeWASocket({
      ...options,
      auth: state,
      msgRetryCounterMap,
      shouldIgnoreJid: (jid) =>
        isJidBroadcast(jid) || isJidStatusBroadcast(jid),
    });
    let pause = 5000;

    socket.ev.process(async (events) => {
      if (events["connection.update"]) {
        const update = events["connection.update"];
        const {
          connection: connectionState,
          lastDisconnect,
          qr,
          isNewLogin,
        } = update;
        if (qr) {
          console.log("got qr code");
          await this.server.db().whatsappBots.updateQR(bot, qr);
        } else if (isNewLogin) {
          console.log("got new login");
          await this.server.db().whatsappBots.updateVerified(bot, true);
        } else if (connectionState === "open") {
          console.log("opened connection");
        } else if (connectionState === "close") {
          console.log("connection closed due to ", lastDisconnect.error);
          const disconnectStatusCode = (lastDisconnect?.error as any)?.output
            ?.statusCode;

          if (disconnectStatusCode === DisconnectReason.restartRequired) {
            console.log("reconnecting after got new login");
            const updatedBot = await this.findById(bot.id);
            await this.createConnection(updatedBot, server, options);
            authCompleteCallback?.();
          } else if (disconnectStatusCode !== DisconnectReason.loggedOut) {
            console.log("reconnecting");
            await this.sleep(pause);
            pause *= 2;
            this.createConnection(bot, server, options);
          }
        }
      }

      if (events["creds.update"]) {
        console.log("creds update");
        await saveCreds();
      }

      if (events["messages.upsert"]) {
        console.log("messages upsert");
        const upsert = events["messages.upsert"];
        const { messages } = upsert;
        if (messages) {
          await this.queueUnreadMessages(bot, messages);
        }
      }
    });

    this.connections[bot.id] = { socket, msgRetryCounterMap };
  }

  private async updateConnections() {
    this.resetConnections();

    const bots = await this.server.db().whatsappBots.findAll();
    for await (const bot of bots) {
      if (bot.isVerified) {
        const { version, isLatest } = await fetchLatestBaileysVersion();
        console.log(`using WA v${version.join(".")}, isLatest: ${isLatest}`);

        await this.createConnection(bot, this.server, {
          browser: WhatsappService.browserDescription,
          printQRInTerminal: false,
          version,
        });
      }
    }
  }

  private async queueMessage(bot: Bot, webMessageInfo: proto.IWebMessageInfo) {
    const {
      key: { id, fromMe, remoteJid },
      message,
      messageTimestamp,
    } = webMessageInfo;
    if (!fromMe && message && remoteJid !== "status@broadcast") {
      const { audioMessage, documentMessage, imageMessage, videoMessage } =
        message;
      const isMediaMessage =
        audioMessage || documentMessage || imageMessage || videoMessage;

      const messageContent = Object.values(message)[0];
      let messageType: MediaType;
      let attachment: string;
      let filename: string;
      let mimetype: string;
      if (isMediaMessage) {
        if (audioMessage) {
          messageType = "audio";
          filename = id + "." + audioMessage.mimetype.split("/").pop();
          mimetype = audioMessage.mimetype;
        } else if (documentMessage) {
          messageType = "document";
          filename = documentMessage.fileName;
          mimetype = documentMessage.mimetype;
        } else if (imageMessage) {
          messageType = "image";
          filename = id + "." + imageMessage.mimetype.split("/").pop();
          mimetype = imageMessage.mimetype;
        } else if (videoMessage) {
          messageType = "video";
          filename = id + "." + videoMessage.mimetype.split("/").pop();
          mimetype = videoMessage.mimetype;
        }

        const stream = await downloadContentFromMessage(
          messageContent,
          messageType
        );
        let buffer = Buffer.from([]);
        for await (const chunk of stream) {
          buffer = Buffer.concat([buffer, chunk]);
        }
        attachment = buffer.toString("base64");
      }

      if (messageContent || attachment) {
        const receivedMessage = {
          waMessageId: id,
          waMessage: JSON.stringify(webMessageInfo),
          waTimestamp: new Date((messageTimestamp as number) * 1000),
          attachment,
          filename,
          mimetype,
          whatsappBotId: bot.id,
          botPhoneNumber: bot.phoneNumber,
        };

        workerUtils.addJob("whatsapp-message", receivedMessage, {
          jobKey: id,
        });
      }
    }
  }

  private async queueUnreadMessages(
    bot: Bot,
    messages: proto.IWebMessageInfo[]
  ) {
    for await (const message of messages) {
      await this.queueMessage(bot, message);
    }
  }

  async create(
    phoneNumber: string,
    description: string,
    email: string
  ): Promise<Bot> {
    const db = this.server.db();
    const user = await db.users.findBy({ email });
    const row = await db.whatsappBots.insert({
      phoneNumber,
      description,
      userId: user.id,
    });
    return row;
  }

  async unverify(bot: Bot): Promise<Bot> {
    const directory = this.getAuthDirectory(bot);
    fs.rmSync(directory, { recursive: true, force: true });
    return this.server.db().whatsappBots.updateVerified(bot, false);
  }

  async remove(bot: Bot): Promise<number> {
    const directory = this.getAuthDirectory(bot);
    fs.rmSync(directory, { recursive: true, force: true });
    return this.server.db().whatsappBots.remove(bot);
  }

  async findAll(): Promise<Bot[]> {
    return this.server.db().whatsappBots.findAll();
  }

  async findById(id: string): Promise<Bot> {
    return this.server.db().whatsappBots.findById({ id });
  }

  async findByToken(token: string): Promise<Bot> {
    return this.server.db().whatsappBots.findBy({ token });
  }

  async register(bot: Bot, callback: AuthCompleteCallback): Promise<void> {
    const { version } = await fetchLatestBaileysVersion();
    await this.createConnection(bot, this.server, { version }, callback);
  }

  async send(bot: Bot, phoneNumber: string, message: string): Promise<void> {
    const connection = this.connections[bot.id]?.socket;
    const recipient = `${phoneNumber.replace(/\D+/g, "")}@s.whatsapp.net`;
    await connection.sendMessage(recipient, { text: message });
  }

  async receiveSince(bot: Bot, lastReceivedDate: Date): Promise<void> {
    const connection = this.connections[bot.id]?.socket;
    const messages = await connection.messagesReceivedAfter(
      lastReceivedDate,
      false
    );
    for (const message of messages) {
      this.queueMessage(bot, message);
    }
  }

  async receive(
    bot: Bot,
    _lastReceivedDate: Date
  ): Promise<proto.IWebMessageInfo[]> {
    const connection = this.connections[bot.id]?.socket;
    const messages = await connection.loadAllUnreadMessages();
    return messages;
  }
}
@ -1,34 +0,0 @@
import type { IMain } from "pg-promise";
import type { ISettingsService } from "../services/settings";
import type WhatsappService from "../services/whatsapp";
import type SignaldService from "../services/signald";
import type { IAppConfig } from "../../config";
import type { AppDatabase } from "@digiresilience/metamigo-db";

// add your service interfaces here

// extend the hapi types with our services and config
declare module "@hapi/hapi" {
  export interface Request {
    db(): AppDatabase;
    pgp: IMain;
  }
  export interface Server {
    config(): IAppConfig;
    db(): AppDatabase;
    pgp: IMain;
  }
}

declare module "@hapipal/schmervice" {
  interface AppServices {
    settingsService: ISettingsService;
    whatsappService: WhatsappService;
    signaldService: SignaldService;
  }

  interface SchmerviceDecorator {
    (namespace: "app"): AppServices;
  }
  type ServiceFunctionalInterface = { name: string };
}
@ -1,7 +0,0 @@
export {
  default,
  loadConfig,
  loadConfigRaw,
  IAppConfig,
  IAppConvict,
} from "@digiresilience/metamigo-config";
@ -1,2 +0,0 @@
export * from "./server/index.js";
export * from "./logger.js";
@ -1,8 +0,0 @@
import { defState } from "@digiresilience/montar";
import { configureLogger } from "@digiresilience/metamigo-common";
import config from "@digiresilience/metamigo-config";

export const logger = defState("apiLogger", {
  start: async () => configureLogger(config),
});
export default logger;
@ -1,8 +0,0 @@
import { startWithout } from "@digiresilience/montar";
import "./index.js";

async function runServer(): Promise<void> {
  await startWithout(["worker"]);
}

runServer();
@ -1,27 +0,0 @@
import * as Metamigo from "@digiresilience/metamigo-common";
import { defState } from "@digiresilience/montar";
import Manifest from "./manifest.js";
import config, { IAppConfig } from "../config.js";

export const deployment = async (
  config: IAppConfig,
  start = false
): Promise<Metamigo.Server> => {
  // Build the manifest, which describes all the plugins needed for our application server
  const manifest = await Manifest.build(config);

  // Create the server and optionally start it
  const server = Metamigo.deployment(manifest, config, start);

  return server;
};

export const stopDeployment = async (server: Metamigo.Server): Promise<void> =>
  Metamigo.stopDeployment(server);

const server = defState("server", {
  start: () => deployment(config, true),
  stop: () => stopDeployment(server),
});

export default server;
@ -1,49 +0,0 @@
import * as Glue from "@hapi/glue";
import * as Metamigo from "@digiresilience/metamigo-common";
import * as Blipp from "blipp";
import HapiBasic from "@hapi/basic";
import AppPlugin from "../app/index.js";
import type { IAppConfig } from "../config.js";

const build = async (config: IAppConfig): Promise<Glue.Manifest> => {
  const { port, address } = config.server;
  const metamigoPlugins = Metamigo.defaultPlugins(config);
  return {
    server: {
      port,
      address,
      debug: false, // We use pino not the built-in hapi logger
      routes: {
        validate: {
          failAction: Metamigo.validatingFailAction,
        },
      },
    },
    register: {
      plugins: [
        // Blipp prints the nicely formatted list of endpoints at app boot
        { plugin: Blipp },

        // load the metamigo base plugins
        ...metamigoPlugins,

        // basic authentication, required by hapi-nextauth
        { plugin: HapiBasic },

        // load our main app
        {
          plugin: AppPlugin,
          options: {
            config,
          },
        },
      ],
    },
  };
};

const Manifest = {
  build,
};

export default Manifest;
@ -1,19 +0,0 @@
import * as Worker from "graphile-worker";
import { defState } from "@digiresilience/montar";
import config from "./config.js";

const startWorkerUtils = async (): Promise<Worker.WorkerUtils> => {
  const workerUtils = await Worker.makeWorkerUtils({
    connectionString: config.worker.connection,
  });
  return workerUtils;
};

const stopWorkerUtils = async (): Promise<void> => workerUtils.release();

const workerUtils = defState("apiWorkerUtils", {
  start: startWorkerUtils,
  stop: stopWorkerUtils,
});

export default workerUtils;
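The services above enqueue "signald-message" and "whatsapp-message" jobs through this workerUtils wrapper. A minimal sketch of a graphile-worker runner that would consume those job names; the task bodies, concurrency, and connection string are placeholders, not this repository's actual worker implementation:

import { run } from "graphile-worker";

// Hypothetical standalone runner for the jobs queued by the services above (ESM top-level await).
const runner = await run({
  connectionString: process.env.DATABASE_URL,
  concurrency: 5,
  taskList: {
    "signald-message": async (payload, helpers) => {
      // payload matches the receivedMessage object built in SignaldService.queueMessage
      helpers.logger.info(`signal message for bot ${(payload as any).botId}`);
    },
    "whatsapp-message": async (payload, helpers) => {
      // payload matches the receivedMessage object built in WhatsappService.queueMessage
      helpers.logger.info(`whatsapp message ${(payload as any).waMessageId}`);
    },
  },
});
await runner.promise;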
@ -1,22 +0,0 @@
{
  "extends": "tsconfig-link",
  "compilerOptions": {
    "outDir": "build/main",
    "rootDir": "src",
    "skipLibCheck": true,
    "types": ["jest", "node", "long"],
    "lib": ["es2020", "DOM"],
    "composite": true,
  },
  "include": ["src/**/*.ts", "src/**/.*.ts"],
  "exclude": ["node_modules/**"],
  "references": [
    {"path": "../../packages/metamigo-common" },
    {"path": "../../packages/metamigo-config" },
    {"path": "../../packages/metamigo-db" },
    {"path": "../../packages/hapi-nextauth" },
    {"path": "../../packages/hapi-pg-promise" },
    {"path": "../../packages/node-signald" },
    {"path": "../../packages/montar" }
  ]
}
@ -1,12 +0,0 @@
require("eslint-config-link/patch/modern-module-resolution");
module.exports = {
  extends: [
    "eslint-config-link/profile/node",
    "eslint-config-link/profile/typescript",
    "eslint-config-link/profile/jest",
  ],
  parserOptions: { tsconfigRootDir: __dirname },
  rules: {
    "new-cap": "off"
  },
};
@ -1,53 +0,0 @@
FROM node:20 as base

FROM base AS builder
ARG APP_DIR=/opt/metamigo-cli
RUN mkdir -p ${APP_DIR}/
RUN npm i -g turbo
WORKDIR ${APP_DIR}
COPY . .
RUN turbo prune --scope=@digiresilience/metamigo-cli --docker

FROM base AS installer
ARG APP_DIR=/opt/metamigo-cli
WORKDIR ${APP_DIR}
COPY .gitignore .gitignore
COPY --from=builder ${APP_DIR}/out/json/ .
COPY --from=builder ${APP_DIR}/out/package-lock.json ./package-lock.json
RUN npm i

COPY --from=builder ${APP_DIR}/out/full/ .
RUN npm i -g turbo
RUN turbo run build --filter=metamigo-cli

FROM base AS runner
ARG APP_DIR=/opt/metamigo-cli
WORKDIR ${APP_DIR}/
ARG BUILD_DATE
ARG VERSION
LABEL maintainer="Darren Clarke <darren@redaranj.com>"
LABEL org.label-schema.build-date=$BUILD_DATE
LABEL org.label-schema.version=$VERSION
ENV APP_DIR ${APP_DIR}
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
    apt-get install -y --no-install-recommends \
    dumb-init
RUN mkdir -p ${APP_DIR}
RUN chown -R node ${APP_DIR}/

USER node
WORKDIR ${APP_DIR}
COPY --from=installer ${APP_DIR}/node_modules/ ./node_modules/
COPY --from=installer ${APP_DIR}/packages/ ./packages/
COPY --from=installer ${APP_DIR}/apps/metamigo-cli/ ./apps/metamigo-cli/
COPY --from=installer ${APP_DIR}/apps/metamigo-api/ ./apps/metamigo-api/
COPY --from=installer ${APP_DIR}/apps/metamigo-worker/ ./apps/metamigo-worker/
COPY --from=installer ${APP_DIR}/package.json ./package.json
USER root
WORKDIR ${APP_DIR}/apps/metamigo-cli/
RUN chmod +x docker-entrypoint.sh
USER node
EXPOSE 3000
ENV PORT 3000
ENV NODE_ENV production
ENTRYPOINT ["/opt/metamigo-cli/apps/metamigo-cli/docker-entrypoint.sh"]
@ -1,3 +0,0 @@
{
  "presets": ["babel-preset-link"]
}
@ -1,4 +0,0 @@
#!/usr/bin/env bash


node ./build/main/index.js ${@}
@ -1,18 +0,0 @@
#!/usr/bin/env bash
set -e

if [[ "$1" == "api" ]]; then
  echo "docker-entrypoint: starting api server"
  ./cli db -- migrate
  exec dumb-init ./cli api
elif [[ "$1" == "worker" ]]; then
  echo "docker-entrypoint: starting worker"
  exec dumb-init ./cli worker
elif [[ "$1" == "cli" ]]; then
  echo "docker-entrypoint: starting cli"
  shift 1
  exec ./cli "$@"
else
  echo "docker-entrypoint: missing argument, one of: api, worker, cli"
  exit 1
fi
@ -1,4 +0,0 @@
{
  "preset": "jest-config-link",
  "setupFiles": ["<rootDir>/src/setup.test.ts"]
}
@ -1,43 +0,0 @@
{
  "name": "@digiresilience/metamigo-cli",
  "version": "0.2.0",
  "main": "build/main/index.js",
  "author": "Abel Luck <abel@guardianproject.info>",
  "license": "AGPL-3.0-or-later",
  "type": "module",
  "bin": {
    "metamigo": "./build/main/index.js"
  },
  "dependencies": {
    "@digiresilience/montar": "*",
    "@digiresilience/metamigo-config": "*",
    "@digiresilience/metamigo-common": "*",
    "@digiresilience/metamigo-db": "*",
    "@digiresilience/metamigo-api": "*",
    "@digiresilience/metamigo-worker": "*",
    "commander": "^12.0.0",
    "graphile-migrate": "^1.4.1",
    "graphile-worker": "^0.13.0",
    "node-jose": "^2.2.0",
    "postgraphile": "4.13.0",
    "graphql": "15.8.0"
  },
  "devDependencies": {
    "@types/jest": "^29.5.12",
    "pino-pretty": "^10.3.1",
    "nodemon": "^3.1.0",
    "tsconfig-link": "*",
    "eslint-config-link": "*",
    "jest-config-link": "*",
    "babel-preset-link": "*",
    "typescript": "^5.3.3"
  },
  "scripts": {
    "migrate": "NODE_ENV=development node --unhandled-rejections=strict build/main/index.js db -- migrate",
    "build": "tsc -p tsconfig.json",
    "fix:lint": "eslint src --ext .ts --fix",
    "fmt": "prettier \"src/**/*.ts\" --write",
    "lint": "eslint src --ext .ts && prettier \"src/**/*.ts\" --list-different",
    "test": "echo no tests"
  }
}
@ -1,22 +0,0 @@
import {
  generateConfig,
  printConfigOptions,
} from "@digiresilience/metamigo-common";
import { IAppConfig, IAppConvict } from "@digiresilience/metamigo-config";
import { loadConfigRaw } from "@digiresilience/metamigo-config";

export const genConf = async (): Promise<void> => {
  const c = (await loadConfigRaw()) as any;
  const generated = generateConfig(c) as any;
  console.log(generated);
};

export const genSchema = async (): Promise<void> => {
  const c: any = await loadConfigRaw();
  console.log(c.getSchemaString());
};

export const listConfig = async (): Promise<void> => {
  const c = (await loadConfigRaw()) as any;
  printConfigOptions(c);
};
@ -1,66 +0,0 @@
#!/usr/bin/env node

import { Command } from "commander";
import { startWithout } from "@digiresilience/montar";
import { migrateWrapper } from "@digiresilience/metamigo-db";
import { loadConfig } from "@digiresilience/metamigo-config";
import { genConf, listConfig } from "./config.js";
import { createTokenForTesting, generateJwks } from "./jwks.js";
import { exportGraphqlSchema } from "./metamigo-postgraphile.js";
import "@digiresilience/metamigo-api";
import "@digiresilience/metamigo-worker";

const program = new Command();

export async function runServer(): Promise<void> {
  await startWithout(["worker"]);
}

export async function runWorker(): Promise<void> {
  await startWithout(["server"]);
}

program
  .command("config-generate")
  .description("Generate a sample JSON configuration file (to stdout)")
  .action(genConf);

program
  .command("config-help")
  .description("Prints the entire convict config")
  .action(listConfig);

program
  .command("api")
  .description("Run the application api server")
  .action(runServer);

program
  .command("worker")
  .description("Run the worker to process jobs")
  .action(runWorker);

program
  .command("db <commands...>")
  .description("Run graphile-migrate commands with your app's config loaded.")
  .action(async (args) => {
    const config = await loadConfig();
    return migrateWrapper(args, config);
  });

program
  .command("gen-jwks")
  .description("Generate the JWKS")
  .action(generateJwks);

program
  .command("gen-testing-jwt")
  .description("Generate a JWT for the test suite")
  .action(createTokenForTesting);

program
  .command("export-graphql-schema")
  .description("Export the graphql schema")
  .action(exportGraphqlSchema);

program.parse(process.argv);
@ -1,67 +0,0 @@
import jose from "node-jose";
import * as jwt from "jsonwebtoken";

const generateKeystore = async () => {
  const keystore = jose.JWK.createKeyStore();
  await keystore.generate("oct", 256, {
    alg: "A256GCM",
    use: "enc",
  });
  await keystore.generate("oct", 256, {
    alg: "HS512",
    use: "sig",
  });
  return keystore;
};

const safeString = (input) =>
  Buffer.from(JSON.stringify(input)).toString("base64");

const stringify = (v) => JSON.stringify(v, undefined, 2);

const _generateJwks = async () => {
  const keystore = await generateKeystore();
  const encryption = keystore.all({ use: "enc" })[0].toJSON(true);
  const signing = keystore.all({ use: "sig" })[0].toJSON(true);

  return {
    nextAuth: {
      signingKeyB64: safeString(signing),
      encryptionKeyB64: safeString(encryption),
    },
  };
};

export const generateJwks = async (): Promise<void> => {
  console.log(stringify(await _generateJwks()));
};

export const createTokenForTesting = async (): Promise<void> => {
  const keys = await _generateJwks();
  const signingKey = Buffer.from(
    JSON.parse(
      Buffer.from(keys.nextAuth.signingKeyB64, "base64").toString("utf-8")
    ).k,
    "base64"
  );

  const token = jwt.sign(
    {
      iss: "Test Env",
      iat: 1606893960,
      aud: "metamigo",
      sub: "abel@guardianproject.info",
      name: "Abel Luck",
      email: "abel@guardianproject.info",
      userRole: "admin",
    },
    signingKey,
    { expiresIn: "100y", algorithm: "HS512" }
  );
  console.log("CONFIG");
  console.log(stringify(keys));
  console.log();
  console.log("TOKEN");
  console.log(token);
  console.log();
};
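The testing JWT above is signed with HS512 using the base64 "k" value of the generated signing key. A minimal sketch of verifying such a token the same way, mirroring the key decoding in createTokenForTesting; the function name is illustrative:

import * as jwt from "jsonwebtoken";

// Hypothetical check that a token printed by createTokenForTesting verifies
// against the nextAuth signingKeyB64 value printed alongside it.
function verifyTestToken(token: string, signingKeyB64: string): jwt.JwtPayload {
  const signingKey = Buffer.from(
    JSON.parse(Buffer.from(signingKeyB64, "base64").toString("utf-8")).k,
    "base64"
  );
  return jwt.verify(token, signingKey, {
    algorithms: ["HS512"],
    audience: "metamigo",
  }) as jwt.JwtPayload;
}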
@ -1,40 +0,0 @@
import { writeFileSync } from "node:fs";
import {
  getIntrospectionQuery,
  GraphQLSchema,
  graphqlSync,
  lexicographicSortSchema,
  printSchema,
} from "graphql";
import { createPostGraphileSchema } from "postgraphile";
import pg from "pg";
import { loadConfig } from "@digiresilience/metamigo-config";
import { getPostGraphileOptions } from "@digiresilience/metamigo-db";

const { Pool } = pg;

export const exportGraphqlSchema = async (): Promise<void> => {
  const config = await loadConfig();

  const rootPgPool = new Pool({
    connectionString: config.db.connection,
  });
  const exportSchema = `../../data/schema.graphql`;
  const exportJson = `../../frontend/lib/graphql-schema.json`;
  try {
    const schema = (await createPostGraphileSchema(
      config.postgraphile.authConnection,
      "app_public",
      getPostGraphileOptions()
    )) as unknown as GraphQLSchema;
    const sorted = lexicographicSortSchema(schema);
    const json = graphqlSync({ schema, source: getIntrospectionQuery() });
    writeFileSync(exportSchema, printSchema(sorted));
    writeFileSync(exportJson, JSON.stringify(json));

    console.log(`GraphQL schema exported to ${exportSchema}`);
    console.log(`GraphQL schema json exported to ${exportJson}`);
  } finally {
    rootPgPool.end();
  }
};
@ -1,14 +0,0 @@
{
  "extends": "tsconfig-link",
  "compilerOptions": {
    "incremental": true,
    "outDir": "build/main",
    "rootDir": "src",
    "baseUrl": "./",
    "skipLibCheck": true,
    "types": ["jest", "node"],
    "esModuleInterop": true
  },
  "include": ["src/**/*.ts"],
  "exclude": ["node_modules/**"]
}
@ -13,20 +13,20 @@
    "@emotion/react": "^11.11.4",
    "@emotion/server": "^11.11.0",
    "@emotion/styled": "^11.11.0",
    "@fontsource/playfair-display": "^5.0.21",
    "@fontsource/playfair-display": "^5.0.23",
    "@fontsource/poppins": "^5.0.12",
    "@fontsource/roboto": "^5.0.12",
    "@mui/icons-material": "^5",
    "@mui/lab": "^5.0.0-alpha.167",
    "@mui/lab": "^5.0.0-alpha.168",
    "@mui/material": "^5",
    "@mui/x-data-grid-pro": "^6.19.6",
    "@mui/x-date-pickers-pro": "^6.19.6",
    "date-fns": "^3.3.1",
    "@mui/x-date-pickers-pro": "^6.19.7",
    "date-fns": "^3.5.0",
    "leafcutter-common": "*",
    "material-ui-popup-state": "^5.0.10",
    "mui-chips-input": "^2.1.4",
    "next": "14.1.2",
    "next-auth": "^4.24.6",
    "next": "14.1.3",
    "next-auth": "^4.24.7",
    "react": "18.2.0",
    "react-cookie": "^7.1.0",
    "react-digit-input": "^2.1.0",
@ -42,6 +42,6 @@
    "@types/react": "^18",
    "@types/react-dom": "^18",
    "eslint": "^8",
    "eslint-config-next": "14.1.2"
    "eslint-config-next": "14.1.3"
  }
}
@ -14,8 +14,8 @@
    "html-to-text": "^9.0.5",
    "node-fetch": "^3",
    "pg-promise": "^11.5.4",
    "remeda": "^1.46.2",
    "twilio": "^4.23.0"
    "remeda": "^1.50.1",
    "twilio": "^5.0.1"
  },
  "devDependencies": {
    "@babel/core": "7.24.0",
@ -31,8 +31,8 @@
    "pino-pretty": "^10.3.1",
    "prettier": "^3.2.5",
    "ts-node": "^10.9.2",
    "typedoc": "^0.25.11",
    "typescript": "^5.3.3"
    "typedoc": "^0.25.12",
    "typescript": "^5.4.2"
  },
  "nodemonConfig": {
    "ignore": [
@ -1,5 +1,5 @@
{
  "extends": "tsconfig-link",
  "extends": "tsconfig",
  "compilerOptions": {
    "outDir": "build/main",
    "esModuleInterop": true,
@ -1,4 +0,0 @@
{
  "image": "mcr.microsoft.com/devcontainers/typescript-node:0-20",
  "forwardPorts": [3000]
}
@ -3,7 +3,7 @@ version: "3.4"
services:
  label-studio:
    container_name: label-studio
    build: ./docker/label-studio
    build: ../label-studio
    restart: ${RESTART}
    ports:
      - 8007:8080
@ -5,8 +5,8 @@ services:
    container_name: leafcutter
    restart: ${RESTART}
    build:
      context: .
      dockerfile: ./apps/leafcutter/Dockerfile
      context: ../../
      dockerfile: ../../apps/leafcutter/Dockerfile
    image: registry.gitlab.com/digiresilience/link/link-stack/leafcutter:${LINK_STACK_VERSION}
    expose:
      - "3000"
@ -5,8 +5,8 @@ services:
    container_name: link
    restart: ${RESTART}
    build:
      context: .
      dockerfile: ./apps/link/Dockerfile
      context: ../../
      dockerfile: ../../apps/link/Dockerfile
    image: registry.gitlab.com/digiresilience/link/link-stack/link:${LINK_STACK_VERSION}
    expose:
      - "3000"
@ -33,7 +33,7 @@ x-metamigo-vars:

services:
  metamigo-postgresql:
    build: ./docker/postgresql
    build: ../postgresql
    image: registry.gitlab.com/digiresilience/link/link-stack/postgresql:${LINK_STACK_VERSION}
    container_name: metamigo-postgresql
    restart: ${RESTART}
@ -3,7 +3,7 @@ version: "3.4"
services:
  nginx-proxy:
    container_name: nginx-proxy
    build: ./docker/nginx-proxy
    build: ../nginx-proxy
    restart: ${RESTART}
    ports:
      - "8080:80"
@ -3,15 +3,22 @@ version: "3.4"
services:
  opensearch:
    container_name: opensearch
    build: ./docker/opensearch
    build:
      context: ../../
      dockerfile: ../../opensearch
    image: registry.gitlab.com/digiresilience/link/link-stack/opensearch:${LINK_STACK_VERSION}
    restart: ${RESTART}
    environment:
      - discovery.type=single-node
      # - plugins.security.ssl.transport.enforce_hostname_verification=false
      # - plugins.security.ssl.transport.resolve_hostname=false
      - cluster.routing.allocation.disk.watermark.low=3gb
      - cluster.routing.allocation.disk.watermark.high=2gb
      - cluster.routing.allocation.disk.watermark.flood_stage=500mb
      - cluster.info.update.interval=1m
      # - config.dynamic.http.xff.enabled=true
      # - config.dynamic.http.xff.remoteIpHeader="x-forwarded-for"
      # - config.dynamic.http.xff.internalProxies=".*"
      - node.name=opensearch-node1
      - bootstrap.memory_lock=true
      - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m"
@ -26,21 +33,30 @@ services:
        hard: 65536
    volumes:
      - opensearch-data:/usr/share/opensearch/data
      - ../opensearch/config.yml:/usr/share/opensearch/config/opensearch-security/config.yml
    ports:
      - 9200:9200
      - 9600:9600

  opensearch-dashboards:
    container_name: opensearch-dashboards
    build: ./docker/opensearch-dashboards
    build:
      context: ../../
      dockerfile: ../../opensearch-dashboards
    image: registry.gitlab.com/digiresilience/link/link-stack/opensearch-dashboards:${LINK_STACK_VERSION}
    restart: ${RESTART}
    ports:
      - 5601:5601
    expose:
      - "5601"
    volumes:
      - ../opensearch-dashboards/opensearch_dashboards.yml:/usr/share/opensearch-dashboards/config/opensearch_dashboards.yml
    environment:
      OPENSEARCH_HOSTS: '["https://opensearch:9200"]'
      # OPENSEARCH_SECURITY_AUTH_TYPE: "proxy"
      # OPENSEARCH_SECURITY_PROXYCACHE_USER_HEADER: "x-proxy-user"
      # OPENSEARCH_SECURITY_PROXYCACHE_ROLES_HEADER: "x-proxy-roles"
      # OPENSEARCH_REQUESTHEADERSALLOWLIST: '["securitytenant","Authorization","x-forwarded-for","x-proxy-user","x-proxy-roles"]'

volumes:
  opensearch-data:
@ -16,8 +16,8 @@ x-zammad-vars:
  ELASTICSEARCH_HOST: ${OPENSEARCH_HOST}
  ELASTICSEARCH_USER: ${OPENSEARCH_USER}
  ELASTICSEARCH_PASS: ${OPENSEARCH_PASS}
  ELASTICSEARCH_SSL_VERIFY: false
  ELASTICSEARCH_SCHEMA: https
  ELASTICSEARCH_SSL_VERIFY: false # this doesn't set es_ssl_verify as expected, but ideally it would
  ELASTICSEARCH_SCHEMA: "https"

services:
  zammad-init:
@ -31,7 +31,7 @@ services:
      POSTGRESQL_USER: zammad
      POSTGRESQL_PASS: ${ZAMMAD_DATABASE_PASSWORD}
    build:
      context: ./docker/zammad
      context: ../zammad
      args:
        EMBEDDED: "true"
    image: registry.gitlab.com/digiresilience/link/link-stack/zammad:${LINK_STACK_VERSION}
@ -44,7 +44,7 @@ services:
  zammad-memcached:
    container_name: zammad-memcached
    command: memcached -m 256M
    build: ./docker/memcached
    build: ../memcached
    image: registry.gitlab.com/digiresilience/link/link-stack/memcached:${LINK_STACK_VERSION}
    restart: ${RESTART}
    environment:
@ -61,7 +61,7 @@ services:
    depends_on:
      - zammad-railsserver
    build:
      context: ./docker/zammad
      context: ../zammad
      args:
        EMBEDDED: "true"
    image: registry.gitlab.com/digiresilience/link/link-stack/zammad:${LINK_STACK_VERSION}
@ -81,7 +81,7 @@ services:
      <<: [ *common-global-variables, *common-zammad-variables ]
      POSTGRES_USER: zammad
      POSTGRES_PASSWORD: ${ZAMMAD_DATABASE_PASSWORD}
    build: ./docker/postgresql
    build: ../postgresql
    image: registry.gitlab.com/digiresilience/link/link-stack/postgresql:${LINK_STACK_VERSION}
    restart: ${RESTART}
    ports:
@ -101,7 +101,7 @@ services:
      <<: [ *common-global-variables, *common-zammad-variables ]
      RAILS_RELATIVE_URL_ROOT: /zammad
    build:
      context: ./docker/zammad
      context: ../zammad
      args:
        EMBEDDED: "true"
    image: registry.gitlab.com/digiresilience/link/link-stack/zammad:${LINK_STACK_VERSION}
@ -112,7 +112,7 @@ services:

  zammad-redis:
    container_name: zammad-redis
    build: ./docker/redis
    build: ../redis
    image: registry.gitlab.com/digiresilience/link/link-stack/redis:${LINK_STACK_VERSION}
    restart: ${RESTART}
    environment:
@ -131,7 +131,7 @@ services:
    environment:
      <<: [ *common-global-variables, *common-zammad-variables ]
    build:
      context: ./docker/zammad
      context: ../zammad
      args:
        EMBEDDED: "true"
    image: registry.gitlab.com/digiresilience/link/link-stack/zammad:${LINK_STACK_VERSION}
@ -151,7 +151,7 @@ services:
    environment:
      <<: [ *common-global-variables, *common-zammad-variables ]
    build:
      context: ./docker/zammad
      context: ../zammad
      args:
        EMBEDDED: "true"
    image: registry.gitlab.com/digiresilience/link/link-stack/zammad:${LINK_STACK_VERSION}
@ -161,8 +161,6 @@ services:
      - zammad-storage:/opt/zammad/storage

volumes:
  elasticsearch-data:
    driver: local
  opensearch-data:
    driver: local
  postgresql-data:
@ -1 +0,0 @@
FROM docker.elastic.co/elasticsearch/elasticsearch:8.12.2
@ -1 +0,0 @@
FROM heartexlabs/label-studio:1.11.0
docker/opensearch-dashboards/opensearch_dashboards.yml (new file, 15 lines)
@ -0,0 +1,15 @@
opensearch.hosts: [https://opensearch:9200]
opensearch.ssl.verificationMode: none
opensearch.username: kibanaserver
opensearch.password: kibanaserver
opensearch.requestHeadersAllowlist: ["securitytenant","Authorization","x-forwarded-for","x-proxy-user","x-proxy-roles"]
opensearch_security.auth.type: "proxy"
opensearch_security.proxycache.user_header: "x-proxy-user"
opensearch_security.proxycache.roles_header: "x-proxy-roles"

opensearch_security.multitenancy.enabled: true
opensearch_security.multitenancy.tenants.preferred: [Private, Global]
opensearch_security.readonly_mode.roles: [kibana_read_only]
# Use this setting if you are running opensearch-dashboards without https
opensearch_security.cookie.secure: false
server.host: '0.0.0.0'
docker/opensearch/config.yml (new file, 36 lines)
@ -0,0 +1,36 @@
_meta:
  type: "config"
  config_version: 2

config:
  dynamic:
    http:
      anonymous_auth_enabled: false
      xff:
        enabled: true
        remoteIpHeader: "x-forwarded-for"
        internalProxies: ".*"
    authc:
      basic_internal_auth_domain:
        description: "Authenticate via HTTP Basic against internal users database"
        http_enabled: true
        transport_enabled: true
        order: 4
        http_authenticator:
          type: basic
          challenge: true
        authentication_backend:
          type: intern
      proxy_auth_domain:
        description: "Authenticate via proxy"
        http_enabled: true
        transport_enabled: true
        order: 0
        http_authenticator:
          type: proxy
          challenge: false
          config:
            user_header: "x-proxy-user"
            roles_header: "x-proxy-roles"
        authentication_backend:
          type: noop
docker/signal-cli-rest-api/Dockerfile (new file, 1 line)
@ -0,0 +1 @@
FROM bbernhard/signal-cli-rest-api:0.81

@ -1 +0,0 @@
FROM signald/signald:0.23.2
package-lock.json (generated, 24664 lines changed): file diff suppressed because it is too large.

package.json (50 lines changed)
@@ -9,31 +9,31 @@
  "dev:metamigo": "dotenv -- turbo run dev --concurrency 30 --filter=!link --filter=!leafcutter",
  "migrate": "dotenv -- npm run migrate --workspace=@digiresilience/metamigo-cli",
  "fmt": "turbo run fmt",
- "docker:all:up": "CURRENT_UID=$(CURRENT_UID) docker compose -f docker-compose.zammad.yml -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml -f docker-compose.opensearch.yml -f docker-compose.leafcutter.yml -f docker-compose.link.yml -f docker-compose.label-studio.yml up -d",
- "docker:all:down": "docker compose -f docker-compose.zammad.yml -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml -f docker-compose.opensearch.yml -f docker-compose.leafcutter.yml -f docker-compose.link.yml down",
- "docker:all:build": "docker compose -f docker-compose.zammad.yml -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml -f docker-compose.opensearch.yml -f docker-compose.leafcutter.yml -f docker-compose.link.yml up --build -d",
- "docker:link:dev:up": "docker compose -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml -f docker-compose.zammad.yml -f docker-compose.label-studio.yml up -d",
- "docker:link:dev:down": "docker compose -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml -f docker-compose.zammad.yml -f docker-compose.label-studio.yml down",
- "docker:link:up": "docker compose -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml -f docker-compose.zammad.yml -f docker-compose.link.yml up -d",
- "docker:link:down": "docker compose -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml -f docker-compose.zammad.yml -f docker-compose.link.yml down",
- "docker:link:build": "docker compose -f docker-compose.link.yml up --build -d",
- "docker:opensearch:up": "docker compose -f docker-compose.opensearch.yml up -d",
- "docker:opensearch:down": "docker compose -f docker-compose.opensearch.yml down",
- "docker:opensearch:build": "docker compose -f docker-compose.opensearch.yml up --build -d",
- "docker:leafcutter:dev:up": "docker compose -f docker-compose.opensearch.yml up -d",
- "docker:leafcutter:dev:down": "docker compose -f docker-compose.opensearch.yml down",
- "docker:leafcutter:up": "docker compose -f docker-compose.opensearch.yml -f docker-compose.leafcutter.yml up -d",
- "docker:leafcutter:down": "docker compose -f docker-compose.opensearch.yml -f docker-compose.leafcutter.yml down",
- "docker:leafcutter:build": "docker compose -f docker-compose.leafcutter.yml up --build -d",
- "docker:zammad:up": "docker compose -f docker-compose.zammad.yml -f docker-compose.opensearch.yml up -d",
- "docker:zammad:down": "docker compose -f docker-compose.zammad.yml -f docker-compose.opensearch.yml down",
- "docker:zammad:build": "docker compose -f docker-compose.zammad.yml -f docker-compose.opensearch.yml up --build -d",
- "docker:metamigo:dev:up": "docker compose -f docker-compose.metamigo-postgresql.yml -f docker-compose.zammad.yml up -d",
- "docker:metamigo:dev:down": "docker compose -f docker-compose.metamigo-postgresql.yml -f docker-compose.zammad.yml down",
- "docker:metamigo:up": "docker compose -f docker-compose.zammad.yml -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml up -d",
- "docker:metamigo:down": "docker compose -f docker-compose.zammad.yml -f docker-compose.metamigo-postgresql.yml -f docker-compose.metamigo.yml down",
- "docker:label-studio:up": "docker compose -f docker-compose.label-studio.yml -f docker-compose.metamigo-postgresql.yml up -d",
- "docker:label-studio:down": "docker compose -f docker-compose.label-studio.yml -f docker-compose.metamigo-postgresql.yml down",
+ "docker:all:up": "CURRENT_UID=$(CURRENT_UID) docker compose -f docker/compose/zammad.yml -f docker/compose/metamigo-postgresql.yml -f docker/compose/metamigo.yml -f docker/compose/opensearch.yml -f docker/compose/leafcutter.yml -f docker/compose/link.yml -f docker/compose/label-studio.yml up -d",
+ "docker:all:down": "docker compose -f docker/compose/zammad.yml -f docker/compose/metamigo-postgresql.yml -f docker/compose/metamigo.yml -f docker/compose/opensearch.yml -f docker/compose/leafcutter.yml -f docker/compose/link.yml down",
+ "docker:all:build": "docker compose -f docker/compose/zammad.yml -f docker/compose/metamigo-postgresql.yml -f docker/compose/metamigo.yml -f docker/compose/opensearch.yml -f docker/compose/leafcutter.yml -f docker/compose/link.yml up --build -d",
+ "docker:link:dev:up": "docker compose --env-file -f docker/compose/opensearch.yml -f docker/compose/zammad.yml up -d",
+ "docker:link:dev:down": "docker compose -f docker/compose/opensearch.yml -f docker/compose/zammad.yml down",
+ "docker:link:up": "docker compose -f docker/compose/metamigo-postgresql.yml -f docker/compose/metamigo.yml -f docker/compose/zammad.yml -f docker/compose/link.yml up -d",
+ "docker:link:down": "docker compose -f docker/compose/metamigo-postgresql.yml -f docker/compose/metamigo.yml -f docker/compose/zammad.yml -f docker/compose/link.yml down",
+ "docker:link:build": "docker compose -f docker/compose/link.yml up --build -d",
+ "docker:opensearch:up": "docker compose -f docker/compose/opensearch.yml up -d",
+ "docker:opensearch:down": "docker compose -f docker/compose/opensearch.yml down",
+ "docker:opensearch:build": "docker compose -f docker/compose/opensearch.yml up --build -d",
+ "docker:leafcutter:dev:up": "docker compose -f docker/compose/opensearch.yml up -d",
+ "docker:leafcutter:dev:down": "docker compose -f docker/compose/opensearch.yml down",
+ "docker:leafcutter:up": "docker compose -f docker/compose/opensearch.yml -f docker/compose/leafcutter.yml up -d",
+ "docker:leafcutter:down": "docker compose -f docker/compose/opensearch.yml -f docker/compose/leafcutter.yml down",
+ "docker:leafcutter:build": "docker compose -f docker/compose/leafcutter.yml up --build -d",
+ "docker:zammad:up": "docker compose -f docker/compose/zammad.yml -f docker/compose/opensearch.yml up -d",
+ "docker:zammad:down": "docker compose -f docker/compose/zammad.yml -f docker/compose/opensearch.yml down",
+ "docker:zammad:build": "docker compose -f docker/compose/zammad.yml -f docker/compose/opensearch.yml up --build -d",
+ "docker:metamigo:dev:up": "docker compose -f docker/compose/metamigo-postgresql.yml -f docker/compose/zammad.yml up -d",
+ "docker:metamigo:dev:down": "docker compose -f docker/compose/metamigo-postgresql.yml -f docker/compose/zammad.yml down",
+ "docker:metamigo:up": "docker compose -f docker/compose/zammad.yml -f docker/compose/metamigo-postgresql.yml -f docker/compose/metamigo.yml up -d",
+ "docker:metamigo:down": "docker compose -f docker/compose/zammad.yml -f docker/compose/metamigo-postgresql.yml -f docker/compose/metamigo.yml down",
+ "docker:label-studio:up": "docker compose -f docker/compose/label-studio.yml -f docker/compose/metamigo-postgresql.yml up -d",
+ "docker:label-studio:down": "docker compose -f docker/compose/label-studio.yml -f docker-compose.metamigo-postgresql.yml down",
  "upgrade:setup": "npm i -g npm-check-updates",
  "upgrade:check": "ncu && ncu -ws -x graphql -x postgraphile",
  "upgrade:all": "ncu -u && ncu -ws -u -x graphql -x postgraphile -x graphile-worker && npm i",
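All of the renamed scripts share one shape: a list of compose files, now under `docker/compose/`, each passed with `-f`, followed by `up -d`, `down`, or `up --build -d`. A minimal TypeScript sketch of assembling and spawning such a command line with Node built-ins; the helper name and the service grouping are hypothetical, chosen only to mirror the scripts above:

```typescript
import { spawn } from "node:child_process";

// Compose-file stems as they appear under docker/compose/ in this commit.
type Service =
  | "zammad"
  | "metamigo-postgresql"
  | "metamigo"
  | "opensearch"
  | "leafcutter"
  | "link"
  | "label-studio";

// Build the argument list the npm scripts spell out by hand, e.g.
// composeArgs(["zammad", "opensearch"], "up") ->
//   ["compose", "-f", "docker/compose/zammad.yml", "-f", "docker/compose/opensearch.yml", "up", "-d"]
const composeArgs = (services: Service[], action: "up" | "down" | "build"): string[] => [
  "compose",
  ...services.flatMap((s) => ["-f", `docker/compose/${s}.yml`]),
  ...(action === "up" ? ["up", "-d"] : action === "build" ? ["up", "--build", "-d"] : ["down"]),
];

// Roughly what `npm run docker:zammad:up` ends up doing.
spawn("docker", composeArgs(["zammad", "opensearch"], "up"), { stdio: "inherit" });
```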
@@ -1,13 +0,0 @@
module.exports = {
  "env": {
    "browser": true,
    "commonjs": true,
    "es2021": true
  },
  "extends": "eslint:recommended",
  "parserOptions": {
    "ecmaVersion": 12
  },
  "rules": {
  }
};
@@ -1,27 +0,0 @@
# Changelog

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

### [0.2.3](https://gitlab.com/digiresilience.org/link/babel-preset-amigo/compare/0.2.2...0.2.3) (2021-10-08)

### [0.2.2](https://gitlab.com/digiresilience.org/link/babel-preset-amigo/compare/0.2.1...0.2.2) (2021-05-25)

### [0.2.1](https://gitlab.com/digiresilience.org/link/babel-preset-amigo/compare/0.2.0...0.2.1) (2021-05-03)


### Features

* bump babel to 7.14 ([fec59d5](https://gitlab.com/digiresilience.org/link/babel-preset-amigo/commit/fec59d563dc0b0f1c3ace754d88091f0bdbf1afc))

## [0.2.0](https://gitlab.com/digiresilience.org/link/babel-preset-amigo/compare/0.1.0...0.2.0) (2020-11-20)


### ⚠ BREAKING CHANGES

* upgrade deps

### Features

* upgrade deps ([46a9ff0](https://gitlab.com/digiresilience.org/link/babel-preset-amigo/commit/46a9ff0883e1f99ab0e918fcbe8c90f4545d58cf))

## 0.1.0 (2020-10-09)
@@ -1,62 +0,0 @@
# babel-preset-amigo

A shared babel config for [CDR Tech][cdrtech].

# Install

We recommend using [@digiresilience/amigo-dev][amigo-dev] to manage your dev dependencies.

[amigo-dev]: https://gitlab.com/digiresilience/link/amigo-dev

But if you want to do it manually, then:

```console
$ npm install --save-dev @digiresilience/babel-preset-amigo
```

# Usage

**`babel.config.json`**

```json
{
  "presets": [
    "@digiresilience/babel-preset-amigo"
  ]
}
```

# Credits

Copyright © 2020-present [Center for Digital Resilience][cdr]

### Contributors

| [![Abel Luck][abelxluck_avatar]][abelxluck_homepage]<br/>[Abel Luck][abelxluck_homepage] |
|---|

[abelxluck_homepage]: https://gitlab.com/abelxluck
[abelxluck_avatar]: https://secure.gravatar.com/avatar/0f605397e0ead93a68e1be26dc26481a?s=100&d=identicon

### License

[](https://www.gnu.org/licenses/agpl-3.0.en.html)

GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.

[cdrtech]: https://digiresilience.org/tech/
[cdr]: https://digiresilience.org
@@ -1,6 +0,0 @@
module.exports = () => ({
  presets: [
    [require("@babel/preset-env"), { targets: { node: "current" } }],
    require("@babel/preset-typescript"),
  ],
});
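The preset simply chains `@babel/preset-env`, targeted at the running Node version, with `@babel/preset-typescript`. A minimal sketch of exercising such a preset directly through `@babel/core`; the sample source string and file name are made up, and the top-level `await` assumes an ESM context:

```typescript
import { transformAsync } from "@babel/core";
// CommonJS preset module loaded through ESM default-import interop.
import amigoPreset from "./index.js";

const result = await transformAsync("const answer: number = 42;", {
  filename: "example.ts",   // gives @babel/preset-typescript a .ts extension to key off
  presets: [amigoPreset],   // a preset entry may be a function, as exported above
  configFile: false,        // ignore any babel.config.* so only this preset applies
});

// The type annotation is stripped; syntax supported by the current Node version is kept as-is.
console.log(result?.code);
```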
@@ -1,20 +0,0 @@
{
  "name": "babel-preset-link",
  "version": "0.2.3",
  "description": "amigo's babel preset",
  "author": "Abel Luck <abel@guardianproject.info>",
  "license": "AGPL-3.0-or-later",
  "private": false,
  "scripts": {
    "lint": "eslint index.js"
  },
  "dependencies": {
    "@babel/core": "7.24.0",
    "@babel/preset-env": "7.24.0",
    "@babel/preset-typescript": "7.23.3"
  },
  "peerDependencies": {},
  "devDependencies": {
    "eslint": "^8.57.0"
  }
}
@@ -1,5 +1,5 @@
  {
- "name": "eslint-config-link",
+ "name": "eslint-config",
  "version": "0.3.10",
  "description": "amigo's eslint config",
  "author": "Abel Luck <abel@guardianproject.info>",
@ -10,8 +10,8 @@
  },
  "dependencies": {
  "@rushstack/eslint-patch": "^1.7.2",
- "@typescript-eslint/eslint-plugin": "^7.1.1",
- "@typescript-eslint/parser": "^7.1.1",
+ "@typescript-eslint/eslint-plugin": "^7.2.0",
+ "@typescript-eslint/parser": "^7.2.0",
  "eslint-config-prettier": "^9.1.0",
  "eslint-config-xo-space": "^0.35.0",
  "eslint-plugin-cypress": "^2.15.1",
@ -30,6 +30,6 @@
  "devDependencies": {
  "eslint": "^8.57.0",
  "jest": "^29.7.0",
- "typescript": "^5.3.3"
+ "typescript": "^5.4.2"
  }
  }
@@ -1,9 +0,0 @@
require('eslint-config-link/patch/modern-module-resolution');
module.exports = {
  extends: [
    "eslint-config-link/profile/node",
    "eslint-config-link/profile/typescript"
  ],
  parserOptions: { tsconfigRootDir: __dirname }
};
packages/hapi-nextauth/.gitignore (vendored, 11 lines)

@@ -1,11 +0,0 @@
.idea/*
.nyc_output
build
node_modules
test
src/*/*.js
coverage
*.log
package-lock.json
.npmrc
junit.xml
@@ -1,11 +0,0 @@
.eslintrc.js
.editorconfig
.prettierignore
.versionrc
Makefile
.gitlab-ci.yml
coverage
jest*
tsconfig*
*.log
test*

@@ -1,2 +0,0 @@
# package.json is formatted by package managers, so we ignore it here
package.json
@@ -1,45 +0,0 @@
# Changelog

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

### [0.2.1](https://digiresilience.org/link/hapi-users/compare/0.2.0...0.2.1) (2021-10-08)

## [0.2.0](https://digiresilience.org/link/hapi-users/compare/0.1.0...0.2.0) (2021-05-03)


### ⚠ BREAKING CHANGES

* update deps

### Features

* update deps ([4fdf4d0](https://digiresilience.org/link/hapi-users/commit/4fdf4d0a2a25f76f1d3c27868145b0362e819195))

## [0.1.0](https://digiresilience.org/link/hapi-users/compare/0.0.3...0.1.0) (2021-04-30)


### ⚠ BREAKING CHANGES

* upgrade next-auth to 3.19.3

### Bug Fixes

* upgrade amigo-dev to 0.2.3 ([3fc9eaa](https://digiresilience.org/link/hapi-users/commit/3fc9eaa44658982887d6b8e6b6dc89044a7357de))
* upgrade next-auth to 3.19.3 ([cfb19b5](https://digiresilience.org/link/hapi-users/commit/cfb19b5ef43dd493fa1795c28b47c2d973f40132))

### [0.0.3](https://digiresilience.org/link/hapi-users/compare/0.0.2...0.0.3) (2020-11-24)


### Bug Fixes

* don't require package.json ([d9ca860](https://digiresilience.org/link/hapi-users/commit/d9ca860e8feeb46e9f19a1295313fe8f1efb45b5))

### [0.0.2](https://digiresilience.org/link/hapi-users/compare/0.0.1...0.0.2) (2020-11-24)


### Features

* do not register @hapi/basic, but declare a dependency on it ([8775d01](https://digiresilience.org/link/hapi-users/commit/8775d01778c42711d0b4aec15b0d25c0c7c040b8))
* implement basic auth for endpoint authorization ([0834f2e](https://digiresilience.org/link/hapi-users/commit/0834f2e9f2a618287767c18797b1ad7665b22bb1))

### 0.0.1 (2020-11-20)
@ -1,616 +0,0 @@
|
|||
### GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc.
|
||||
<https://fsf.org/>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
### Preamble
|
||||
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains
|
||||
free software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing
|
||||
under this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
### TERMS AND CONDITIONS
|
||||
|
||||
#### 0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public
|
||||
License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds
|
||||
of works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of
|
||||
an exact copy. The resulting work is called a "modified version" of
|
||||
the earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user
|
||||
through a computer network, with no transfer of a copy, is not
|
||||
conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices" to
|
||||
the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
#### 1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work for
|
||||
making modifications to it. "Object code" means any non-source form of
|
||||
a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users can
|
||||
regenerate automatically from other parts of the Corresponding Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that same
|
||||
work.
|
||||
|
||||
#### 2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not convey,
|
||||
without conditions so long as your license otherwise remains in force.
|
||||
You may convey covered works to others for the sole purpose of having
|
||||
them make modifications exclusively for you, or provide you with
|
||||
facilities for running those works, provided that you comply with the
|
||||
terms of this License in conveying all material for which you do not
|
||||
control copyright. Those thus making or running the covered works for
|
||||
you must do so exclusively on your behalf, under your direction and
|
||||
control, on terms that prohibit them from making any copies of your
|
||||
copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under the
|
||||
conditions stated below. Sublicensing is not allowed; section 10 makes
|
||||
it unnecessary.
|
||||
|
||||
#### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such
|
||||
circumvention is effected by exercising rights under this License with
|
||||
respect to the covered work, and you disclaim any intention to limit
|
||||
operation or modification of the work as a means of enforcing, against
|
||||
the work's users, your or third parties' legal rights to forbid
|
||||
circumvention of technological measures.
|
||||
|
||||
#### 4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
#### 5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these
|
||||
conditions:
|
||||
|
||||
- a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
- b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under
|
||||
section 7. This requirement modifies the requirement in section 4
|
||||
to "keep intact all notices".
|
||||
- c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
- d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
#### 6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms of
|
||||
sections 4 and 5, provided that you also convey the machine-readable
|
||||
Corresponding Source under the terms of this License, in one of these
|
||||
ways:
|
||||
|
||||
- a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
- b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the Corresponding
|
||||
Source from a network server at no charge.
|
||||
- c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
- d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
- e) Convey the object code using peer-to-peer transmission,
|
||||
provided you inform other peers where the object code and
|
||||
Corresponding Source of the work are being offered to the general
|
||||
public at no charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal,
|
||||
family, or household purposes, or (2) anything designed or sold for
|
||||
incorporation into a dwelling. In determining whether a product is a
|
||||
consumer product, doubtful cases shall be resolved in favor of
|
||||
coverage. For a particular product received by a particular user,
|
||||
"normally used" refers to a typical or common use of that class of
|
||||
product, regardless of the status of the particular user or of the way
|
||||
in which the particular user actually uses, or expects or is expected
|
||||
to use, the product. A product is a consumer product regardless of
|
||||
whether the product has substantial commercial, industrial or
|
||||
non-consumer uses, unless such uses represent the only significant
|
||||
mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to
|
||||
install and execute modified versions of a covered work in that User
|
||||
Product from a modified version of its Corresponding Source. The
|
||||
information must suffice to ensure that the continued functioning of
|
||||
the modified object code is in no case prevented or interfered with
|
||||
solely because modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or
|
||||
updates for a work that has been modified or installed by the
|
||||
recipient, or for the User Product in which it has been modified or
|
||||
installed. Access to a network may be denied when the modification
|
||||
itself materially and adversely affects the operation of the network
|
||||
or violates the rules and protocols for communication across the
|
||||
network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
#### 7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders
|
||||
of that material) supplement the terms of this License with terms:
|
||||
|
||||
- a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
- b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
- c) Prohibiting misrepresentation of the origin of that material,
|
||||
or requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
- d) Limiting the use for publicity purposes of names of licensors
|
||||
or authors of the material; or
|
||||
- e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
- f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions
|
||||
of it) with contractual assumptions of liability to the recipient,
|
||||
for any liability that these contractual assumptions directly
|
||||
impose on those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions; the
|
||||
above requirements apply either way.
|
||||
|
||||
#### 8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your license
|
||||
from a particular copyright holder is reinstated (a) provisionally,
|
||||
unless and until the copyright holder explicitly and finally
|
||||
terminates your license, and (b) permanently, if the copyright holder
|
||||
fails to notify you of the violation by some reasonable means prior to
|
||||
60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
#### 9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or run
|
||||
a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
#### 10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
#### 11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims owned
|
||||
or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within the
|
||||
scope of its coverage, prohibits the exercise of, or is conditioned on
|
||||
the non-exercise of one or more of the rights that are specifically
|
||||
granted under this License. You may not convey a covered work if you
|
||||
are a party to an arrangement with a third party that is in the
|
||||
business of distributing software, under which you make payment to the
|
||||
third party based on the extent of your activity of conveying the
|
||||
work, and under which the third party grants, to any of the parties
|
||||
who would receive the covered work from you, a discriminatory patent
|
||||
license (a) in connection with copies of the covered work conveyed by
|
||||
you (or copies made from those copies), or (b) primarily for and in
|
||||
connection with specific products or compilations that contain the
|
||||
covered work, unless you entered into that arrangement, or that patent
|
||||
license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
#### 12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under
|
||||
this License and any other pertinent obligations, then as a
|
||||
consequence you may not convey it at all. For example, if you agree to
|
||||
terms that obligate you to collect a royalty for further conveying
|
||||
from those to whom you convey the Program, the only way you could
|
||||
satisfy both those terms and this License would be to refrain entirely
|
||||
from conveying the Program.
|
||||
|
||||
#### 13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your
|
||||
version supports such interaction) an opportunity to receive the
|
||||
Corresponding Source of your version by providing access to the
|
||||
Corresponding Source from a network server at no charge, through some
|
||||
standard or customary means of facilitating copying of software. This
|
||||
Corresponding Source shall include the Corresponding Source for any
|
||||
work covered by version 3 of the GNU General Public License that is
|
||||
incorporated pursuant to the following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
#### 14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions
|
||||
of the GNU Affero General Public License from time to time. Such new
|
||||
versions will be similar in spirit to the present version, but may
|
||||
differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever
|
||||
published by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future versions
|
||||
of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
#### 15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
|
||||
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
|
||||
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
|
||||
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
|
||||
CORRECTION.
|
||||
|
||||
#### 16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
|
||||
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
|
||||
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
|
||||
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
|
||||
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
|
||||
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
|
||||
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
#### 17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
|
@@ -1,82 +0,0 @@
# hapi-nextauth

This is a plugin for hapi.js that exposes [NextAuth's database adapter](https://next-auth.js.org/tutorials/creating-a-database-adapter) via HTTP. Bring your own database.

## Usage

```typescript
import * as Hapi from "@hapi/hapi";
import Joi from "joi";
import NextAuthPlugin from "@digiresilience/hapi-nextauth";
import type { AdapterInstance } from "next-auth/adapters";


const server = new Hapi.Server();

// the validator must be registered before registering the plugin
await server.validator(Joi);

const nextAuthAdapterFactory: AdapterInstance = (request: Hapi.Request) => {
  ... instantiate your next auth adapter ...
}


// register the plugin
await server.register({
  plugin: NextAuthPlugin,
  options: {
    // the only required parameter is a function that returns your implementation of the NextAuthAdapter
    nextAuthAdapterFactory,
}});
```

Reference the [next-auth typings](https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/next-auth/adapters.d.ts#L38-L77) for the adapter interface.

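A minimal sketch of what such a factory can look like, assuming a hypothetical `buildAdapter` helper from your own codebase that implements the adapter interface referenced above:

```typescript
import type * as Hapi from "@hapi/hapi";

// Hypothetical helper from your own codebase: it wraps your database in the
// adapter interface linked above. It is not provided by this plugin.
import { buildAdapter } from "./my-adapter";

// The factory receives the current Hapi request, so the adapter can reuse
// per-request context (here just the request logger, as an example).
const nextAuthAdapterFactory = (request: Hapi.Request) =>
  buildAdapter({ log: (msg: string) => request.log(["nextauth"], msg) });

export default nextAuthAdapterFactory;
```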
Options consist of:

- `nextAuthAdapterFactory` - a function that returns your implementation of the NextAuthAdapter, it takes the Hapi Request as the sole argument.
- `basePath` - a string that all next auth endpoints will be served from
- `sharedSecret` - the secret used for basic authentication to the nextauth endpoints
- `validators` - an object containing
  - `profile` - a Joi schema that validates a profile
  - `user` - a Joi schema that validates a user
  - `userId` - a Joi schema that validates a userId
  - `session` - a Joi schema that validates a session
- `tags` - tags to add to the endpoints

Defaults are defined in [`src/index.ts`](src/index.ts)

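Putting the options listed above together, a minimal registration sketch; the port, base path, environment-variable name, and the deliberately loose Joi schemas are placeholders for illustration, not defaults of this plugin:

```typescript
import * as Hapi from "@hapi/hapi";
import Joi from "joi";
import NextAuthPlugin from "@digiresilience/hapi-nextauth";
// Hypothetical factory module, such as the sketch shown earlier.
import nextAuthAdapterFactory from "./next-auth-adapter-factory";

const server = new Hapi.Server({ port: 8080 }); // illustrative port
await server.validator(Joi);                    // must happen before the plugin is registered

await server.register({
  plugin: NextAuthPlugin,
  options: {
    nextAuthAdapterFactory,
    // Everything below is optional; defaults live in src/index.ts.
    basePath: "/nextauth",                            // illustrative path
    sharedSecret: process.env.NEXTAUTH_SHARED_SECRET, // assumed env var name for the basic-auth secret
    validators: {
      // Loose placeholder schemas; tighten these to match your user model.
      profile: Joi.object().unknown(true),
      user: Joi.object().unknown(true),
      userId: Joi.alternatives().try(Joi.string(), Joi.number()),
      session: Joi.object().unknown(true),
    },
    tags: ["api", "nextauth"],
  },
});

await server.start();
```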
## Credits

Copyright © 2020-present [Center for Digital Resilience][cdr]

### Contributors

| [![Abel Luck][abelxluck_avatar]][abelxluck_homepage]<br/>[Abel Luck][abelxluck_homepage] |
| ---------------------------------------------------------------------------------------- |

[abelxluck_homepage]: https://gitlab.com/abelxluck
[abelxluck_avatar]: https://secure.gravatar.com/avatar/0f605397e0ead93a68e1be26dc26481a?s=100&d=identicon

### License

[](https://www.gnu.org/licenses/agpl-3.0.en.html)

GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.

[cdrtech]: https://digiresilience.org/tech/
[cdr]: https://digiresilience.org
@@ -1,5 +0,0 @@
{
  "presets": [
    "babel-preset-link"
  ]
}

@@ -1,4 +0,0 @@
{
  "preset": "jest-config-link"
}
@@ -1,35 +0,0 @@
{
  "name": "@digiresilience/hapi-nextauth",
  "version": "1.0.0",
  "description": "a plugin for hapi.js that exposes NextAuth's database adapter via HTTP",
  "main": "build/main/index.js",
  "type": "module",
  "author": "Abel Luck <abel@guardianproject.info>",
  "license": "AGPL-3.0-or-later",
  "private": false,
  "devDependencies": {
    "@hapi/basic": "^7.0.2",
    "@types/jest": "^29.5.12",
    "babel-preset-link": "*",
    "eslint-config-link": "*",
    "jest-config-link": "*",
    "tsc-watch": "^6.0.4",
    "tsconfig-link": "*"
  },
  "dependencies": {
    "@hapi/hapi": "^21.3.3",
    "@hapi/hoek": "^11.0.4",
    "joi": "^17.12.2",
    "next-auth": "4.24.6"
  },
  "scripts": {
    "build": "tsc -p tsconfig.json",
    "fix:lint": "eslint src --ext .ts --fix",
    "fmt": "prettier \"src/**/*.ts\" --write",
    "test": "jest --coverage --forceExit --detectOpenHandles --reporters=default --reporters=jest-junit",
    "lint": "eslint src --ext .ts",
    "lint-fmt": "prettier \"src/**/*.ts\" --list-different",
    "doc": "typedoc src/ --exclude '**/*.test.ts' --exclude '**/*.spec.ts' --name $npm_package_name --readme README.md --target es2019 --mode file --out build/docs",
    "dev": "tsc-watch --build --noClear"
  }
}
Some files were not shown because too many files have changed in this diff.