Add all repos

This commit is contained in:
Darren Clarke 2023-02-13 12:41:30 +00:00
parent faa12c60bc
commit 8a91c9b89b
369 changed files with 29047 additions and 28 deletions

View file

@ -0,0 +1,7 @@
{
"image": "mcr.microsoft.com/devcontainers/universal:2",
"runArgs": ["--device=/dev/net/tun"],
"features": {
"ghcr.io/tailscale/codespace/tailscale": {}
}
}

View file

@ -1,12 +0,0 @@
services:
app:
entrypoint:
- sleep
- infinity
image: registry.gitlab.com/redaranj/dev-environment:latest
init: true
volumes:
- type: bind
source: /var/run/docker.sock
target: /var/run/docker.sock

View file

@ -40,7 +40,7 @@ services:
zammad-elasticsearch: zammad-elasticsearch:
environment: environment:
- discovery.type=single-node - discovery.type=single-node
image: ${ZAMMAD_ELASTICSEARCH_IMAGE} build: ./elasticsearch
restart: ${RESTART} restart: ${RESTART}
volumes: volumes:
- elasticsearch-data:/usr/share/elasticsearch/data - elasticsearch-data:/usr/share/elasticsearch/data
@ -53,14 +53,14 @@ services:
<<: *common-zammad-variables <<: *common-zammad-variables
POSTGRESQL_USER: zammad POSTGRESQL_USER: zammad
POSTGRESQL_PASS: ${ZAMMAD_DATABASE_PASSWORD} POSTGRESQL_PASS: ${ZAMMAD_DATABASE_PASSWORD}
image: ${ZAMMAD_IMAGE} build: ./zammad
restart: on-failure restart: on-failure
volumes: volumes:
- zammad-data:/opt/zammad - zammad-data:/opt/zammad
zammad-memcached: zammad-memcached:
command: memcached -m 256M command: memcached -m 256M
image: memcached:1.6.10-alpine build: ./memcached
restart: ${RESTART} restart: ${RESTART}
zammad-nginx: zammad-nginx:
@ -69,7 +69,7 @@ services:
- "80" - "80"
depends_on: depends_on:
- zammad-railsserver - zammad-railsserver
image: ${ZAMMAD_IMAGE} build: ./zammad
restart: ${RESTART} restart: ${RESTART}
environment: environment:
VIRTUAL_HOST: ${ZAMMAD_VIRTUAL_HOST} VIRTUAL_HOST: ${ZAMMAD_VIRTUAL_HOST}
@ -82,7 +82,7 @@ services:
environment: environment:
- POSTGRES_USER=zammad - POSTGRES_USER=zammad
- POSTGRES_PASSWORD=${ZAMMAD_DATABASE_PASSWORD} - POSTGRES_PASSWORD=${ZAMMAD_DATABASE_PASSWORD}
image: ${ZAMMAD_POSTGRES_IMAGE} build: ./postgres
restart: ${RESTART} restart: ${RESTART}
volumes: volumes:
- postgresql-data:/var/lib/postgresql/data - postgresql-data:/var/lib/postgresql/data
@ -94,13 +94,13 @@ services:
- zammad-postgresql - zammad-postgresql
- zammad-redis - zammad-redis
environment: *common-zammad-variables environment: *common-zammad-variables
image: ${ZAMMAD_IMAGE} build: ./zammad
restart: ${RESTART} restart: ${RESTART}
volumes: volumes:
- zammad-data:/opt/zammad - zammad-data:/opt/zammad
zammad-redis: zammad-redis:
image: redis:6.2.5-alpine build: ./redis
restart: ${RESTART} restart: ${RESTART}
zammad-scheduler: zammad-scheduler:
@ -110,7 +110,7 @@ services:
- zammad-railsserver - zammad-railsserver
- zammad-redis - zammad-redis
environment: *common-zammad-variables environment: *common-zammad-variables
image: ${ZAMMAD_IMAGE} build: ./zammad
restart: ${RESTART} restart: ${RESTART}
volumes: volumes:
- zammad-data:/opt/zammad - zammad-data:/opt/zammad
@ -122,13 +122,13 @@ services:
- zammad-railsserver - zammad-railsserver
- zammad-redis - zammad-redis
environment: *common-zammad-variables environment: *common-zammad-variables
image: ${ZAMMAD_IMAGE} build: ./zammad
restart: ${RESTART} restart: ${RESTART}
volumes: volumes:
- zammad-data:/opt/zammad - zammad-data:/opt/zammad
metamigo-api: metamigo-api:
image: ${METAMIGO_IMAGE} build: ./metamigo-api
container_name: metamigo-api container_name: metamigo-api
restart: ${RESTART} restart: ${RESTART}
command: [ "api" ] command: [ "api" ]
@ -139,7 +139,7 @@ services:
- ./signald:/signald - ./signald:/signald
metamigo-frontend: metamigo-frontend:
image: ${METAMIGO_IMAGE} build: ./metamigo-frontend
container_name: metamigo-frontend container_name: metamigo-frontend
restart: ${RESTART} restart: ${RESTART}
command: [ "frontend" ] command: [ "frontend" ]
@ -153,14 +153,14 @@ services:
VIRTUAL_PORT: 3000 VIRTUAL_PORT: 3000
metamigo-worker: metamigo-worker:
image: ${METAMIGO_IMAGE} build: ./metamigo-worker
container_name: metamigo-worker container_name: metamigo-worker
restart: ${RESTART} restart: ${RESTART}
command: [ "worker" ] command: [ "worker" ]
environment: *common-metamigo-variables environment: *common-metamigo-variables
metamigo-postgresql: metamigo-postgresql:
image: ${METAMIGO_POSTGRES_IMAGE} build: ./postgresql
container_name: metamigo-postgresql container_name: metamigo-postgresql
restart: ${RESTART} restart: ${RESTART}
volumes: volumes:
@ -177,14 +177,14 @@ services:
- 127.0.0.1:5432:5432 - 127.0.0.1:5432:5432
signald: signald:
image: ${SIGNALD_IMAGE} build: ./signald
restart: ${RESTART} restart: ${RESTART}
user: ${CURRENT_UID} user: ${CURRENT_UID}
volumes: volumes:
- ./signald:/signald - ../signald:/signald
nginx-proxy: nginx-proxy:
image: ${NGINX_IMAGE} build: ./nginx-proxy
restart: ${RESTART} restart: ${RESTART}
ports: ports:
- "80:80" - "80:80"

1
elasticsearch/Dockerfile Normal file
View file

@ -0,0 +1 @@
# Pull from Elastic's official registry: the bare "elasticsearch/elasticsearch"
# repository does not exist on Docker Hub, so the previous pin failed to resolve.
FROM docker.elastic.co/elasticsearch/elasticsearch:8.6.1

1
label-studio/Dockerfile Normal file
View file

@ -0,0 +1 @@
FROM heartexlabs/label-studio:1.7.1

1
memcached/Dockerfile Normal file
View file

@ -0,0 +1 @@
FROM memcached:1.6.10-alpine

View file

@ -0,0 +1 @@
// Re-export the shared root ESLint config; a bare require() loads the file
// but discards its settings, leaving this package effectively unlinted.
module.exports = require("../.eslintrc.js");

26
metamigo-api/app/index.ts Normal file
View file

@ -0,0 +1,26 @@
import type * as Hapi from "@hapi/hapi";
import * as Joi from "joi";
import type { IAppConfig } from "../config";
import * as Services from "./services";
import * as Routes from "./routes";
import * as Plugins from "./plugins";
// Top-level Hapi plugin that bootstraps the whole application:
// infrastructure plugins first (auth, db, swagger), then services, then routes.
const AppPlugin = {
  name: "App",
  register: async (
    server: Hapi.Server,
    options: { config: IAppConfig }
  ): Promise<void> => {
    // declare our **run-time** plugin dependencies
    // these are runtime only deps, not registration time
    // ref: https://hapipal.com/best-practices/handling-plugin-dependencies
    server.dependency(["config", "hapi-pino"]);
    // use Joi as the server-wide request validator
    server.validator(Joi);
    // order matters: services and routes assume the plugins are in place
    await Plugins.register(server, options.config);
    await Services.register(server);
    await Routes.register(server);
  },
};
export default AppPlugin;

View file

@ -0,0 +1,198 @@
import { Boom } from "@hapi/boom";
import { Server } from "@hapi/hapi";
import { randomBytes } from "crypto";
import type { Logger } from "pino";
import {
proto,
BufferJSON,
generateRegistrationId,
Curve,
signedKeyPair,
AuthenticationCreds,
AuthenticationState,
AccountSettings,
SignalDataSet,
SignalDataTypeMap,
SignalKeyStore,
SignalKeyStoreWithTransaction,
} from "@adiwajshing/baileys";
import { SavedWhatsappBot as Bot } from "db";
// Maps Baileys signal-data type names to the property names used in the
// persisted auth-info JSON blob (consumed by useDatabaseAuthState).
const KEY_MAP: { [T in keyof SignalDataTypeMap]: string } = {
  "pre-key": "preKeys",
  session: "sessions",
  "sender-key": "senderKeys",
  "app-state-sync-key": "appStateSyncKeys",
  "app-state-sync-version": "appStateVersions",
  "sender-key-memory": "senderKeyMemory",
};
// Wrap a plain SignalKeyStore with batching "transaction" semantics:
// inside a transaction, reads are served from (and prefetched into) an
// in-memory cache and writes are buffered, then flushed to the underlying
// store in a single set() when the outermost transaction commits.
export const addTransactionCapability = (
  state: SignalKeyStore,
  logger: Logger
): SignalKeyStoreWithTransaction => {
  // true while the work() passed to transaction() is executing
  let inTransaction = false;
  // read-through cache of key data fetched during the transaction
  let transactionCache: SignalDataSet = {};
  // writes accumulated during the transaction, flushed on commit
  let mutations: SignalDataSet = {};
  // fetch any of `ids` not yet cached for `type` into transactionCache
  const prefetch = async (type: keyof SignalDataTypeMap, ids: string[]) => {
    if (!inTransaction) {
      throw new Boom("Cannot prefetch without transaction");
    }
    const dict = transactionCache[type];
    const idsRequiringFetch = dict
      ? ids.filter((item) => !(item in dict))
      : ids;
    // only fetch if there are any items to fetch
    if (idsRequiringFetch.length) {
      const result = await state.get(type, idsRequiringFetch);
      transactionCache[type] = transactionCache[type] || {};
      // @ts-expect-error
      Object.assign(transactionCache[type], result);
    }
  };
  return {
    get: async (type, ids) => {
      if (inTransaction) {
        // serve from the cache, fetching misses first
        await prefetch(type, ids);
        return ids.reduce((dict, id) => {
          const value = transactionCache[type]?.[id];
          if (value) {
            // @ts-expect-error
            dict[id] = value;
          }
          return dict;
        }, {});
      } else {
        return state.get(type, ids);
      }
    },
    set: (data) => {
      if (inTransaction) {
        logger.trace({ types: Object.keys(data) }, "caching in transaction");
        // buffer the write: update both the read cache and the mutation log
        for (const key in data) {
          // @ts-expect-error
          transactionCache[key] = transactionCache[key] || {};
          // @ts-expect-error
          Object.assign(transactionCache[key], data[key]);
          // @ts-expect-error
          mutations[key] = mutations[key] || {};
          // @ts-expect-error
          Object.assign(mutations[key], data[key]);
        }
      } else {
        // outside a transaction, write straight through
        return state.set(data);
      }
    },
    isInTransaction: () => inTransaction,
    // @ts-expect-error
    prefetch: (type, ids) => {
      logger.trace({ type, ids }, "prefetching");
      return prefetch(type, ids);
    },
    transaction: async (work) => {
      if (inTransaction) {
        // nested call: run inline inside the already-open transaction
        await work();
      } else {
        logger.debug("entering transaction");
        inTransaction = true;
        try {
          await work();
          if (Object.keys(mutations).length) {
            logger.debug("committing transaction");
            await state.set(mutations);
          } else {
            logger.debug("no mutations in transaction");
          }
        } finally {
          // always reset transaction state, even if work() or set() threw
          inTransaction = false;
          transactionCache = {};
          mutations = {};
        }
      }
    },
  };
};
// Build a brand-new set of Baileys authentication credentials for a bot
// that has never been registered before.
export const initAuthCreds = (): AuthenticationCreds => {
  const signedIdentityKey = Curve.generateKeyPair();
  const freshCreds = {
    noiseKey: Curve.generateKeyPair(),
    signedIdentityKey,
    // first signed pre-key, derived from the identity key
    signedPreKey: signedKeyPair(signedIdentityKey, 1),
    registrationId: generateRegistrationId(),
    // 32 random bytes, base64-encoded, as the account's adv secret
    advSecretKey: randomBytes(32).toString("base64"),
    nextPreKeyId: 1,
    firstUnuploadedPreKeyId: 1,
    processedHistoryMessages: [],
    accountSettings: {
      unarchiveChats: false,
    },
  };
  // the library's AuthenticationCreds type declares more fields than we populate
  return freshCreds as any;
};
// Persist Baileys auth state (creds + signal keys) in the application
// database, keyed by the bot row. Returns the live state plus a saveState()
// callback the caller should invoke whenever credentials change.
export const useDatabaseAuthState = (
  bot: Bot,
  server: Server
): { state: AuthenticationState; saveState: () => void } => {
  let { logger }: any = server;
  let creds: AuthenticationCreds;
  // in-memory key store, shaped per KEY_MAP (e.g. { preKeys: { id: data } })
  let keys: any = {};
  // serialize the full auth state (BufferJSON preserves binary fields)
  // and write it back onto the bot's row
  const saveState = async () => {
    logger && logger.trace("saving auth state");
    const authInfo = JSON.stringify({ creds, keys }, BufferJSON.replacer, 2);
    await server.db().whatsappBots.updateAuthInfo(bot, authInfo);
  };
  if (bot.authInfo) {
    console.log("Auth info exists");
    // resume from the previously persisted state
    const result = JSON.parse(bot.authInfo, BufferJSON.reviver);
    creds = result.creds;
    keys = result.keys;
  } else {
    console.log("Auth info does not exist");
    // first run for this bot: start from fresh credentials
    creds = initAuthCreds();
    keys = {};
  }
  return {
    state: {
      creds,
      keys: {
        get: (type, ids) => {
          const key = KEY_MAP[type];
          return ids.reduce((dict, id) => {
            let value = keys[key]?.[id];
            if (value) {
              if (type === "app-state-sync-key") {
                // rehydrate into the protobuf class Baileys expects
                // @ts-expect-error
                value = proto.AppStateSyncKeyData.fromObject(value);
              }
              // @ts-expect-error
              dict[id] = value;
            }
            return dict;
          }, {});
        },
        set: (data) => {
          for (const _key in data) {
            const key = KEY_MAP[_key as keyof SignalDataTypeMap];
            keys[key] = keys[key] || {};
            // @ts-expect-error
            Object.assign(keys[key], data[_key]);
          }
          // NOTE(review): the saveState() promise is not awaited here, so a
          // failed DB write is silently dropped — confirm this is intended
          saveState();
        },
      },
    },
    saveState,
  };
};

View file

@ -0,0 +1,114 @@
import * as Boom from "@hapi/boom";
import * as Hoek from "@hapi/hoek";
import * as Hapi from "@hapi/hapi";
import { promisify } from "util";
import jwt from "jsonwebtoken";
import jwksClient, { hapiJwt2KeyAsync } from "jwks-rsa";
import type { IAppConfig } from "../../config";
const CF_JWT_HEADER_NAME = "cf-access-jwt-assertion";
const CF_JWT_ALGOS = ["RS256"];
// Build an async verifier for Cloudflare Access JWTs backed by the team's
// JWKS endpoint. The returned function resolves with the decoded token or
// rejects when verification fails.
const verifyToken = (settings: any) => {
  const { audience, issuer } = settings;
  const client = jwksClient({
    jwksUri: `${issuer}/cdn-cgi/access/certs`,
  });
  return async (token: any) => {
    // Key resolver for jwt.verify. Errors MUST be passed to the callback:
    // the original code threw inside getSigningKey's own callback, which
    // surfaces as an unhandled exception instead of rejecting the
    // promisified jwt.verify call.
    const getKey = (header: any, callback: any) => {
      client.getSigningKey(header.kid, (err, key) => {
        if (err) {
          callback(
            Boom.serverUnavailable("failed to fetch cloudflare access jwks")
          );
          return;
        }
        callback(undefined, key?.getPublicKey());
      });
    };
    const opts = {
      algorithms: CF_JWT_ALGOS,
      audience,
      issuer,
    };
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    return (promisify(jwt.verify) as any)(token, getKey, opts);
  };
};
// onPreAuth extension: when Cloudflare Access forwards its JWT header,
// verify it and short-circuit the request with a 401 on failure.
// Requests without the header pass straight through.
const handleCfJwt = (verify: any) => async (
  request: Hapi.Request,
  h: Hapi.ResponseToolkit
) => {
  const token = request.headers[CF_JWT_HEADER_NAME];
  if (!token) {
    return h.continue;
  }
  try {
    await verify(token);
    return h.continue;
  } catch (error) {
    console.error(error);
    return Boom.unauthorized("invalid cloudflare access token");
  }
};
// Defaults for the cloudflare-jwt auth plugin options; issuer/audience and
// validate are supplied by registerCloudflareAccessJwt.
// NOTE(review): "clouflareaccess" looks like a typo for "cloudflareaccess",
// but routes may reference the strategy by this exact name — confirm before renaming.
const defaultOpts = {
  issuer: undefined,
  audience: undefined,
  strategyName: "clouflareaccess",
  validate: undefined,
};
// Register the "jwt" auth strategy backed by Cloudflare Access's JWKS and
// install the onPreAuth hook that rejects requests bearing an invalid token.
const cfJwtRegister = async (server: Hapi.Server, options: any): Promise<void> => {
  server.dependency(["hapi-auth-jwt2"]);
  const settings = Hoek.applyToDefaults(defaultOpts, options);
  const verify = verifyToken(settings);
  const { validate, strategyName, audience, issuer } = settings;
  // verify the Cloudflare header token on every request, before auth runs
  server.ext("onPreAuth", handleCfJwt(verify));
  server.auth.strategy(strategyName!, "jwt", {
    // resolve signing keys dynamically from Cloudflare's JWKS endpoint
    key: hapiJwt2KeyAsync({
      jwksUri: `${issuer}/cdn-cgi/access/certs`,
    }),
    cookieKey: false,
    urlKey: false,
    // the token arrives in Cloudflare's dedicated header, not Authorization
    headerKey: CF_JWT_HEADER_NAME,
    validate,
    verifyOptions: {
      audience,
      issuer,
      algorithms: ["RS256"],
    },
  });
};
// Enable Cloudflare Access JWT authorization when the deployment provides
// the cfaccess audience + domain config; otherwise this is a silent no-op.
export const registerCloudflareAccessJwt = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  const { audience, domain } = config.cfaccess;
  // only enable this plugin if cloudflare access config is configured
  if (audience && domain) {
    server.log(["auth"], "cloudflare access authorization enabled");
    await server.register({
      plugin: {
        name: "cloudflare-jwt",
        version: "0.0.1",
        register: cfJwtRegister,
      },
      options: {
        issuer: `https://${domain}`,
        audience,
        // accept any verified token and expose the user's identity claims
        // at request.auth.credentials
        validate: (decoded: any, _request: any) => {
          const { email, name } = decoded;
          return {
            isValid: true,
            credentials: { user: { email, name } },
          };
        },
      },
    });
  }
};

View file

@ -0,0 +1,26 @@
import type * as Hapi from "@hapi/hapi";
import NextAuthPlugin, { AdapterFactory } from "@digiresilience/hapi-nextauth";
import { NextAuthAdapter } from "common";
import type { SavedUser, UnsavedUser, SavedSession } from "common";
import { IAppConfig } from "config";
// Wire the NextAuth adapter plugin so next-auth sessions/users are stored
// through this application's database layer.
export const registerNextAuth = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  // I'm not sure why I need to be so explicit with the generic types here
  // I thought ts could figure out the generics based on the concrete params, but apparently not
  const nextAuthAdapterFactory: AdapterFactory<
    SavedUser,
    UnsavedUser,
    SavedSession
  > = (request: Hapi.Request) => new NextAuthAdapter(request.db());
  await server.register({
    plugin: NextAuthPlugin,
    options: {
      nextAuthAdapterFactory,
      // secret shared with the next-auth frontend
      sharedSecret: config.nextAuth.secret,
    },
  });
};

View file

@ -0,0 +1,32 @@
import type * as Hapi from "@hapi/hapi";
import Schmervice from "@hapipal/schmervice";
import PgPromisePlugin from "@digiresilience/hapi-pg-promise";
import type { IAppConfig } from "../../config";
import { dbInitOptions } from "db";
import { registerNextAuth } from "./hapi-nextauth";
import { registerSwagger } from "./swagger";
import { registerNextAuthJwt } from "./nextauth-jwt";
import { registerCloudflareAccessJwt } from "./cloudflare-jwt";
// Register the API server's infrastructure plugins: services (schmervice),
// database (pg-promise), next-auth, swagger, and both JWT auth strategies.
export const register = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  await server.register(Schmervice);
  await server.register({
    plugin: PgPromisePlugin,
    options: {
      // the only required parameter is the connection string
      connection: config.db.connection,
      // ... and the pg-promise initialization options
      pgpInit: dbInitOptions(config),
    },
  });
  await registerNextAuth(server, config);
  await registerSwagger(server);
  await registerNextAuthJwt(server, config);
  await registerCloudflareAccessJwt(server, config);
};

View file

@ -0,0 +1,100 @@
import * as Hoek from "@hapi/hoek";
import * as Hapi from "@hapi/hapi";
import type { IAppConfig } from "../../config";
// hapi-auth-jwt2 expects the key to be a raw key
// Decode the symmetric key material ("k", base64) out of a serialized JWK
// string; any failure is surfaced as a configuration error.
const jwkToHapiAuthJwt2 = (jwkString) => {
  try {
    const { k } = JSON.parse(jwkString);
    return Buffer.from(k, "base64");
  } catch {
    throw new Error(
      "Failed to parse key for JWT verification. This is probably an application configuration error."
    );
  }
};
// Defaults for the nextauth-jwt strategy; jwkeysB64 and validate are
// required and supplied by registerNextAuthJwt.
const jwtDefaults = {
  jwkeysB64: undefined,
  validate: undefined,
  strategyName: "nextauth-jwt",
};
// Register a hapi-auth-jwt2 strategy whose symmetric keys come from the
// configured base64 JWK strings.
const jwtRegister = async (server: Hapi.Server, options): Promise<void> => {
  server.dependency(["hapi-auth-jwt2"]);
  const settings: any = Hoek.applyToDefaults(jwtDefaults, options);
  // decode every configured JWK into the raw key hapi-auth-jwt2 expects
  const key = settings.jwkeysB64.map((k) => jwkToHapiAuthJwt2(k));
  server.auth.strategy(settings.strategyName!, "jwt", {
    key,
    cookieKey: false,
    urlKey: false,
    validate: settings.validate,
  });
};
// Enable JWT auth for nextauth-issued tokens. In production a missing
// signing key is fatal; in development it only logs a warning and leaves
// the nextauth endpoints unauthenticated.
export const registerNextAuthJwt = async (
  server: Hapi.Server,
  config: IAppConfig
): Promise<void> => {
  if (config.nextAuth.signingKey) {
    await server.register({
      plugin: {
        name: "nextauth-jwt",
        version: "0.0.2",
        register: jwtRegister,
      },
      options: {
        jwkeysB64: [config.nextAuth.signingKey],
        // a token is only valid if it maps onto an active user row
        validate: async (decoded, request: Hapi.Request) => {
          const { email, name, role } = decoded;
          const user = await request.db().users.findBy({ email });
          if (!config.isProd) {
            server.logger.info(
              {
                email,
                name,
                role,
              },
              "nextauth-jwt authorizing request"
            );
            // server.logger.info({ user }, "nextauth-jwt user result");
          }
          return {
            isValid: Boolean(user && user.isActive),
            // this credentials object is made available in every request
            // at `request.auth.credentials`
            credentials: { email, name, role },
          };
        },
      },
    });
  } else if (config.isProd) {
    throw new Error("Missing nextauth.signingKey configuration value.");
  } else {
    server.log(
      ["warn"],
      "Missing nextauth.signingKey configuration value. Authentication of nextauth endpoints disabled!"
    );
  }
};
// @hapi/jwt expects the key in its own format
/* UNUSED
const _jwkToHapiJwt = (jwkString) => {
try {
const jwk = JSON.parse(jwkString);
const rawKey = Buffer.from(jwk.k, "base64");
return {
key: rawKey,
algorithms: [jwk.alg],
kid: jwk.kid,
};
} catch {
throw new Error(
"Failed to parse key for JWT verification. This is probably an application configuration error."
);
}
};
*/

View file

@ -0,0 +1,32 @@
import * as Inert from "@hapi/inert";
import * as Vision from "@hapi/vision";
import type * as Hapi from "@hapi/hapi";
import * as HapiSwagger from "hapi-swagger";
// Register Swagger UI (plus its Inert/Vision prerequisites) so the API is
// self-documenting at /api-docs.
export const registerSwagger = async (server: Hapi.Server): Promise<void> => {
  const swaggerOptions: HapiSwagger.RegisterOptions = {
    info: {
      title: "Metamigo API Docs",
      description: "part of CDR Link",
      version: "0.1",
    },
    // group sets of endpoints by tag
    tags: [{ name: "users", description: "API for Users" }],
    documentationRouteTags: ["swagger"],
    documentationPath: "/api-docs",
  };
  // static-file and templating support are required by hapi-swagger
  await server.register({ plugin: Inert });
  await server.register({ plugin: Vision });
  await server.register({ plugin: HapiSwagger, options: swaggerOptions });
};

View file

@ -0,0 +1,21 @@
import * as Metamigo from "common";
import Toys from "@hapipal/toys";
// Default route options: CORS on, authenticated via the nextauth-jwt
// strategy, with the shared validation failAction.
export const withDefaults = Toys.withRouteDefaults({
  options: {
    cors: true,
    auth: "nextauth-jwt",
    validate: {
      failAction: Metamigo.validatingFailAction,
    },
  },
});
// Same defaults but without authentication — for public/webhook routes.
export const noAuth = Toys.withRouteDefaults({
  options: {
    cors: true,
    validate: {
      failAction: Metamigo.validatingFailAction,
    },
  },
});

View file

@ -0,0 +1,33 @@
import isFunction from "lodash/isFunction";
import type * as Hapi from "@hapi/hapi";
import * as RandomRoutes from "./random";
import * as UserRoutes from "./users";
import * as VoiceRoutes from "./voice";
import * as WhatsappRoutes from "./whatsapp";
import * as SignalRoutes from "./signal";
// Register every route exported from a route-index module on the server.
// An export may be a route object (or array of routes) or an async factory
// (server) => route(s); factories are awaited before registration.
const loadRouteIndex = async (server, index) => {
  // collect the module's own exported values
  const routes = [];
  for (const exported in index) {
    if (Object.prototype.hasOwnProperty.call(index, exported)) {
      routes.push(index[exported]);
    }
  }
  // Fixed: the original used routes.forEach(async ...), which never awaited
  // the factories — registration errors were unhandled and callers could not
  // rely on routes being installed when this function resolved.
  for (const route of routes) {
    if (typeof route === "function") {
      server.route(await route(server));
    } else {
      server.route(route);
    }
  }
};
// Load your routes here.
// routes are loaded from the list of exported vars
// a route file should export routes directly or an async function that returns the routes.
export const register = async (server: Hapi.Server): Promise<void> => {
  // Fixed: await each index so route-registration failures propagate to the
  // caller (previously the returned promises were dropped, hiding errors and
  // letting registration race server startup).
  await loadRouteIndex(server, RandomRoutes);
  await loadRouteIndex(server, UserRoutes);
  await loadRouteIndex(server, VoiceRoutes);
  await loadRouteIndex(server, WhatsappRoutes);
  await loadRouteIndex(server, SignalRoutes);
};

View file

@ -0,0 +1,249 @@
import * as Hapi from "@hapi/hapi";
import * as Joi from "joi";
import * as Helpers from "../helpers";
import Boom from "boom";
// Resolve the schmervice-managed signald service from the current request.
const getSignalService = (request) => request.services().signaldService;
// GET /api/signal/bots — list every signal bot (authenticated route).
// Responds 204 when the lookup yields nothing.
export const GetAllSignalBotsRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/signal/bots",
  options: {
    description: "Get all bots",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const signalService = getSignalService(request);
      const bots = await signalService.findAll();
      if (bots) {
        // with the pino logger the first arg is an object of data to log
        // the second arg is a message
        // all other args are formated args for the msg
        request.logger.info({ bots }, "Retrieved bot(s) at %s", new Date());
        return { bots };
      }
      return _h.response().code(204);
    },
  },
});
// GET /api/signal/bots/{token} — fetch a single bot by its token.
// NOTE(review): route is unauthenticated (noAuth); the token in the URL
// appears to act as the credential — confirm tokens are unguessable.
export const GetBotsRoute = Helpers.noAuth({
  method: "get",
  path: "/api/signal/bots/{token}",
  options: {
    description: "Get one bot",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { token } = request.params;
      const signalService = getSignalService(request);
      const bot = await signalService.findByToken(token);
      if (bot) {
        // with the pino logger the first arg is an object of data to log
        // the second arg is a message
        // all other args are formated args for the msg
        request.logger.info({ bot }, "Retrieved bot(s) at %s", new Date());
        return bot;
      }
      throw Boom.notFound("Bot not found");
    },
  },
});
// Payload for the send endpoint.
interface MessageRequest {
  phoneNumber: string;
  message: string;
}
// POST /api/signal/bots/{token}/send — send a signal message from the bot
// identified by token to the given phone number (unauthenticated; token-keyed).
export const SendBotRoute = Helpers.noAuth({
  method: "post",
  path: "/api/signal/bots/{token}/send",
  options: {
    description: "Send a message",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { token } = request.params;
      const { phoneNumber, message } = request.payload as MessageRequest;
      const signalService = getSignalService(request);
      const bot = await signalService.findByToken(token);
      if (bot) {
        request.logger.info({ bot }, "Sent a message at %s", new Date());
        await signalService.send(bot, phoneNumber, message as string);
        return _h
          .response({
            result: {
              recipient: phoneNumber,
              timestamp: new Date().toISOString(),
              source: bot.phoneNumber,
            },
          })
          .code(200); // temp
      }
      throw Boom.notFound("Bot not found");
    },
  },
});
// Payload for the resetSession endpoint.
interface ResetSessionRequest {
  phoneNumber: string;
}
// POST /api/signal/bots/{token}/resetSession — reset the bot's signal
// session with the given phone number (unauthenticated; token-keyed).
export const ResetSessionBotRoute = Helpers.noAuth({
  method: "post",
  path: "/api/signal/bots/{token}/resetSession",
  options: {
    description: "Reset a session with another user",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { token } = request.params;
      const { phoneNumber } = request.payload as ResetSessionRequest;
      const signalService = getSignalService(request);
      const bot = await signalService.findByToken(token);
      if (bot) {
        await signalService.resetSession(bot, phoneNumber);
        return _h
          .response({
            result: {
              recipient: phoneNumber,
              timestamp: new Date().toISOString(),
              source: bot.phoneNumber,
            },
          })
          .code(200); // temp
      }
      throw Boom.notFound("Bot not found");
    },
  },
});
// GET /api/signal/bots/{token}/receive — drain pending incoming messages
// for the bot identified by token (authenticated route).
export const ReceiveBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/signal/bots/{token}/receive",
  options: {
    description: "Receive messages",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { token } = request.params;
      const signalService = getSignalService(request);
      const bot = await signalService.findByToken(token);
      if (bot) {
        request.logger.info({ bot }, "Received messages at %s", new Date());
        return signalService.receive(bot);
      }
      throw Boom.notFound("Bot not found");
    },
  },
});
// GET /api/signal/bots/{id}/register — complete a bot's signal registration
// using the verification code obtained via requestCode.
export const RegisterBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/signal/bots/{id}/register",
  options: {
    description: "Register a bot",
    handler: async (request: Hapi.Request, h: Hapi.ResponseToolkit) => {
      const { id } = request.params;
      const signalService = getSignalService(request);
      const { code } = request.query;
      const bot = await signalService.findById(id);
      if (!bot) throw Boom.notFound("Bot not found");
      try {
        request.logger.info({ bot }, "Create bot at %s", new Date());
        await signalService.register(bot, code);
        return h.response(bot).code(200);
      } catch (error) {
        // signald errors carry a numeric `code`; fall back to 500 when the
        // error has none — the previous `.code(error.code)` threw on
        // undefined, masking the real failure (matches RequestCodeRoute).
        return h.response().code(error.code ?? 500);
      }
    },
  },
});
// Payload for bot creation.
interface BotRequest {
  phoneNumber: string;
  description: string;
}
// POST /api/signal/bots — create a new (unregistered) signal bot owned by
// the authenticated user.
export const CreateBotRoute = Helpers.withDefaults({
  method: "post",
  path: "/api/signal/bots",
  options: {
    description: "Register a bot",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { phoneNumber, description } = request.payload as BotRequest;
      const signalService = getSignalService(request);
      // (debug console.log of request.auth.credentials removed — it printed
      // authenticated-user details to stdout on every call)
      const bot = await signalService.create(
        phoneNumber,
        description,
        request.auth.credentials.email as string
      );
      if (bot) {
        request.logger.info({ bot }, "Create bot at %s", new Date());
        return bot;
      }
      throw Boom.notFound("Bot not found");
    },
  },
});
// GET /api/signal/bots/{id}/requestCode — ask signald to send a signal
// registration verification code via SMS or a voice call.
export const RequestCodeRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/signal/bots/{id}/requestCode",
  options: {
    description: "Register a bot",
    validate: {
      params: Joi.object({
        id: Joi.string().uuid().required(),
      }),
      query: Joi.object({
        mode: Joi.string().valid("sms", "voice").required(),
        captcha: Joi.string(),
      }),
    },
    handler: async (request: Hapi.Request, h: Hapi.ResponseToolkit) => {
      const { id } = request.params;
      const { mode, captcha } = request.query;
      const signalService = getSignalService(request);
      const bot = await signalService.findById(id);
      if (!bot) {
        throw Boom.notFound("Bot not found");
      }
      try {
        if (mode === "sms") {
          await signalService.requestSMSVerification(bot, captcha);
        } else if (mode === "voice") {
          await signalService.requestVoiceVerification(bot, captcha);
        }
        return h.response().code(200);
      } catch (error) {
        console.log(error);
        // 402 tells the client that a captcha token must be supplied
        if (error.name === "CaptchaRequiredException") {
          return h.response().code(402);
        } else if (error.code) {
          // signald errors carry a numeric code; pass it through
          return h.response().code(error.code);
        } else {
          return h.response().code(500);
        }
      }
    },
  },
});

View file

@ -0,0 +1,59 @@
import * as Joi from "joi";
import * as Hapi from "@hapi/hapi";
import { UserRecord, crudRoutesFor, CrudControllerBase } from "common";
import * as RouteHelpers from "../helpers";
// CRUD controller specialized to the users table's record type.
class UserRecordController extends CrudControllerBase(UserRecord) { }
// Joi validation schemas for each generated CRUD route on /api/users.
const validator = (): Record<string, Hapi.RouteOptionsValidate> => ({
  create: {
    payload: Joi.object({
      name: Joi.string().required(),
      email: Joi.string().email().required(),
      emailVerified: Joi.string().isoDate().required(),
      createdBy: Joi.string().required(),
      avatar: Joi.string()
        .uri({ scheme: ["http", "https"] })
        .optional(),
      userRole: Joi.string().optional(),
      isActive: Joi.boolean().optional(),
    }).label("UserCreate"),
  },
  updateById: {
    params: {
      userId: Joi.string().uuid().required(),
    },
    payload: Joi.object({
      name: Joi.string().optional(),
      email: Joi.string().email().optional(),
      emailVerified: Joi.string().isoDate().optional(),
      // fixed: was Joi.boolean(), which contradicted `create` (createdBy is
      // a string there) and would reject otherwise-valid update payloads
      createdBy: Joi.string().optional(),
      avatar: Joi.string()
        .uri({ scheme: ["http", "https"] })
        .optional(),
      userRole: Joi.string().optional(),
      isActive: Joi.boolean().optional(),
      createdAt: Joi.string().isoDate().optional(),
      updatedAt: Joi.string().isoDate().optional(),
    }).label("UserUpdate"),
  },
  deleteById: {
    params: {
      userId: Joi.string().uuid().required(),
    },
  },
  getById: {
    params: {
      userId: Joi.string().uuid().required(),
    },
  },
});
// Build the standard CRUD route set for /api/users, wrapped in the
// authenticated route defaults.
export const UserRoutes = async (
  _server: Hapi.Server
): Promise<Hapi.ServerRoute[]> => {
  const controller = new UserRecordController("users", "userId");
  return RouteHelpers.withDefaults(
    crudRoutesFor("user", "/api/users", controller, "userId", validator())
  );
};

View file

@ -0,0 +1,124 @@
import * as Hapi from "@hapi/hapi";
import * as Joi from "joi";
import * as Boom from "@hapi/boom";
import * as R from "remeda";
import * as Helpers from "../helpers";
import Twilio from "twilio";
import { crudRoutesFor, CrudControllerBase } from "common";
import { VoiceLineRecord, SavedVoiceLine } from "db";
// Provider-kind-specific handlers (currently Twilio only).
const TwilioHandlers = {
  // List the account's voice-capable incoming numbers that are not yet
  // attached to any voice line in our database.
  freeNumbers: async (provider, request: Hapi.Request) => {
    const { accountSid, apiKeySid, apiKeySecret } = provider.credentials;
    const client = Twilio(apiKeySid, apiKeySecret, {
      accountSid,
    });
    // NOTE(review): `limit: 100` caps the listing; accounts with more
    // incoming numbers would silently miss some — confirm acceptable
    const numbers = R.pipe(
      await client.incomingPhoneNumbers.list({ limit: 100 }),
      R.filter((n) => n.capabilities.voice),
      R.map(R.pick(["sid", "phoneNumber"]))
    );
    const numberSids = R.map(numbers, R.prop("sid"));
    const voiceLineRepo = request.db().voiceLines;
    const voiceLines: SavedVoiceLine[] =
      await voiceLineRepo.findAllByProviderLineSids(numberSids);
    const voiceLineSids = new Set(R.map(voiceLines, R.prop("providerLineSid")));
    // keep only numbers whose sid is not already claimed by a voice line
    return R.pipe(
      numbers,
      R.reject((n) => voiceLineSids.has(n.sid)),
      R.map((n) => ({ id: n.sid, name: n.phoneNumber }))
    );
  },
};
// Routes operating on voice providers (accounts), dispatched by provider kind.
export const VoiceProviderRoutes = Helpers.withDefaults([
  {
    method: "GET",
    path: "/api/voice/providers/{providerId}/freeNumbers",
    options: {
      description:
        "get a list of the incoming numbers for a provider account that aren't assigned to a voice line",
      validate: {
        params: {
          providerId: Joi.string().uuid().required(),
        },
      },
      handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
        const { providerId } = request.params;
        const voiceProvidersRepo = request.db().voiceProviders;
        const provider = await voiceProvidersRepo.findById(providerId);
        if (!provider) return Boom.notFound();
        // dispatch to the handler for this provider's kind
        switch (provider.kind) {
          case "TWILIO":
            return TwilioHandlers.freeNumbers(provider, request);
          default:
            // unknown kind means a data/config problem, not a client error
            return Boom.badImplementation();
        }
      },
    },
  },
]);
// CRUD controller specialized to the voice-line record type.
class VoiceLineRecordController extends CrudControllerBase(VoiceLineRecord) { }
// Joi validation schemas for the generated voice-line CRUD routes.
// promptRecording is base64-encoded audio, capped at ~50 MB.
const validator = (): Record<string, Hapi.RouteOptionsValidate> => ({
  create: {
    payload: Joi.object({
      providerType: Joi.string().required(),
      providerId: Joi.string().required(),
      number: Joi.string().required(),
      language: Joi.string().required(),
      voice: Joi.string().required(),
      promptText: Joi.string().optional(),
      promptRecording: Joi.binary()
        .encoding("base64")
        .max(50 * 1000 * 1000)
        .optional(),
    }).label("VoiceLineCreate"),
  },
  updateById: {
    params: {
      id: Joi.string().uuid().required(),
    },
    payload: Joi.object({
      providerType: Joi.string().optional(),
      providerId: Joi.string().optional(),
      number: Joi.string().optional(),
      language: Joi.string().optional(),
      voice: Joi.string().optional(),
      promptText: Joi.string().optional(),
      promptRecording: Joi.binary()
        .encoding("base64")
        .max(50 * 1000 * 1000)
        .optional(),
    }).label("VoiceLineUpdate"),
  },
  deleteById: {
    params: {
      id: Joi.string().uuid().required(),
    },
  },
  getById: {
    params: {
      id: Joi.string().uuid().required(),
    },
  },
});
// Build the standard CRUD route set for voice lines under
// /api/voice/voice-line, wrapped in the authenticated route defaults.
export const VoiceLineRoutes = async (
  _server: Hapi.Server
): Promise<Hapi.ServerRoute[]> => {
  const controller = new VoiceLineRecordController("voiceLines", "id");
  return Helpers.withDefaults(
    crudRoutesFor(
      "voice-line",
      "/api/voice/voice-line",
      controller,
      "id",
      validator()
    )
  );
};
export * from "./twilio";

View file

@ -0,0 +1,230 @@
import * as Hapi from "@hapi/hapi";
import * as Joi from "joi";
import * as Boom from "@hapi/boom";
import Twilio from "twilio";
import { SavedVoiceProvider } from "db";
import pMemoize from "p-memoize";
import ms from "ms";
import * as Helpers from "../../helpers";
import workerUtils from "../../../../worker-utils";
import { SayLanguage, SayVoice } from "twilio/lib/twiml/VoiceResponse";
// Enqueue a background job to process a finished Twilio recording.
// jobKey dedupes on callSid so webhook retries don't double-queue the job.
const queueRecording = async (meta) => {
  return workerUtils.addJob("twilio-recording", meta, { jobKey: meta.callSid });
};
// Construct an authenticated Twilio client for a stored voice provider,
// failing fast when the provider row is missing any API credential.
const twilioClientFor = (provider: SavedVoiceProvider): Twilio.Twilio => {
  const { accountSid, apiKeySid, apiKeySecret } = provider.credentials;
  const missingCredentials = !accountSid || !apiKeySid || !apiKeySecret;
  if (missingCredentials) {
    throw new Error(
      `twilio provider ${provider.name} does not have credentials`
    );
  }
  return Twilio(apiKeySid, apiKeySecret, { accountSid });
};
// Find the Twilio application used for TTS testing by friendly name, or
// create it when absent.
// NOTE(review): if an app exists with this name but a *different* voiceUrl,
// a second app with the same name is created rather than updating the
// existing one — confirm this duplication is intended.
const _getOrCreateTTSTestApplication = async (
  url,
  name,
  client: Twilio.Twilio
) => {
  const application = await client.applications.list({ friendlyName: name });
  if (application[0] && application[0].voiceUrl === url) {
    return application[0];
  }
  return client.applications.create({
    voiceMethod: "POST",
    voiceUrl: url,
    friendlyName: name,
  });
};
// Memoized wrapper: avoid hitting the Twilio API more than once per hour
// for the same arguments.
const getOrCreateTTSTestApplication = pMemoize(_getOrCreateTTSTestApplication, {
  maxAge: ms("1h"),
});
// Twilio voicemail webhooks and helper endpoints.
// NOTE(review): these routes are intentionally unauthenticated (Twilio must
// reach them), but no X-Twilio-Signature request validation is performed —
// anyone who learns the URLs can invoke them. Consider twilio.validateRequest.
export const TwilioRoutes = Helpers.noAuth([
  {
    method: "get",
    path: "/api/voice/twilio/prompt/{voiceLineId}",
    options: {
      description: "download the mp3 file to play as a prompt for the user",
      validate: {
        params: {
          voiceLineId: Joi.string().uuid().required(),
        },
      },
      // Serves the stored audio prompt that the record webhook's <Play>
      // verb points Twilio at.
      handler: async (request: Hapi.Request, h: Hapi.ResponseToolkit) => {
        const { voiceLineId } = request.params;
        const voiceLine = await request
          .db()
          .voiceLines.findById({ id: voiceLineId });
        if (!voiceLine) return Boom.notFound();
        // Audio prompts are optional per line; text prompts use <Say>.
        if (!voiceLine.audioPromptEnabled) return Boom.badRequest();
        const mp3 = voiceLine.promptAudio["audio/mpeg"];
        if (!mp3) {
          // Prompt enabled but the mp3 has not been rendered/stored yet.
          return Boom.serverUnavailable();
        }
        return h
          .response(Buffer.from(mp3, "base64"))
          .header("Content-Type", "audio/mpeg")
          .header("Content-Disposition", "attachment; filename=prompt.mp3");
      },
    },
  },
  {
    method: "post",
    path: "/api/voice/twilio/record/{voiceLineId}",
    options: {
      description: "webhook for twilio to handle an incoming call",
      validate: {
        params: {
          voiceLineId: Joi.string().uuid().required(),
        },
      },
      // Answers an incoming call: plays/says the configured prompt, then
      // records the caller with a status callback for the finished recording.
      handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
        const { voiceLineId } = request.params;
        const { To } = request.payload as { To: string };
        // Look the line up by the dialed number and cross-check against the
        // id baked into the webhook URL.
        const voiceLine = await request.db().voiceLines.findBy({ number: To });
        if (!voiceLine) return Boom.notFound();
        if (voiceLine.id !== voiceLineId) return Boom.badRequest();
        const frontendUrl = request.server.config().frontend.url;
        const useTextPrompt = !voiceLine.audioPromptEnabled;
        const twiml = new Twilio.twiml.VoiceResponse();
        if (useTextPrompt) {
          let prompt = voiceLine.promptText;
          if (!prompt || prompt.length === 0)
            prompt =
              "The grabadora text prompt is unconfigured. Please set a prompt in the administration screen.";
          twiml.say(
            {
              language: voiceLine.language as SayLanguage,
              voice: voiceLine.voice as SayVoice,
            },
            prompt
          );
        } else {
          // NOTE(review): callback URLs use the "/api/v1/..." prefix while
          // the routes in this file are mounted at "/api/..." — confirm the
          // frontend proxy rewrites /api/v1 to /api.
          const promptUrl = `${frontendUrl}/api/v1/voice/twilio/prompt/${voiceLineId}`;
          twiml.play({ loop: 1 }, promptUrl);
        }
        twiml.record({
          playBeep: true,
          finishOnKey: "1",
          recordingStatusCallback: `${frontendUrl}/api/v1/voice/twilio/recording-ready/${voiceLineId}`,
        });
        return twiml.toString();
      },
    },
  },
  {
    method: "post",
    path: "/api/voice/twilio/recording-ready/{voiceLineId}",
    options: {
      description: "webhook for twilio to handle a recording",
      validate: {
        params: {
          voiceLineId: Joi.string().uuid().required(),
        },
      },
      // Enqueues a background job to download/process the finished recording.
      handler: async (request: Hapi.Request, h: Hapi.ResponseToolkit) => {
        const { voiceLineId } = request.params;
        const voiceLine = await request
          .db()
          .voiceLines.findById({ id: voiceLineId });
        if (!voiceLine) return Boom.notFound();
        const { AccountSid, RecordingSid, CallSid } = request.payload as {
          AccountSid: string;
          RecordingSid: string;
          CallSid: string;
        };
        await queueRecording({
          voiceLineId,
          accountSid: AccountSid,
          callSid: CallSid,
          recordingSid: RecordingSid,
        });
        // Bug fix: acknowledge with 204 No Content — the previous 203
        // ("Non-Authoritative Information") was a typo; 204 matches the
        // empty-body ack used elsewhere in this codebase.
        return h.response().code(204);
      },
    },
  },
  {
    method: "post",
    path: "/api/voice/twilio/text-to-speech/{providerId}",
    options: {
      // Typo fix: "webook" -> "webhook".
      description: "webhook for twilio to test the twilio text-to-speech",
      validate: {
        params: {
          providerId: Joi.string().uuid().required(),
        },
      },
      // Returns TwiML speaking the submitted prompt so admins can audition
      // language/voice combinations.
      handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
        const { language, voice, prompt } = request.payload as {
          language: SayLanguage;
          voice: SayVoice;
          prompt: string;
        };
        const twiml = new Twilio.twiml.VoiceResponse();
        twiml.say({ language, voice }, prompt);
        return twiml.toString();
      },
    },
  },
  {
    method: "get",
    path: "/api/voice/twilio/text-to-speech-token/{providerId}",
    options: {
      description:
        "generates a one time token to test the twilio text-to-speech",
      validate: {
        params: {
          providerId: Joi.string().uuid().required(),
        },
      },
      // Ensures a TwiML test application exists for this provider and mints
      // a client AccessToken with a VoiceGrant targeting it.
      handler: async (request: Hapi.Request, h: Hapi.ResponseToolkit) => {
        const { providerId } = request.params as { providerId: string };
        const provider: SavedVoiceProvider = await request
          .db()
          .voiceProviders.findById({ id: providerId });
        if (!provider) return Boom.notFound();
        const frontendUrl = request.server.config().frontend.url;
        const url = `${frontendUrl}/api/v1/voice/twilio/text-to-speech/${providerId}`;
        const name = `Grabadora text-to-speech tester: ${providerId}`;
        const app = await getOrCreateTTSTestApplication(
          url,
          name,
          twilioClientFor(provider)
        );
        const { accountSid, apiKeySecret, apiKeySid } = provider.credentials;
        const token = new Twilio.jwt.AccessToken(
          accountSid,
          apiKeySid,
          apiKeySecret,
          { identity: "tts-test" }
        );
        const grant = new Twilio.jwt.AccessToken.VoiceGrant({
          outgoingApplicationSid: app.sid,
          incomingAllow: true,
        });
        token.addGrant(grant);
        return h.response({
          token: token.toJwt(),
        });
      },
    },
  },
]);

View file

@ -0,0 +1,195 @@
import * as Hapi from "@hapi/hapi";
import * as Helpers from "../helpers";
import Boom from "boom";
// List every whatsapp bot; 204 when the service returns nothing.
export const GetAllWhatsappBotsRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/whatsapp/bots",
  options: {
    description: "Get all bots",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { whatsappService } = request.services();
      const bots = await whatsappService.findAll();
      if (!bots) {
        // Nothing to return: empty 204 response.
        return _h.response().code(204);
      }
      // pino convention: first arg is structured data, second the message,
      // remaining args interpolate into the message.
      request.logger.info({ bots }, "Retrieved bot(s) at %s", new Date());
      return { bots };
    },
  },
});
// Fetch a single bot by its token (unauthenticated: the token is the secret).
export const GetBotsRoute = Helpers.noAuth({
  method: "get",
  path: "/api/whatsapp/bots/{token}",
  options: {
    description: "Get one bot",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { token } = request.params;
      const { whatsappService } = request.services();
      const bot = await whatsappService.findByToken(token);
      if (!bot) throw Boom.notFound("Bot not found");
      // pino: data object first, then the message and its format args.
      request.logger.info({ bot }, "Retrieved bot(s) at %s", new Date());
      return bot;
    },
  },
});
// Payload shape for the send endpoint below.
interface MessageRequest {
  phoneNumber: string; // recipient phone number
  message: string; // plain-text message body
}
// Send a message through the bot identified by `token`.
export const SendBotRoute = Helpers.noAuth({
  method: "post",
  path: "/api/whatsapp/bots/{token}/send",
  options: {
    description: "Send a message",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { token } = request.params;
      const { phoneNumber, message } = request.payload as MessageRequest;
      const { whatsappService } = request.services();
      const bot = await whatsappService.findByToken(token);
      if (!bot) throw Boom.notFound("Bot not found");
      // MessageRequest already types `message` as string, so the previous
      // `message as string` cast was redundant.
      await whatsappService.send(bot, phoneNumber, message);
      // Log after the send completes so the log reflects reality (the old
      // code logged "Sent a message" before actually sending).
      request.logger.info({ bot }, "Sent a message at %s", new Date());
      return _h
        .response({
          result: {
            recipient: phoneNumber,
            timestamp: new Date().toISOString(),
            source: bot.phoneNumber,
          },
        })
        .code(200); // temp
    },
  },
});
// Pull messages received by the bot within the last two calendar days.
export const ReceiveBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/whatsapp/bots/{token}/receive",
  options: {
    description: "Receive messages",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { token } = request.params;
      const { whatsappService } = request.services();
      const bot = await whatsappService.findByToken(token);
      if (!bot) throw Boom.notFound("Bot not found");
      request.logger.info({ bot }, "Received messages at %s", new Date());
      // temp: fixed two-calendar-day lookback window
      const now = new Date();
      const cutoff = new Date(now.getTime());
      cutoff.setDate(now.getDate() - 2);
      return whatsappService.receive(bot, cutoff);
    },
  },
});
// Kick off bot registration (QR-code pairing flow).
export const RegisterBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/whatsapp/bots/{id}/register",
  options: {
    description: "Register a bot",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { id } = request.params;
      const { whatsappService } = request.services();
      const bot = await whatsappService.findById(id);
      if (!bot) throw Boom.notFound("Bot not found");
      // The callback fires later, when the connection reports auth success
      // or failure; by then the HTTP response has long been sent, so the
      // old `return _h.response(...)` inside it was dead code — log instead.
      await whatsappService.register(bot, (error: string) => {
        if (error) {
          request.logger.error({ bot, error }, "Bot registration failed");
          return;
        }
        request.logger.info({ bot }, "Register bot at %s", new Date());
      });
      // Bug fix: previously control fell through to `throw Boom.notFound`
      // even when the bot existed, so every successful call returned 404.
      return _h.response().code(200);
    },
  },
});
// Placeholder refresh endpoint: validates the bot and logs, but the actual
// refresh call is still commented out.
export const RefreshBotRoute = Helpers.withDefaults({
  method: "get",
  path: "/api/whatsapp/bots/{id}/refresh",
  options: {
    description: "Refresh messages",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { id } = request.params;
      const { whatsappService } = request.services();
      const bot = await whatsappService.findById(id);
      if (!bot) throw Boom.notFound("Bot not found");
      request.logger.info({ bot }, "Refreshed messages at %s", new Date());
      // await whatsappService.refresh(bot);
      return;
    },
  },
});
// Payload shape for the bot-creation endpoint below.
interface BotRequest {
  phoneNumber: string; // the bot's own phone number
  description: string; // human-readable label for the bot
}
// Create a new bot row owned by the authenticated user.
export const CreateBotRoute = Helpers.withDefaults({
  method: "post",
  path: "/api/whatsapp/bots",
  options: {
    description: "Register a bot",
    handler: async (request: Hapi.Request, _h: Hapi.ResponseToolkit) => {
      const { phoneNumber, description } = request.payload as BotRequest;
      const { whatsappService } = request.services();
      // Security fix: removed a console.log that dumped
      // request.auth.credentials (the caller's auth material) to stdout.
      const bot = await whatsappService.create(
        phoneNumber,
        description,
        request.auth.credentials.email as string
      );
      if (bot) {
        request.logger.info({ bot }, "Register bot at %s", new Date());
        return bot;
      }
      // NOTE(review): a failed insert is not really a 404; consider
      // Boom.badImplementation — kept as-is to preserve the API contract.
      throw Boom.notFound("Bot not found");
    },
  },
});

View file

@ -0,0 +1,14 @@
import type * as Hapi from "@hapi/hapi";
import SettingsService from "./settings";
import RandomService from "./random";
import WhatsappService from "./whatsapp";
import SignaldService from "./signald";
/**
 * Wire all application services into the hapi server (schmervice).
 * New services must also be declared on AppServices in ../types/index.ts.
 */
export const register = async (server: Hapi.Server): Promise<void> => {
  const services = [
    RandomService,
    SettingsService,
    WhatsappService,
    SignaldService,
  ];
  for (const service of services) {
    server.registerService(service);
  }
};

View file

@ -0,0 +1,16 @@
import * as Hapi from "@hapi/hapi";
import * as Schmervice from "@hapipal/schmervice";
import { settingInfo, SettingsService } from "db";
// Typed setting descriptors for the voicemail feature, keyed by setting name.
export const VoicemailPrompt = settingInfo<string>("voicemail-prompt");
export const VoicemailMinLength = settingInfo<number>("voicemail-min-length");
export const VoicemailUseTextPrompt = settingInfo<boolean>(
  "voicemail-use-text-prompt"
);
export { ISettingsService } from "db";
// Adapt the db-layer SettingsService factory to schmervice's functional
// service shape.
// @ts-expect-error -- SettingsService's return type does not match
// Schmervice.ServiceFunctionalInterface exactly; verify against db typings.
const service = (server: Hapi.Server): Schmervice.ServiceFunctionalInterface =>
  SettingsService(server.db().settings);
export default service;

View file

@ -0,0 +1,200 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { Server } from "@hapi/hapi";
import { Service } from "@hapipal/schmervice";
import {
SignaldAPI,
IncomingMessagev1,
ClientMessageWrapperv1
} from "@digiresilience/node-signald";
import { SavedSignalBot as Bot } from "db";
import workerUtils from "../../worker-utils";
/**
 * Service that bridges the app to a signald daemon over a unix socket:
 * maintains the transport connection, keeps account subscriptions alive, and
 * hands incoming Signal messages to the graphile-worker queue.
 */
export default class SignaldService extends Service {
  signald: SignaldAPI;

  // Accounts (phone numbers) we currently hold a signald subscription for.
  subscriptions: Set<string>;

  constructor(server: Server, options: never) {
    super(server, options);
    // The whole service is inert unless signald is enabled in config.
    if (this.server.config().signald.enabled) {
      this.signald = new SignaldAPI();
      this.signald.setLogger((level, msg, extra?) => {
        this.server.logger[level]({ extra }, msg);
      });
      this.subscriptions = new Set();
    }
  }

  async initialize(): Promise<void> {
    if (this.server.config().signald.enabled && this.signald) {
      this.setupListeners();
      this.connect();
    }
  }

  async teardown(): Promise<void> {
    if (this.server.config().signald.enabled && this.signald)
      this.signald.disconnect();
  }

  // Open (and keep reopening, with backoff) the transport to signald.
  private connect() {
    const { enabled, socket } = this.server.config().signald;
    if (!enabled) return;
    this.signald.connectWithBackoff(socket);
  }

  // Re-subscribe every account after each (re)connect.
  private async onConnected() {
    await this.subscribeAll();
  }

  private setupListeners() {
    this.signald.on("transport_error", async (error) => {
      this.server.logger.info({ error }, "signald transport error");
    });
    this.signald.on("transport_connected", async () => {
      this.onConnected();
    });
    this.signald.on(
      "transport_received_payload",
      async (payload: ClientMessageWrapperv1) => {
        this.server.logger.debug({ payload }, "signald payload received");
        if (payload.type === "IncomingMessage") {
          this.receiveMessage(payload.data);
        }
      }
    );
    this.signald.on("transport_sent_payload", async (payload) => {
      this.server.logger.debug({ payload }, "signald payload sent");
    });
  }

  // Subscribe to message delivery for every account signald knows about.
  private async subscribeAll() {
    const result = await this.signald.listAccounts();
    const accounts = result.accounts.map((account) => account.address.number);
    await Promise.all(
      accounts.map(async (account) => {
        await this.signald.subscribe(account);
        this.subscriptions.add(account);
      })
    );
  }

  private async unsubscribeAll() {
    await Promise.all(
      [...this.subscriptions].map(async (account) => {
        await this.signald.unsubscribe(account);
        this.subscriptions.delete(account);
      })
    );
  }

  /** Create a signal bot row owned by the user with the given email. */
  async create(
    phoneNumber: string,
    description: string,
    email: string
  ): Promise<Bot> {
    const db = this.server.db();
    const user = await db.users.findBy({ email });
    const row = await db.signalBots.insert({
      phoneNumber,
      description,
      userId: user.id,
    });
    return row;
  }

  async findAll(): Promise<Bot[]> {
    const db = this.server.db();
    return db.signalBots.findAll();
  }

  async findById(id: string): Promise<Bot> {
    const db = this.server.db();
    return db.signalBots.findById({ id });
  }

  async findByToken(token: string): Promise<Bot> {
    const db = this.server.db();
    return db.signalBots.findBy({ token });
  }

  /** Complete registration with the SMS/voice verification code. */
  async register(bot: Bot, code: string): Promise<any> {
    const address = await this.signald.verify(bot.phoneNumber, code);
    this.server.db().signalBots.updateAuthInfo(bot, address.address.uuid);
  }

  /** Send a plain-text message from the bot's number. */
  async send(bot: Bot, phoneNumber: string, message: string): Promise<any> {
    this.server.logger.debug(
      { us: bot.phoneNumber, then: phoneNumber, message },
      "signald send"
    );
    return await this.signald.send(
      bot.phoneNumber,
      { number: phoneNumber },
      undefined,
      message
    );
  }

  async resetSession(bot: Bot, phoneNumber: string): Promise<any> {
    return await this.signald.resetSession(bot.phoneNumber, {
      number: phoneNumber,
    });
  }

  async requestVoiceVerification(bot: Bot, captcha?: string): Promise<void> {
    this.server.logger.debug(
      { number: bot.phoneNumber, captcha },
      "requesting voice verification for"
    );
    await this.signald.register(bot.phoneNumber, true, captcha);
  }

  async requestSMSVerification(bot: Bot, captcha?: string): Promise<void> {
    this.server.logger.debug(
      { number: bot.phoneNumber, captcha },
      "requesting sms verification for"
    );
    await this.signald.register(bot.phoneNumber, false, captcha);
  }

  // Route an incoming signald message to the bot that owns the account.
  private async receiveMessage(message: IncomingMessagev1) {
    const { account } = message;
    if (!account) {
      // Bug fix: previously this logged but fell through and queried
      // signalBots.findBy({ phoneNumber: undefined }).
      this.server.logger.error({ message }, "invalid message received");
      return;
    }
    const bot = await this.server
      .db()
      .signalBots.findBy({ phoneNumber: account });
    if (!bot) {
      // pino convention: merging object first, then the message string
      // (the old call had them reversed, unlike every other call here).
      this.server.logger.info(
        { account, message },
        "message received for unknown bot"
      );
      return;
    }
    await this.queueMessage(bot, message);
  }

  // Enqueue a content-bearing message for background processing.
  private async queueMessage(bot: Bot, message: IncomingMessagev1) {
    const { timestamp, account, data_message: dataMessage } = message;
    if (!dataMessage?.body && !dataMessage?.attachments) {
      // Receipts, typing indicators etc. carry nothing worth queueing.
      this.server.logger.info({ message }, "message received with no content");
      return;
    }
    if (!timestamp || !account) {
      // Bug fix: previously we still enqueued, producing the job key
      // `signal-bot-<id>-undefined`, which collides across messages.
      this.server.logger.debug({ message }, "invalid message received");
      return;
    }
    const receivedMessage = {
      message,
      botId: bot.id,
      botPhoneNumber: bot.phoneNumber,
    };
    // Return the promise so awaiting callers observe enqueue failures.
    return workerUtils.addJob("signald-message", receivedMessage, {
      jobKey: `signal-bot-${bot.id}-${timestamp}`,
      queueName: `signal-bot-${bot.id}`,
    });
  }
}

View file

@ -0,0 +1,247 @@
import { Server } from "@hapi/hapi";
import { Service } from "@hapipal/schmervice";
import { SavedWhatsappBot as Bot } from "db";
import makeWASocket, { DisconnectReason, proto, downloadContentFromMessage, MediaType } from "@adiwajshing/baileys";
import workerUtils from "../../worker-utils";
import { useDatabaseAuthState } from "../lib/whatsapp-key-store";
import { connect } from "pg-monitor";
export type AuthCompleteCallback = (error?: string) => void;
/**
 * Service that owns the baileys WhatsApp socket connections: one live
 * connection per verified bot, with QR-based pairing, exponential-backoff
 * reconnects, and hand-off of incoming messages to the graphile-worker queue.
 */
export default class WhatsappService extends Service {
  // Live baileys sockets keyed by bot id.
  connections: { [key: string]: any } = {};
  // NOTE(review): never read or written in this class — possibly dead state.
  loginConnections: { [key: string]: any } = {};
  // Browser triple reported to WhatsApp Web during pairing.
  static browserDescription: [string, string, string] = [
    "Metamigo",
    "Chrome",
    "2.0",
  ];
  constructor(server: Server, options: never) {
    super(server, options);
  }
  async initialize(): Promise<void> {
    this.updateConnections();
  }
  async teardown(): Promise<void> {
    this.resetConnections();
  }
  // Simple delay helper used for the reconnect backoff.
  private async sleep(ms: number): Promise<void> {
    console.log(`pausing ${ms}`)
    return new Promise(resolve => setTimeout(resolve, ms));
  }
  // Close every live socket and forget it; errors on close are swallowed.
  private async resetConnections() {
    for (const connection of Object.values(this.connections)) {
      try {
        connection.end(null)
      } catch (error) {
        console.log(error);
      }
    }
    this.connections = {};
  }
  // Open a baileys socket for one bot, wire up its event handlers, and stash
  // it in this.connections. `authCompleteCallback` (pairing flow only) is
  // invoked after the post-login restart reconnects.
  private createConnection(bot: Bot, server: Server, options: any, authCompleteCallback?: any) {
    const { state, saveState } = useDatabaseAuthState(bot, server)
    const connection = makeWASocket({ ...options, auth: state });
    // Reconnect backoff, doubled on each failed attempt.
    let pause = 5000;
    connection.ev.on('connection.update', async (update) => {
      console.log(`Connection updated ${JSON.stringify(update, null, 2)}`)
      const { connection: connectionState, lastDisconnect, qr, isNewLogin } = update
      if (qr) {
        // Persist the QR so the frontend can show it for pairing.
        console.log('got qr code')
        await this.server.db().whatsappBots.updateQR(bot, qr);
      } else if (isNewLogin) {
        console.log("got new login")
      } else if (connectionState === 'open') {
        console.log('opened connection')
      } else if (connectionState === "close") {
        // NOTE(review): `lastDisconnect.error` is accessed without optional
        // chaining here (the next line uses `lastDisconnect?.error`) — this
        // throws if a close event arrives without a lastDisconnect.
        console.log('connection closed due to ', lastDisconnect.error)
        const disconnectStatusCode = (lastDisconnect?.error as any)?.output?.statusCode
        if (disconnectStatusCode === DisconnectReason.restartRequired) {
          console.log('reconnecting after got new login')
          const updatedBot = await this.findById(bot.id);
          this.createConnection(updatedBot, server, options)
          // NOTE(review): called unconditionally — this is undefined on the
          // reconnect paths above, which would throw. Confirm restartRequired
          // only ever fires during the pairing flow.
          authCompleteCallback()
        } else if (disconnectStatusCode !== DisconnectReason.loggedOut) {
          // Anything other than an explicit logout: retry with backoff.
          console.log('reconnecting')
          await this.sleep(pause)
          pause = pause * 2
          this.createConnection(bot, server, options)
        }
      }
    })
    connection.ev.on('chats.set', item => console.log(`recv ${item.chats.length} chats (is latest: ${item.isLatest})`))
    connection.ev.on('messages.set', item => console.log(`recv ${item.messages.length} messages (is latest: ${item.isLatest})`))
    connection.ev.on('contacts.set', item => console.log(`recv ${item.contacts.length} contacts`))
    connection.ev.on('messages.upsert', async m => {
      console.log("messages upsert")
      const { messages } = m;
      if (messages) {
        await this.queueUnreadMessages(bot, messages);
      }
    })
    connection.ev.on('messages.update', m => console.log(m))
    connection.ev.on('message-receipt.update', m => console.log(m))
    connection.ev.on('presence.update', m => console.log(m))
    connection.ev.on('chats.update', m => console.log(m))
    connection.ev.on('contacts.upsert', m => console.log(m))
    // Persist refreshed credentials whenever baileys rotates them.
    connection.ev.on('creds.update', saveState)
    this.connections[bot.id] = connection;
  }
  // Drop all sockets and reconnect every verified bot.
  private async updateConnections() {
    this.resetConnections();
    const bots = await this.server.db().whatsappBots.findAll();
    for await (const bot of bots) {
      if (bot.isVerified) {
        this.createConnection(
          bot,
          this.server,
          {
            browser: WhatsappService.browserDescription,
            printQRInTerminal: false,
            version: [2, 2204, 13],
          })
      }
    }
  }
  // Turn one inbound WebMessageInfo into a "whatsapp-message" worker job,
  // downloading and base64-encoding any media attachment first. Our own
  // messages and status broadcasts are ignored.
  private async queueMessage(bot: Bot, webMessageInfo: proto.WebMessageInfo) {
    const { key, message, messageTimestamp } = webMessageInfo;
    const { remoteJid } = key;
    if (!key.fromMe && message && remoteJid !== "status@broadcast") {
      const isMediaMessage =
        message.audioMessage ||
        message.documentMessage ||
        message.imageMessage ||
        message.videoMessage;
      // NOTE(review): assumes the first key of `message` is the content
      // node — true for single-part messages; confirm for multi-part ones.
      let messageContent = Object.values(message)[0]
      let messageType: MediaType;
      let attachment: string;
      let filename: string;
      let mimetype: string;
      if (isMediaMessage) {
        // Derive download type, filename and mimetype per media kind.
        if (message.audioMessage) {
          messageType = "audio";
          filename =
            key.id + "." + message.audioMessage.mimetype.split("/").pop();
          mimetype = message.audioMessage.mimetype;
        } else if (message.documentMessage) {
          messageType = "document";
          filename = message.documentMessage.fileName;
          mimetype = message.documentMessage.mimetype;
        } else if (message.imageMessage) {
          messageType = "image";
          filename =
            key.id + "." + message.imageMessage.mimetype.split("/").pop();
          mimetype = message.imageMessage.mimetype;
        } else if (message.videoMessage) {
          messageType = "video"
          filename =
            key.id + "." + message.videoMessage.mimetype.split("/").pop();
          mimetype = message.videoMessage.mimetype;
        }
        // Stream the media down and buffer it whole before base64-encoding.
        const stream = await downloadContentFromMessage(messageContent, messageType)
        let buffer = Buffer.from([])
        for await (const chunk of stream) {
          buffer = Buffer.concat([buffer, chunk])
        }
        attachment = buffer.toString("base64");
      }
      if (messageContent || attachment) {
        const receivedMessage = {
          waMessageId: key.id,
          waMessage: JSON.stringify(webMessageInfo),
          waTimestamp: new Date((messageTimestamp as number) * 1000),
          attachment,
          filename,
          mimetype,
          whatsappBotId: bot.id,
          botPhoneNumber: bot.phoneNumber,
        };
        // jobKey = WhatsApp message id: re-delivered messages dedupe.
        workerUtils.addJob("whatsapp-message", receivedMessage, {
          jobKey: key.id,
        });
      }
    }
  }
  private async queueUnreadMessages(bot: Bot, messages: any[]) {
    for await (const message of messages) {
      await this.queueMessage(bot, message);
    }
  }
  /** Create a whatsapp bot row owned by the user with the given email. */
  async create(
    phoneNumber: string,
    description: string,
    email: string
  ): Promise<Bot> {
    const db = this.server.db();
    const user = await db.users.findBy({ email });
    const row = await db.whatsappBots.insert({
      phoneNumber,
      description,
      userId: user.id,
    });
    return row;
  }
  async findAll(): Promise<Bot[]> {
    return this.server.db().whatsappBots.findAll();
  }
  async findById(id: string): Promise<Bot> {
    return this.server.db().whatsappBots.findById({ id });
  }
  async findByToken(token: string): Promise<Bot> {
    return this.server.db().whatsappBots.findBy({ token });
  }
  // Start the pairing flow; `callback` fires after auth completes (see
  // createConnection's restartRequired branch).
  async register(bot: Bot, callback: AuthCompleteCallback): Promise<void> {
    await this.createConnection(bot, this.server, { version: [2, 2204, 13] }, callback);
  }
  // Send a plain-text message; the recipient number is stripped to digits
  // and suffixed with the WhatsApp JID domain.
  async send(bot: Bot, phoneNumber: string, message: string): Promise<void> {
    const connection = this.connections[bot.id];
    const recipient = `${phoneNumber.replace(/\D+/g, "")}@s.whatsapp.net`;
    await connection.sendMessage(recipient, { text: message });
  }
  async receiveSince(bot: Bot, lastReceivedDate: Date): Promise<void> {
    const connection = this.connections[bot.id];
    const messages = await connection.messagesReceivedAfter(
      lastReceivedDate,
      false
    );
    for (const message of messages) {
      this.queueMessage(bot, message);
    }
  }
  // NOTE(review): `lastReceivedDate` is currently ignored — all unread
  // messages are loaded regardless of the requested window.
  async receive(bot: Bot, lastReceivedDate: Date): Promise<any> {
    const connection = this.connections[bot.id];
    // const messages = await connection.messagesReceivedAfter(
    //   lastReceivedDate,
    //   false
    // );
    const messages = await connection.loadAllUnreadMessages();
    return messages;
  }
}

View file

@ -0,0 +1,27 @@
import type { IMain } from "pg-promise";
import type { ISettingsService } from "../services/settings";
import type WhatsappService from "../services/whatsapp";
import type SignaldService from "../services/signald";
import type { IAppConfig } from "../../config";
import type { AppDatabase } from "db";
// add your service interfaces here
// add your service interfaces here
// Shape returned by request.services() (registered via schmervice).
interface AppServices {
  settingsService: ISettingsService;
  whatsappService: WhatsappService;
  signaldService: SignaldService;
}
// extend the hapi types with our services and config
declare module "@hapi/hapi" {
  export interface Request {
    services(): AppServices;
    // pg-promise database accessor — presumably provided by
    // @digiresilience/hapi-pg-promise (see package.json); confirm.
    db(): AppDatabase;
    pgp: IMain;
  }
  export interface Server {
    // Typed application config accessor.
    config(): IAppConfig;
    db(): AppDatabase;
    pgp: IMain;
  }
}

View file

@ -0,0 +1,6 @@
{
"presets": [
"@babel/preset-env",
"@babel/preset-typescript"
]
}

10
metamigo-api/config.ts Normal file
View file

@ -0,0 +1,10 @@
import config, {
loadConfig,
loadConfigRaw,
IAppConfig,
IAppConvict,
} from "config";
export { IAppConvict, IAppConfig, loadConfig, loadConfigRaw };
export default config;

8
metamigo-api/logger.ts Normal file
View file

@ -0,0 +1,8 @@
import { defState } from "@digiresilience/montar";
import { configureLogger } from "common";
import config from "config";
// Montar state: builds the shared logger from app config when the
// "apiLogger" state starts.
export const logger = defState("apiLogger", {
  start: async () => configureLogger(config),
});
export default logger;

77
metamigo-api/package.json Normal file
View file

@ -0,0 +1,77 @@
{
"name": "api",
"version": "0.2.0",
"main": "build/main/cli/index.js",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@adiwajshing/baileys": "5.0.0",
"@adiwajshing/keyed-db": "0.2.4",
"@digiresilience/hapi-nextauth": "0.2.1",
"@digiresilience/hapi-pg-promise": "^0.0.3",
"@digiresilience/montar": "^0.1.6",
"@digiresilience/node-signald": "0.0.3",
"@graphile-contrib/pg-simplify-inflector": "^6.1.0",
"@hapi/basic": "^7.0.0",
"@hapi/boom": "^10.0.0",
"@hapi/wreck": "^18.0.0",
"@hapipal/schmervice": "^2.1.0",
"@hapipal/toys": "^3.2.0",
"blipp": "^4.0.2",
"camelcase-keys": "^8.0.2",
"fluent-ffmpeg": "^2.1.2",
"graphile-migrate": "^1.4.1",
"graphile-worker": "^0.13.0",
"hapi-auth-jwt2": "^10.4.0",
"hapi-postgraphile": "^0.11.0",
"hapi-swagger": "^15.0.0",
"joi": "^17.7.0",
"jsonwebtoken": "^9.0.0",
"jwks-rsa": "^3.0.1",
"long": "^5.2.1",
"p-memoize": "^7.1.1",
"pg-monitor": "^2.0.0",
"pg-promise": "^11.0.2",
"postgraphile-plugin-connection-filter": "^2.3.0",
"remeda": "^1.6.0",
"twilio": "^3.84.1"
},
"devDependencies": {
"@babel/core": "7.20.12",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"@types/jest": "^29.2.5",
"eslint": "^8.32.0",
"pino-pretty": "^9.1.1",
"prettier": "^2.8.3",
"ts-node": "^10.9.1",
"typescript": "4.9.4",
"@types/hapi__wreck": "^17.0.1",
"@types/long": "^4.0.2",
"nodemon": "^2.0.20",
    "@types/node": "*",
"typedoc": "^0.23.24"
},
"nodemonConfig": {
"ignore": [
"docs/*"
],
"ext": "ts,json,js"
},
"scripts": {
"build": "tsc -p tsconfig.json",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"cli": "NODE_ENV=development nodemon --unhandled-rejections=strict build/main/cli/index.js",
"serve": "NODE_ENV=development npm run cli server",
"serve:prod": "NODE_ENV=production npm run cli server",
"worker": "NODE_ENV=development npm run cli worker",
"worker:prod": "NODE_ENV=production npm run cli worker",
"lint:lint": "eslint src --ext .ts",
"lint:prettier": "prettier \"src/**/*.ts\" --list-different",
"lint": "npm run lint:lint && npm run lint:prettier",
"watch:build": "tsc -p tsconfig.json -w"
}
}

View file

@ -0,0 +1,28 @@
import * as Metamigo from "common";
import { defState } from "@digiresilience/montar";
import Manifest from "./manifest";
import config, { IAppConfig } from "../config";
/**
 * Build the Glue manifest and create (optionally start) the server.
 * Note: the `config` parameter shadows the module-level `config` import
 * inside this function.
 */
export const deployment = async (
  config: IAppConfig,
  start = false
): Promise<Metamigo.Server> => {
  // Build the manifest, which describes all the plugins needed for our application server
  const manifest = await Manifest.build(config);
  // Create the server and optionally start it
  const server = Metamigo.deployment(manifest, config, start);
  return server;
};
// Delegate shutdown to the common layer.
export const stopDeployment = async (server: Metamigo.Server): Promise<void> => {
  return Metamigo.stopDeployment(server);
};
// Montar state wrapping the server lifecycle.
// NOTE(review): `stop` passes the defState wrapper itself to stopDeployment,
// not the Metamigo.Server that `start` resolved to — confirm montar unwraps
// state values before invoking stop, otherwise shutdown misbehaves.
const server = defState("server", {
  start: () => deployment(config, true),
  stop: () => stopDeployment(server),
});
export default server;

View file

@ -0,0 +1,79 @@
import * as Glue from "@hapi/glue";
import * as Metamigo from "common";
import * as Blipp from "blipp";
import HapiBasic from "@hapi/basic";
import HapiJwt from "hapi-auth-jwt2";
import HapiPostgraphile from "hapi-postgraphile";
import { getPostGraphileOptions } from "db";
import AppPlugin from "../app";
import type { IAppConfig } from "../config";
/**
 * Assemble the Glue manifest: server settings plus the full plugin list
 * (auth plugins, metamigo base plugins, the app itself, and Postgraphile).
 */
const build = async (config: IAppConfig): Promise<Glue.Manifest> => {
  const { port, address } = config.server;
  const metamigoPlugins = Metamigo.defaultPlugins(config);
  return {
    server: {
      port,
      address,
      debug: false, // We use pino not the built-in hapi logger
      routes: {
        validate: {
          failAction: Metamigo.validatingFailAction,
        },
      },
    },
    register: {
      plugins: [
        // jwt plugin, required for our jwt auth plugin
        { plugin: HapiJwt },
        // Blipp prints the nicely formatted list of endpoints at app boot
        { plugin: Blipp },
        // load the metamigo base plugins
        ...metamigoPlugins,
        // basic authentication, required by hapi-nextauth
        { plugin: HapiBasic },
        // load our main app
        {
          plugin: AppPlugin,
          options: {
            config,
          },
        },
        // load Postgraphile
        {
          plugin: HapiPostgraphile,
          options: {
            route: {
              path: "/graphql",
              options: {
                auth: {
                  strategies: ["nextauth-jwt"],
                  mode: "optional",
                },
              },
            },
            pgConfig: config.postgraphile.authConnection,
            schemaName: "app_public",
            schemaOptions: {
              ...getPostGraphileOptions(),
              jwtAudiences: [config.nextAuth.audience],
              // NOTE(review): empty jwtSecret — presumably auth is handled
              // by the nextauth-jwt strategy above rather than Postgraphile;
              // confirm an empty secret is never used for verification.
              jwtSecret: "",
              // unauthenticated users will hit the database with this role
              pgDefaultRole: "app_anonymous",
            },
          },
        },
      ],
    },
  };
};
const Manifest = {
  build,
};
export default Manifest;

View file

@ -0,0 +1,10 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "build/main",
"types": ["long", "jest", "node"],
"lib": ["es2020", "DOM"]
},
"include": ["**/*.ts", "**/.*.ts"],
"exclude": ["node_modules"]
}

View file

@ -0,0 +1,21 @@
import * as Worker from "graphile-worker";
import { defState } from "@digiresilience/montar";
import config from "./config";
// Create a graphile-worker utils handle bound to the worker DB connection.
const startWorkerUtils = async (): Promise<Worker.WorkerUtils> => {
  const workerUtils = await Worker.makeWorkerUtils({
    connectionString: config.worker.connection,
  });
  return workerUtils;
};
// NOTE(review): `workerUtils` referenced below is the defState wrapper, not
// the Worker.WorkerUtils created in start — confirm montar exposes
// `.release()` on the wrapper, otherwise this stop hook fails at shutdown.
const stopWorkerUtils = async (): Promise<void> => {
  return workerUtils.release();
};
// Montar state other modules import to enqueue jobs (addJob call sites).
const workerUtils = defState("apiWorkerUtils", {
  start: startWorkerUtils,
  stop: stopWorkerUtils,
});
export default workerUtils;

View file

@ -0,0 +1 @@
// Reuse the repository-level ESLint configuration.
// Bug fix: mismatched quotes (was `require('../.eslintrc.js");`) — a syntax
// error that broke linting for this package.
// NOTE(review): if this file is meant to BE the package's eslint config it
// should be `module.exports = require("../.eslintrc.js");` — confirm intent.
require("../.eslintrc.js");

18
metamigo-cli/config.ts Normal file
View file

@ -0,0 +1,18 @@
import { generateConfig, printConfigOptions } from "common";
import { loadConfigRaw } from "config";
// Print a generated sample configuration to stdout.
export const genConf = async (): Promise<void> => {
  const c = await loadConfigRaw();
  const generated = generateConfig(c);
  console.log(generated);
};
// Print the convict schema string to stdout.
export const genSchema = async (): Promise<void> => {
  const c = await loadConfigRaw();
  console.log(c.getSchemaString());
};
// Pretty-print every available config option.
export const listConfig = async (): Promise<void> => {
  const c = await loadConfigRaw();
  printConfigOptions(c);
};

67
metamigo-cli/index.ts Normal file
View file

@ -0,0 +1,67 @@
#!/usr/bin/env node
import { Command } from "commander";
import { startWithout } from "@digiresilience/montar";
import { migrateWrapper } from "db";
import { loadConfig } from "config";
import { genConf, listConfig } from "./config";
import { createTokenForTesting, generateJwks } from "./jwks";
import { exportGraphqlSchema } from "./postgraphile";
import "api/build/main/server";
import "api/build/main/logger";
import "worker/build/main";
const program = new Command();
// Start every montar state except the worker (i.e. run the api server).
export async function runServer(): Promise<void> {
  await startWithout(["worker"]);
}
// Start every montar state except the server (i.e. run the job worker).
export async function runWorker(): Promise<void> {
  await startWithout(["server"]);
}
program
  .command("config-generate")
  .description("Generate a sample JSON configuration file (to stdout)")
  .action(genConf);
program
  .command("config-help")
  .description("Prints the entire convict config ")
  .action(listConfig);
program
  .command("api")
  .description("Run the application api server")
  .action(runServer);
program
  .command("worker")
  .description("Run the worker to process jobs")
  .action(runWorker);
// Pass-through to graphile-migrate with the app config loaded.
program
  .command("db <commands...>")
  .description("Run graphile-migrate commands with your app's config loaded.")
  .action(async (args) => {
    const config = await loadConfig();
    return migrateWrapper(args, config);
  });
program
  .command("gen-jwks")
  .description("Generate the JWKS")
  .action(generateJwks);
program
  .command("gen-testing-jwt")
  .description("Generate a JWT for the test suite")
  .action(createTokenForTesting);
program
  .command("export-graphql-schema")
  .description("Export the graphql schema")
  .action(exportGraphqlSchema);
program.parse(process.argv);

68
metamigo-cli/jwks.ts Normal file
View file

@ -0,0 +1,68 @@
import jose from "node-jose";
import * as jwt from "jsonwebtoken";
// Build an in-memory JWK keystore with one AES-256-GCM encryption key and
// one HS512 signing key.
const generateKeystore = async () => {
  const keystore = jose.JWK.createKeyStore();
  const keySpecs = [
    { alg: "A256GCM", use: "enc" },
    { alg: "HS512", use: "sig" },
  ];
  for (const { alg, use } of keySpecs) {
    await keystore.generate("oct", 256, { alg, use });
  }
  return keystore;
};
// JSON-encode a value and base64 it so it can be pasted safely into
// env/config files. (Param was previously an implicit `any`.)
const safeString = (input: unknown): string => {
  return Buffer.from(JSON.stringify(input)).toString("base64");
};
// Pretty-print JSON with 2-space indentation. (Param was an implicit `any`.)
const stringify = (v: unknown): string => JSON.stringify(v, undefined, 2);
// Produce the base64-wrapped nextAuth signing + encryption keys in the
// shape the app config expects.
const _generateJwks = async () => {
  const keystore = await generateKeystore();
  const [encryptionKey] = keystore.all({ use: "enc" });
  const [signingKey] = keystore.all({ use: "sig" });
  return {
    nextAuth: {
      signingKeyB64: safeString(signingKey.toJSON(true)),
      encryptionKeyB64: safeString(encryptionKey.toJSON(true)),
    },
  };
};
/** Generate a fresh JWKS config block and print it to stdout. */
export const generateJwks = async (): Promise<void> => {
  const jwks = await _generateJwks();
  console.log(stringify(jwks));
};
/**
 * Generate a throwaway JWKS and a long-lived HS512 JWT for the test suite,
 * printing both (config block and token) to stdout.
 */
export const createTokenForTesting = async (): Promise<void> => {
  const keys = await _generateJwks();
  // Unwrap the base64-wrapped JWK JSON, then decode its raw `k` material.
  const signingJwkJson = Buffer.from(
    keys.nextAuth.signingKeyB64,
    "base64"
  ).toString("utf-8");
  const signingKey = Buffer.from(JSON.parse(signingJwkJson).k, "base64");
  const claims = {
    iss: "Test Env",
    iat: 1606893960,
    aud: "metamigo",
    sub: "abel@guardianproject.info",
    name: "Abel Luck",
    email: "abel@guardianproject.info",
    userRole: "admin",
  };
  const token = jwt.sign(claims, signingKey, {
    expiresIn: "100y",
    algorithm: "HS512",
  });
  console.log("CONFIG");
  console.log(stringify(keys));
  console.log();
  console.log("TOKEN");
  console.log(token);
  console.log();
};

39
metamigo-cli/package.json Normal file
View file

@ -0,0 +1,39 @@
{
"name": "cli",
"version": "0.2.0",
"main": "build/main/index.js",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@digiresilience/montar": "^0.1.6",
"commander": "^10.0.0",
"graphile-migrate": "^1.4.1",
"graphile-worker": "^0.13.0",
"node-jose": "^2.1.1"
},
"devDependencies": {
"@babel/core": "7.20.12",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"@types/jest": "^29.2.5",
"eslint": "^8.32.0",
"jest": "^29.3.1",
"jest-junit": "^15.0.0",
"pino-pretty": "^9.1.1",
"prettier": "^2.8.3",
"ts-node": "^10.9.1",
"typedoc": "^0.23.24",
"typescript": "4.9.4",
"nodemon": "^2.0.20"
},
"scripts": {
"build": "tsc -p tsconfig.json",
"cli": "NODE_ENV=development node --unhandled-rejections=strict build/main/index.js",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"lint:lint": "eslint src --ext .ts",
"lint:prettier": "prettier \"src/**/*.ts\" --list-different",
"test": "echo no tests",
"lint": "yarn lint:lint && yarn lint:prettier"
}
}

View file

@ -0,0 +1,37 @@
import { writeFileSync } from "fs";
import {
getIntrospectionQuery,
graphqlSync,
lexicographicSortSchema,
printSchema,
} from "graphql";
import { createPostGraphileSchema } from "postgraphile";
import { Pool } from "pg";
import { loadConfig } from "config";
import { getPostGraphileOptions } from "db";
/**
 * Export the PostGraphile-generated GraphQL schema to disk: the SDL
 * (lexicographically sorted) and the raw introspection JSON consumed by the
 * frontend. Paths are relative to the process working directory.
 */
export const exportGraphqlSchema = async (): Promise<void> => {
  const config = await loadConfig();
  // NOTE(review): this pool is never used below — the schema is built from
  // the auth connection string directly. Confirm whether it can be removed.
  const rootPgPool = new Pool({
    connectionString: config.db.connection,
  });
  const exportSchema = `../../data/schema.graphql`;
  const exportJson = `../../frontend/lib/graphql-schema.json`;
  try {
    const schema = await createPostGraphileSchema(
      config.postgraphile.authConnection,
      "app_public",
      getPostGraphileOptions()
    );
    const sorted = lexicographicSortSchema(schema);
    // NOTE(review): positional graphqlSync(schema, source) is removed in
    // graphql@16 — switch to the args-object form when upgrading.
    const json = graphqlSync(schema, getIntrospectionQuery());
    writeFileSync(exportSchema, printSchema(sorted));
    writeFileSync(exportJson, JSON.stringify(json));
    console.log(`GraphQL schema exported to ${exportSchema}`);
    console.log(`GraphQL schema json exported to ${exportJson}`);
  } finally {
    // Await pool shutdown so the process doesn't exit while connection
    // teardown is still in flight (the original fired and forgot).
    await rootPgPool.end();
  }
};

View file

@ -0,0 +1,8 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "build/main"
},
"include": ["**/*.ts", "**/.*.ts", "config.ts"],
"exclude": ["node_modules", "**/*.spec.ts", "**/*.test.ts"]
}

View file

@ -0,0 +1,13 @@
require('@digiresilience/eslint-config-metamigo/patch/modern-module-resolution');
module.exports = {
extends: [
"@digiresilience/eslint-config-metamigo/profile/node",
"@digiresilience/eslint-config-metamigo/profile/typescript"
],
rules: {
// TODO: enable this after jest fixes this issue https://github.com/nodejs/node/issues/38343
"unicorn/prefer-node-protocol": "off"
},
parserOptions: { tsconfigRootDir: __dirname }
};

13
metamigo-common/.gitignore vendored Normal file
View file

@ -0,0 +1,13 @@
.idea/*
.nyc_output
build
node_modules
test
src/*/*.js
coverage
*.log
package-lock.json
.npmrc
.yalc
yalc.lock
junit.xml

View file

@ -0,0 +1,13 @@
.eslintrc.js
.editorconfig
.prettierignore
.versionrc
Makefile
.gitlab-ci.yml
coverage
jest*
tsconfig*
*.log
test*
.yalc
yalc.lock

View file

@ -0,0 +1,2 @@
# package.json is formatted by package managers, so we ignore it here
package.json

View file

@ -0,0 +1,5 @@
{
"presets": [
"@digiresilience/babel-preset-metamigo"
]
}

View file

@ -0,0 +1,31 @@
import { ConvictSchema } from "./types";
/** Application identity metadata (name, version, boot-banner font). */
export interface IAppMetaConfig {
  name: string;
  version: string;
  figletFont: string;
}

/**
 * Convict schema for [[IAppMetaConfig]].
 *
 * Every entry sets `skipGenerate`, so none of these appear in generated
 * sample config files — values come from the environment at runtime.
 */
export const AppMetaConfig: ConvictSchema<IAppMetaConfig> = {
  version: {
    doc: "The current application version",
    format: String,
    // npm/yarn inject this env var when commands run via package scripts.
    env: "npm_package_version",
    default: null,
    skipGenerate: true,
  },
  name: {
    doc: "Application name",
    format: String,
    env: "npm_package_name",
    default: null,
    skipGenerate: true,
  },
  figletFont: {
    doc: "The figlet font name used to print the site name on boot",
    format: String,
    env: "FIGLET_FONT",
    default: "Sub-Zero",
    skipGenerate: true,
  },
};

View file

@ -0,0 +1,23 @@
import { ConvictSchema } from "./types";
/** Session lifetime tuning (values in seconds). */
export interface ISessionConfig {
  sessionMaxAgeSeconds: number;
  sessionUpdateAgeSeconds: number;
}

/** Convict schema for [[ISessionConfig]]. */
export const SessionConfig: ConvictSchema<ISessionConfig> = {
  sessionMaxAgeSeconds: {
    doc: "How long in seconds until an idle session expires and is no longer valid.",
    format: "positiveInt",
    default: 30 * 24 * 60 * 60, // 30 days
    env: "SESSION_MAX_AGE_SECONDS",
  },
  sessionUpdateAgeSeconds: {
    doc: `Throttle how frequently in seconds to write to database to extend a session.
  Use it to limit write operations. Set to 0 to always update the database.
  Note: This option is ignored if using JSON Web Tokens`,
    format: "positiveInt",
    default: 24 * 60 * 60, // 24 hours
    env: "SESSION_UPDATE_AGE_SECONDS",
  },
};

View file

@ -0,0 +1,32 @@
import { ConvictSchema } from "./types";
/** CORS allow-lists applied by the API server. */
export interface ICorsConfig {
  allowedMethods: Array<string>;
  allowedOrigins: Array<string>;
  allowedHeaders: Array<string>;
}

/** Convict schema for [[ICorsConfig]]. */
export const CorsConfig: ConvictSchema<ICorsConfig> = {
  allowedMethods: {
    doc: "The allowed CORS methods",
    format: "Array",
    env: "CORS_ALLOWED_METHODS",
    default: ["GET", "PUT", "POST", "PATCH", "DELETE", "HEAD", "OPTIONS"],
  },
  allowedOrigins: {
    doc: "The allowed origins",
    format: "Array",
    env: "CORS_ALLOWED_ORIGINS",
    // Empty by default — each deployment is expected to supply its origins.
    default: [],
  },
  allowedHeaders: {
    doc: "The allowed headers",
    format: "Array",
    env: "CORS_ALLOWED_HEADERS",
    default: [
      "content-type",
      "authorization",
      // Cloudflare Access identity header (see logging redaction list).
      "cf-access-authenticated-user-email",
    ],
  },
};

View file

@ -0,0 +1,58 @@
import * as Joi from "joi";
import type { Format } from "convict";
// Normalize any convict-supplied value to its string form.
const coerceString = (value: any): string => {
  return value.toString();
};
// Wrap a Joi schema as a convict `validate` function (throws on failure).
const validator = (schema: any) => {
  return (value: any) => Joi.assert(value, schema);
};
// Joi schema: URL with an http or https scheme only.
const url = Joi.string().uri({
  scheme: ["http", "https"],
});
// Joi schema: IPv4/IPv6 address, CIDR suffix optional.
const ip = Joi.string().ip({ version: ["ipv4", "ipv6"], cidr: "optional" });
/**
 * Additional configuration value formats for convict.
 *
 * You can use these to achieve richer validation for your configuration.
 *
 * Registered via convict.addFormats(), which keys formats by this object's
 * property names; the inner `name` fields matter only when a format is
 * registered individually with convict.addFormat(), so they must match.
 */
export const MetamigoConvictFormats: { [index: string]: Format } = {
  positiveInt: {
    // Fixed typo: was "positveInt".
    name: "positiveInt",
    coerce: (n: string): number => Number.parseInt(n, 10),
    validate: validator(Joi.number().positive().integer()),
  },
  port: {
    name: "port",
    coerce: (n: string): number => Number.parseInt(n, 10),
    validate: validator(Joi.number().port()),
  },
  ipaddress: {
    name: "ipaddress",
    coerce: coerceString,
    validate: validator(ip),
  },
  url: {
    name: "url",
    coerce: coerceString,
    validate: validator(url),
  },
  uri: {
    name: "uri",
    coerce: coerceString,
    validate: validator(Joi.string().uri()),
  },
  optionalUri: {
    // Was "uri", which collided with the format above when registered via
    // convict.addFormat().
    name: "optionalUri",
    coerce: coerceString,
    // Like `uri`, but an empty string is also accepted.
    validate: validator(Joi.string().uri().allow("")),
  },
  email: {
    name: "email",
    coerce: coerceString,
    validate: validator(Joi.string().email()),
  },
  uuid: {
    name: "uuid",
    coerce: coerceString,
    validate: validator(Joi.string().guid()),
  },
};

View file

@ -0,0 +1,44 @@
import convict from "convict";
// Copy one leaf's default value into the accumulator, unless the schema
// entry opted out of sample-file generation via skipGenerate.
const visitLeaf = (acc: any, key: any, leaf: any) => {
  if (leaf.skipGenerate) {
    return;
  }
  // Both branches of the original assigned the same value; a missing default
  // is recorded as an explicit `undefined` (later dropped by JSON.stringify).
  acc[key] = leaf.default;
};

// Recursively walk convict's parsed schema (`_cvtProperties`), building a
// plain object of default values. `key === ""` marks the root node, which
// writes directly into the caller's accumulator.
const visitNode = (acc: any, node: any, key = "") => {
  if (node._cvtProperties) {
    const childKeys = Object.keys(node._cvtProperties);
    let subacc: any;
    if (key === "") {
      subacc = acc;
    } else {
      subacc = {};
      acc[key] = subacc;
    }
    // Renamed from `key` to avoid shadowing the parameter above.
    childKeys.forEach((childKey) => {
      visitNode(subacc, node._cvtProperties[childKey], childKey);
    });
    // In the case that the entire sub-tree specified skipGenerate, remove the empty node
    if (Object.keys(subacc).length === 0) {
      delete acc[key];
    }
  } else {
    visitLeaf(acc, key, node);
  }
};

/**
 * Render a sample configuration file (JSON, 1-space indent) from a convict
 * config's schema defaults. Entries marked skipGenerate are omitted.
 *
 * @param conf a convict config instance
 * @returns the JSON text (the original annotated this as `unknown`, but it
 *          has always been a string)
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const generateConfig = (conf: convict.Config<any>): string => {
  const schema = conf.getSchema();
  const generated = {};
  visitNode(generated, schema);
  return JSON.stringify(generated, undefined, 1);
};

View file

@ -0,0 +1,142 @@
import process from "process";
import convict, { SchemaObj } from "convict";
import { IServerConfig, ServerConfig } from "./server";
import { IMetricsConfig, MetricsConfig } from "./metrics-server";
import { IAppMetaConfig, AppMetaConfig } from "./app-meta";
import { ICorsConfig, CorsConfig } from "./cors";
import { ILoggingConfig, LoggingConfig } from "./logging";
import { ExtendedConvict } from "./types";
import { MetamigoConvictFormats } from "./formats";
/** The application runtime environment. */
type IEnvConfig = "production" | "development" | "test";

/** Convict schema entry for the NODE_ENV-driven environment value. */
const EnvConfig: SchemaObj<IEnvConfig> = {
  doc: "The application environment",
  format: ["production", "development", "test"],
  default: "development",
  env: "NODE_ENV",
};

/** The base schema every metamigo application configuration starts from. */
export const configBaseSchema = {
  env: EnvConfig,
  server: ServerConfig,
  meta: AppMetaConfig,
  cors: CorsConfig,
  metrics: MetricsConfig,
  logging: LoggingConfig,
};

/**
 *
 * The metamigo base configuration object. Use this for easy typed access to your
 * config.
 *
 */
interface IMetamigoConfig {
  env: IEnvConfig;
  server: IServerConfig;
  meta: IAppMetaConfig;
  cors: ICorsConfig;
  metrics: IMetricsConfig;
  logging: ILoggingConfig;
  // Derived helper flags, populated by loadConfigurationRaw.
  isProd?: boolean;
  isTest?: boolean;
  isDev?: boolean;
  // Application-specific sections; deliberately loose here.
  frontend: any;
  nextAuth: any;
}

export type IMetamigoConvict = ExtendedConvict<IMetamigoConfig>;

export type {
  IServerConfig,
  IMetricsConfig,
  IAppMetaConfig,
  ICorsConfig,
  ILoggingConfig,
  IMetamigoConfig,
};
export * from "./formats";
export * from "./generate";
export * from "./print";
export * from "./types";
/**
 * Loads your applications configuration from environment variables and configuration files (see METAMIGO_CONFIG).
 *
 * @param schema your schema definition
 * @param override an optional object with config value that will override defaults but not config files and env vars (see [convict precedence docs](https://github.com/mozilla/node-convict/tree/master/packages/convict#precedence-order ))
 * @returns the raw convict config object
 * @throws Error when the config file is invalid JSON or validation fails
 */
export const loadConfigurationRaw = async <T extends IMetamigoConfig>(
  schema: convict.Schema<T>,
  override?: Partial<T>
): Promise<ExtendedConvict<T>> => {
  convict.addFormats(MetamigoConvictFormats);
  const config: ExtendedConvict<T> = convict(schema);
  try {
    if (process.env.METAMIGO_CONFIG) {
      config.loadFile(process.env.METAMIGO_CONFIG);
    }
  } catch (error) {
    const msg = `
🚫 Your application's configuration is invalid JSON. 🚫
${error}
`;
    throw new Error(msg);
  }
  if (override) {
    config.load(override);
  }
  // Derive the env helper flags only after every source (defaults, env vars,
  // config file, overrides) has been loaded. The original computed them
  // before loadFile(), so an "env" supplied via the config file was ignored.
  const env = config.get("env");
  config.isProd = env === "production";
  config.isTest = env === "test";
  config.isDev = env === "development";
  try {
    config.validate({ allowed: "strict" });
  } catch (error: any) {
    const msg = `
🚫 Your application's configuration is invalid. 🚫
${error.message}
`;
    throw new Error(msg);
  }
  // set our helpers
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const configDirty = config as any;
  configDirty.set("isProd", config.isProd);
  configDirty.set("isTest", config.isTest);
  configDirty.set("isDev", config.isDev);
  return config;
};
/**
 * Loads your applications configuration from environment variables and configuration files (see METAMIGO_CONFIG).
 *
 * Thin wrapper over [[loadConfigurationRaw]] that unwraps the convict
 * object into plain properties.
 *
 * @param schema your schema definition
 * @param override an optional object with config value that will override defaults but not config files and env vars (see [convict precedence docs](https://github.com/mozilla/node-convict/tree/master/packages/convict#precedence-order ))
 * @returns a vanilla javascript object with the config loaded values
 */
export const loadConfiguration = async <T extends IMetamigoConfig>(
  schema: convict.Schema<T>,
  override?: Partial<T>
): Promise<T> => {
  const raw = await loadConfigurationRaw(schema, override);
  return raw.getProperties();
};

View file

@ -0,0 +1,90 @@
import { ConvictSchema } from "./types";
/** Pino / hapi-pino logging configuration. */
export interface ILoggingConfig {
  level: string;
  // Whether SQL statements should be logged.
  sql: boolean;
  // Pino redaction paths — always stripped from log output.
  redact: string[];
  ignorePaths: string[];
  ignoreTags: string[];
  requestIdHeader: string;
  logRequestStart: boolean;
  logRequestComplete: boolean;
  logRequestPayload: boolean;
  logRequestQueryParams: boolean;
  // true/false, or "auto" (resolved elsewhere at logger setup time).
  prettyPrint: boolean | "auto";
}

/** Convict schema for [[ILoggingConfig]]. */
export const LoggingConfig: ConvictSchema<ILoggingConfig> = {
  level: {
    doc: "The logging level",
    format: ["trace", "debug", "info", "warn", "error"],
    default: "info",
    env: "LOG_LEVEL",
  },
  sql: {
    doc: "Whether to log sql statements",
    format: "Boolean",
    default: false,
    env: "LOG_SQL",
  },
  redact: {
    doc: "Pino redaction array. These are always redacted. see https://getpino.io/#/docs/redaction",
    format: "Array",
    // Defaults cover auth material, cookies, and Cloudflare identity / IP headers.
    default: [
      "req.remoteAddress",
      "req.headers.authorization",
      `req.headers["cf-access-jwt-assertion"]`,
      `req.headers["cf-access-authenticated-user-email"]`,
      `req.headers["cf-connecting-ip"]`,
      `req.headers["cf-ipcountry"]`,
      `req.headers["x-forwarded-for"]`,
      "req.headers.cookie",
    ],
  },
  // No env override for the two ignore lists — config file only.
  ignorePaths: {
    doc: "Ignore http paths (exact) when logging requests",
    format: "Array",
    default: ["/graphql"],
  },
  ignoreTags: {
    doc: "Ignore routes tagged with these tags when logging requests",
    format: "Array",
    default: ["status", "swagger", "nolog"],
  },
  requestIdHeader: {
    doc: "The header where the request id lives",
    format: String,
    default: "x-request-id",
    env: "REQUEST_ID_HEADER",
  },
  logRequestStart: {
    doc: "Whether hapi-pino should add a log.info() at the beginning of Hapi requests for the given Request.",
    format: "Boolean",
    default: false,
    env: "LOG_REQUEST_START",
  },
  logRequestComplete: {
    doc: "Whether hapi-pino should add a log.info() at the completion of Hapi requests for the given Request.",
    format: "Boolean",
    default: true,
    env: "LOG_REQUEST_COMPLETE",
  },
  logRequestPayload: {
    doc: "When enabled, add the request payload as payload to the response event log.",
    format: "Boolean",
    default: false,
    env: "LOG_REQUEST_PAYLOAD",
  },
  logRequestQueryParams: {
    doc: "When enabled, add the request query as queryParams to the response event log.",
    format: "Boolean",
    default: false,
    env: "LOG_REQUEST_QUERY_PARAMS",
  },
  prettyPrint: {
    doc: "Pretty print the logs",
    format: ["auto", true, false],
    default: "auto",
    env: "LOG_PRETTY_PRINT",
  },
};

View file

@ -0,0 +1,22 @@
import { ConvictSchema } from "./types";
/** Bind address/port for the standalone Prometheus metrics server. */
export interface IMetricsConfig {
  address: string;
  port: number;
}

/** Convict schema for [[IMetricsConfig]]. */
export const MetricsConfig: ConvictSchema<IMetricsConfig> = {
  address: {
    doc: "The ip address to bind the prometheus metrics to",
    format: "ipaddress",
    // Loopback by default: metrics are not exposed publicly.
    default: "127.0.0.1",
    env: "METRICS_ADDRESS",
  },
  port: {
    doc: "The port to bind the prometheus metrics to",
    format: "port",
    default: 3002,
    env: "METRICS_PORT",
    // NOTE(review): this maps the generic `--port` CLI argument to the
    // *metrics* port (ServerConfig.port has no arg) — confirm intentional.
    arg: "port",
  },
};

View file

@ -0,0 +1,41 @@
import chalk from "chalk";
import convict from "convict";
// Print one leaf config entry: dotted name, doc line, default (or a red
// "required" marker when there is no default), format, and env var.
const visitLeaf = (path: any, key: any, leaf: any) => {
  if (leaf.skipGenerate) {
    return;
  }
  let name = `${path}.${key}`;
  if (path.length === 0) name = key;
  console.log(chalk.green(name));
  console.log(leaf.doc);
  if (leaf.default === undefined) {
    console.log(chalk.red("\t required"));
  } else {
    console.log(`\tdefault: ${JSON.stringify(leaf.default)}`);
  }
  console.log(`\tformat: ${leaf.format}`);
  console.log(`\tenv: ${leaf.env}`);
};

// Walk convict's parsed schema (`_cvtProperties`), accumulating the dotted
// path down to each leaf. `key === ""` marks the root node.
const visitNode = (path: any, node: any, key = "") => {
  if (node._cvtProperties) {
    const childKeys = Object.keys(node._cvtProperties);
    // Accumulate the full dotted path. The original used `${key}` alone,
    // which dropped ancestor segments for schemas nested 3+ levels deep.
    let subpath = path;
    if (key !== "") {
      subpath = path.length === 0 ? key : `${path}.${key}`;
    }
    childKeys.forEach((childKey) => {
      visitNode(subpath, node._cvtProperties[childKey], childKey);
    });
    console.log();
  } else {
    visitLeaf(path, key, node);
  }
};

/**
 * Pretty-print every option of a convict schema (name, doc, default,
 * format, env var) to stdout.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const printConfigOptions = (conf: convict.Config<any>): void => {
  const schema = conf.getSchema();
  visitNode("", schema);
};

View file

@ -0,0 +1,21 @@
import { ConvictSchema } from "./types";
/** Bind address/port for the main application HTTP server. */
export interface IServerConfig {
  address: string;
  port: number;
}

/** Convict schema for [[IServerConfig]]. */
export const ServerConfig: ConvictSchema<IServerConfig> = {
  address: {
    doc: "The IP address to bind the server to",
    format: "ipaddress",
    // Listens on all interfaces by default (unlike the metrics server).
    default: "0.0.0.0",
    env: "SERVER_ADDRESS",
  },
  port: {
    doc: "The port to bind the server to",
    format: "port",
    default: 3001,
    env: "SERVER_PORT",
  },
};

View file

@ -0,0 +1,26 @@
import convict from "convict";
/*
interface SSMObj {
  path: string;
}
*/
// A convict SchemaObj extended with metamigo-specific metadata.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
interface ConvictSchemaObj<T = any> extends convict.SchemaObj<T> {
  // ssm?: SSMObj;
  /**
   * The config item will be ignored for purposes of config file generation
   */
  skipGenerate?: boolean;
}

/** A convict schema whose entries may carry metamigo extensions (skipGenerate). */
export type ConvictSchema<T> = {
  [P in keyof T]: convict.Schema<T[P]> | ConvictSchemaObj<T[P]>;
};

/** A convict config augmented with cached environment helper flags. */
export interface ExtendedConvict<T> extends convict.Config<T> {
  isProd?: boolean;
  isTest?: boolean;
  isDev?: boolean;
}

View file

@ -0,0 +1,295 @@
/* eslint-disable @typescript-eslint/ban-types,@typescript-eslint/no-explicit-any,max-params */
import * as Boom from "@hapi/boom";
import * as Hapi from "@hapi/hapi";
import { CrudRepository } from "../records/crud-repository";
import { createResponse } from "../helpers/response";
import {
PgRecordInfo,
UnsavedR,
SavedR,
KeyType,
} from "../records/record-info";
/**
 *
 * A generic controller that exposes a [[CrudRepository]] as HTTP
 * endpoints with full POST, PUT, GET, DELETE semantics.
 *
 * The controller yanks the instance of the crud repository out of the request at runtime.
 * This assumes you're following the pattern exposed with the hapi-pg-promise plugin.
 *
 * @typeParam TUnsavedR the record type before it is persisted
 * @typeParam TSavedR the persisted record type (unsaved record + id key)
 * @typeParam IdKeyT the shape of the record's id key
 */
export abstract class AbstractCrudController<
  TUnsavedR,
  TSavedR extends TUnsavedR & IdKeyT,
  IdKeyT extends object
> {
  /** The key at which the repository for the record can be accessed (that is, request.db()[repoName]). */
  abstract repoName: string;
  /** The placeholder used in the Hapi route for the id of the record. */
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore
  abstract paramsIdField = "id";
  /** The decorated function on the request to use (defaults to request.db()). */
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore
  abstract dbDecoration = "db";
  abstract recordType: PgRecordInfo<TUnsavedR, TSavedR, IdKeyT>;

  /** Resolve this controller's repository from the request decoration. */
  repo(request: Hapi.Request): CrudRepository<TUnsavedR, TSavedR, IdKeyT> {
    // @ts-expect-error
    const db = request[this.dbDecoration];
    if (!db)
      throw Boom.badImplementation(
        `CrudController for table ${this.recordType.tableName} could not find request decoration '${this.dbDecoration}'`
      );
    const repo = db()[this.repoName];
    if (!repo)
      throw Boom.badImplementation(
        `CrudController for table ${this.recordType.tableName} could not find repository for '${this.dbDecoration}().${this.repoName}'`
      );
    return repo;
  }

  /** Wrap an unexpected error as a 500 response envelope (shared by every handler). */
  private fail(
    request: Hapi.Request,
    toolkit: Hapi.ResponseToolkit,
    error: any
  ) {
    return toolkit.response(
      createResponse(request, {
        boom: Boom.badImplementation(error),
      })
    );
  }

  /**
   * Creates a new record from the request payload.
   */
  public create = async (
    request: Hapi.Request,
    toolkit: Hapi.ResponseToolkit
  ): Promise<any> => {
    try {
      // would love to know how to get rid of this double cast hack
      const payload: TSavedR = <TSavedR>(<any>request.payload);
      const data: TSavedR = await this.repo(request).insert(payload);
      return toolkit.response(
        createResponse(request, {
          value: data,
        })
      );
    } catch (error: any) {
      return this.fail(request, toolkit, error);
    }
  };

  /**
   * Updates a record by ID. This method can accept partial updates.
   * Responds 404 when the record does not exist.
   */
  public updateById = async (
    request: Hapi.Request,
    toolkit: Hapi.ResponseToolkit
  ): Promise<any> => {
    try {
      const payload: Partial<TSavedR> = <any>request.payload;
      const id: IdKeyT = request.params[this.paramsIdField];
      const updatedRow: TSavedR = await this.repo(request).updateById(
        id,
        payload
      );
      if (!updatedRow) {
        return toolkit.response(
          createResponse(request, {
            boom: Boom.notFound(),
          })
        );
      }
      return toolkit.response(
        createResponse(request, {
          value: updatedRow,
        })
      );
    } catch (error: any) {
      return this.fail(request, toolkit, error);
    }
  };

  /**
   * Return a record given its id. Responds 404 when not found.
   */
  public getById = async (
    request: Hapi.Request,
    toolkit: Hapi.ResponseToolkit
  ): Promise<any> => {
    try {
      const id: IdKeyT = request.params[this.paramsIdField];
      // @ts-expect-error
      const row: TSavedR = await this.repo(request).findById(id);
      if (!row) {
        return toolkit.response(
          createResponse(request, {
            boom: Boom.notFound(),
          })
        );
      }
      return toolkit.response(
        createResponse(request, {
          value: row,
        })
      );
    } catch (error: any) {
      return this.fail(request, toolkit, error);
    }
  };

  /**
   * Return all records.
   */
  public getAll = async (
    request: Hapi.Request,
    toolkit: Hapi.ResponseToolkit
  ): Promise<any> => {
    try {
      const rows: TSavedR[] = await this.repo(request).findAll();
      return toolkit.response(
        createResponse(request, {
          value: rows,
        })
      );
    } catch (error: any) {
      return this.fail(request, toolkit, error);
    }
  };

  /**
   * Delete a record given its id. Responds 404 when nothing was removed.
   */
  public deleteById = async (
    request: Hapi.Request,
    toolkit: Hapi.ResponseToolkit
  ): Promise<any> => {
    try {
      const id: IdKeyT = request.params[this.paramsIdField];
      const count = await this.repo(request).removeById(id);
      if (count === 0) {
        // Wrap in toolkit.response() like every other handler; the original
        // returned the bare envelope here, inconsistent with its siblings.
        return toolkit.response(
          createResponse(request, { boom: Boom.notFound() })
        );
      }
      return toolkit.response(
        createResponse(request, {
          value: { id },
        })
      );
    } catch (error: any) {
      return this.fail(request, toolkit, error);
    }
  };
}
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function unboundCrudController<TRecordInfo extends PgRecordInfo>(
  aRecordType: TRecordInfo
) {
  // Concrete subclass bound to the given record type; repo name and request
  // wiring are supplied per-instance via the constructor.
  return class CrudController extends AbstractCrudController<
    UnsavedR<TRecordInfo>,
    SavedR<TRecordInfo>,
    KeyType<TRecordInfo>
  > {
    public readonly recordType = aRecordType;

    constructor(
      public readonly repoName: string,
      public readonly paramsIdField = "id",
      public readonly dbDecoration = "db"
    ) {
      super();
    }
  };
}
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function CrudControllerBase<Rec extends PgRecordInfo>(recordType: Rec) {
  // Convenience alias; the type argument is inferred from `recordType`.
  return unboundCrudController(recordType);
}
/**
 * Build the five standard CRUD routes (create, update-by-id, get-by-id,
 * get-all, delete-by-id) for a resource handled by the given controller.
 */
export const crudRoutesFor = (
  name: string,
  path: string,
  controller: AbstractCrudController<any, any, any>,
  idParam: string,
  validate: Record<string, Hapi.RouteOptionsValidate>
): Hapi.ServerRoute[] => {
  // Item routes address a single record through the id path parameter.
  const itemPath = `${path}/{${idParam}}`;
  const collectionPath = `${path}`;
  return [
    {
      method: "POST",
      path: collectionPath,
      options: {
        handler: controller.create,
        validate: validate.create,
        description: `Method that creates a new ${name}.`,
        tags: ["api", name],
      },
    },
    {
      method: "PUT",
      path: itemPath,
      options: {
        handler: controller.updateById,
        validate: validate.updateById,
        description: `Method that updates a ${name} by its id.`,
        tags: ["api", name],
      },
    },
    {
      method: "GET",
      path: itemPath,
      options: {
        handler: controller.getById,
        validate: validate.getById,
        description: `Method that gets a ${name} by its id.`,
        tags: ["api", name],
      },
    },
    {
      method: "GET",
      path: collectionPath,
      options: {
        handler: controller.getAll,
        description: `Method that gets all ${name}s.`,
        tags: ["api", name],
      },
    },
    {
      method: "DELETE",
      path: itemPath,
      options: {
        handler: controller.deleteById,
        validate: validate.deleteById,
        description: `Method that deletes a ${name} by its id.`,
        tags: ["api", name],
      },
    },
  ];
};

View file

@ -0,0 +1,185 @@
/* eslint-disable unicorn/no-null,max-params */
import { createHash, randomBytes } from "crypto";
import type { AdapterInstance } from "next-auth/adapters";
import omit from "lodash/omit";
import type { IMetamigoRepositories } from "../records";
import type { UnsavedAccount, SavedAccount } from "../records/account";
import type { UserId, UnsavedUser, SavedUser } from "../records/user";
import type { UnsavedSession, SavedSession } from "../records/session";
// Sessions expire after 30 days of being idle
export const defaultSessionMaxAge = 30 * 24 * 60 * 60 * 1000;
// Sessions updated only if session is greater than this value (0 = always)
// (name keeps its historical typo for backward compatibility)
export const defaulteSessionUpdateAge = 24 * 60 * 60 * 1000;

// Stable identifier for a (provider, account) pair: sha256 over "provider:account".
const getCompoundId = (providerId: any, providerAccountId: any) => {
  const hash = createHash("sha256");
  hash.update(`${providerId}:${providerAccountId}`);
  return hash.digest("hex");
};

// 32 random bytes, hex-encoded (64 chars) — used for session/access tokens.
const randomToken = () => randomBytes(32).toString("hex");
/**
 * A next-auth database adapter backed by the metamigo repositories
 * (users, linked provider accounts, and database sessions).
 */
export class NextAuthAdapter<TRepositories extends IMetamigoRepositories>
  implements AdapterInstance<SavedUser, UnsavedUser, SavedSession>
{
  /**
   * @param repos the metamigo repositories
   * @param sessionMaxAge idle session lifetime, in milliseconds
   * @param sessionUpdateAge minimum interval between session-expiry writes,
   *        in milliseconds (0 = always write)
   */
  constructor(
    private repos: TRepositories,
    private readonly sessionMaxAge = defaultSessionMaxAge,
    private readonly sessionUpdateAge = defaulteSessionUpdateAge
  ) { }

  /** Upsert a user from an OAuth profile (id/isActive are server-managed). */
  async createUser(profile: UnsavedUser): Promise<SavedUser> {
    // @ts-expect-error
    return this.repos.users.upsert(omit(profile, ["isActive", "id"]));
  }

  async getUser(id: UserId): Promise<SavedUser | null> {
    const user = await this.repos.users.findById({ id });
    if (!user) return null;
    // if a user has no linked accounts, then we do not return it
    // see: https://github.com/nextauthjs/next-auth/issues/876
    const accounts = await this.repos.accounts.findAllBy({
      userId: user.id,
    });
    if (!accounts || accounts.length === 0) return null;
    return user;
  }

  async getUserByEmail(email: string): Promise<SavedUser | null> {
    const user = await this.repos.users.findBy({ email });
    if (!user) return null;
    // if a user has no linked accounts, then we do not return it
    // see: https://github.com/nextauthjs/next-auth/issues/876
    const accounts = await this.repos.accounts.findAllBy({
      userId: user.id,
    });
    if (!accounts || accounts.length === 0) return null;
    return user;
  }

  /** Look up a user via the sha256 compound id of (provider, account). */
  async getUserByProviderAccountId(
    providerId: string,
    providerAccountId: string
  ): Promise<SavedUser | null> {
    const account = await this.repos.accounts.findBy({
      compoundId: getCompoundId(providerId, providerAccountId),
    });
    if (!account) return null;
    return this.repos.users.findById({ id: account.userId });
  }

  async updateUser(user: SavedUser): Promise<SavedUser> {
    return this.repos.users.update(user);
  }

  /** Persist an OAuth account link for an existing user (silently no-ops if the user is gone). */
  // @ts-expect-error
  async linkAccount(
    userId: string,
    providerId: string,
    providerType: string,
    providerAccountId: string,
    refreshToken: string,
    accessToken: string,
    accessTokenExpires: number
  ): Promise<void> {
    const exists = await this.repos.users.existsById({ id: userId });
    if (!exists) return;
    const account: UnsavedAccount = {
      accessToken,
      refreshToken,
      compoundId: getCompoundId(providerId, providerAccountId),
      providerAccountId,
      providerId,
      providerType,
      // Fall back to "now" when the provider reports no expiry timestamp.
      accessTokenExpires: accessTokenExpires
        ? new Date(accessTokenExpires)
        : new Date(),
      userId,
    };
    await this.repos.accounts.insert(account);
  }

  async unlinkAccount(
    userId: string,
    providerId: string,
    providerAccountId: string
  ): Promise<void> {
    await this.repos.accounts.removeBy({
      userId,
      compoundId: getCompoundId(providerId, providerAccountId),
    });
  }

  /** Create a database session for the user, expiring sessionMaxAge ms from now. */
  createSession(user: SavedUser): Promise<SavedSession> {
    let expires;
    if (this.sessionMaxAge) {
      const dateExpires = new Date(Date.now() + this.sessionMaxAge);
      expires = dateExpires.toISOString();
    }
    const session: UnsavedSession = {
      // @ts-expect-error
      expires,
      userId: user.id,
      sessionToken: randomToken(),
      accessToken: randomToken(),
    };
    return this.repos.sessions.insert(session);
  }

  /** Fetch a session by token; an expired session is deleted and not returned. */
  async getSession(sessionToken: string): Promise<SavedSession | null> {
    const session = await this.repos.sessions.findBy({ sessionToken });
    if (session && session.expires && new Date() > session.expires) {
      // Await the delete so a failure surfaces here instead of as an
      // unhandled rejection (the original fired and forgot this promise).
      await this.repos.sessions.remove(session);
      return null;
    }
    return session;
  }

  /**
   * Extend a session's expiry, throttled so the database is written at most
   * once per sessionUpdateAge window (unless `force` is set).
   */
  async updateSession(
    session: SavedSession,
    force?: boolean
  ): Promise<SavedSession | null> {
    if (
      this.sessionMaxAge &&
      (this.sessionUpdateAge || this.sessionUpdateAge === 0) &&
      session.expires
    ) {
      // Calculate last updated date, to throttle write updates to database
      // Formula: ({expiry date} - sessionMaxAge) + sessionUpdateAge
      // e.g. ({expiry date} - 30 days) + 24 hours
      //
      // Default for sessionMaxAge is 30 days.
      // Default for sessionUpdateAge is 24 hours (defaulteSessionUpdateAge);
      // the original comment said 1 hour.
      const dateSessionIsDueToBeUpdated = new Date(
        session.expires.getTime() - this.sessionMaxAge + this.sessionUpdateAge
      );
      // Trigger update of session expiry date and write to database, only
      // if the session was last updated more than {sessionUpdateAge} ago
      if (new Date() > dateSessionIsDueToBeUpdated) {
        const newExpiryDate = new Date();
        newExpiryDate.setTime(newExpiryDate.getTime() + this.sessionMaxAge);
        session.expires = newExpiryDate;
      } else if (!force) {
        return null;
      }
    } else if (!force) {
      // If session MaxAge, session UpdateAge or session.expires are
      // missing then don't even try to save changes, unless force is set.
      return null;
    }
    const { expires } = session;
    return this.repos.sessions.update({ ...session, expires });
  }

  async deleteSession(sessionToken: string): Promise<void> {
    await this.repos.sessions.removeBy({ sessionToken });
  }
}

View file

@ -0,0 +1,8 @@
import * as PGP from "pg-promise";
import * as PGPTS from "pg-promise/typescript/pg-subset";
// Re-exported pg-promise types so consumers don't depend on pg-promise directly.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type IDatabase = PGP.IDatabase<any>;
export type IMain = PGP.IMain;
// Low-level query result from pg's type definitions.
export type IResult = PGPTS.IResult;
export type IInitOptions = PGP.IInitOptions;

123
metamigo-common/hapi.ts Normal file
View file

@ -0,0 +1,123 @@
import * as Hapi from "@hapi/hapi";
import * as http from "http";
import type { HttpTerminator } from "http-terminator";
import * as Glue from "@hapi/glue";
import * as Promster from "@promster/hapi";
import figlet from "figlet";
import PinoPlugin from "hapi-pino";
import { createServer as createPrometheusServer } from "@promster/server";
import { createHttpTerminator } from "http-terminator";
import { getPrettyPrint } from "./logger";
import RequestIdPlugin from "./plugins/request-id";
import StatusPlugin from "./plugins/status";
import ConfigPlugin from "./plugins/config";
import { IMetamigoConfig } from "./config";
/** Handles for a deployment: the Hapi server plus optional metrics server. */
export interface Server {
  hapiServer: Hapi.Server;
  // Present only when the deployment was started (start = true in deployment()).
  promServer?: http.Server;
  promTerminator?: HttpTerminator;
}
/**
 * Compose, initialize, and (optionally) start the Hapi server described by
 * the Glue manifest, plus a standalone Prometheus metrics server.
 *
 * @param manifest Glue manifest describing the server and its plugins
 * @param config loaded application config (metrics address/port, banner meta)
 * @param start when false, only compose + initialize and return (no listen) —
 *        useful for tests
 * @returns the server handles; promServer/promTerminator only when started
 */
export const deployment = async <T extends IMetamigoConfig>(
  manifest: Glue.Manifest,
  config: T,
  start = false
): Promise<Server> => {
  const hapiServer: Hapi.Server = await Glue.compose(manifest);
  await hapiServer.initialize();
  if (!start) return { hapiServer };
  // Print the figlet banner before the server begins accepting traffic.
  await announce(config);
  await hapiServer.start();
  const { port, address } = config.metrics;
  const promServer = await createPrometheusServer({
    port,
    hostname: address,
  });
  // Terminator allows graceful shutdown of in-flight metrics connections.
  const promTerminator = createHttpTerminator({
    server: promServer,
  });
  console.log(`
🚀 Server listening on http://${hapiServer.info.address}:${hapiServer.info.port}
     Metrics listening on http://${address}:${port}
  `);
  return {
    hapiServer,
    promServer,
    promTerminator,
  };
};
/** Stop the Hapi server, then gracefully terminate the metrics server if present. */
export const stopDeployment = async (server: Server): Promise<void> => {
  const { hapiServer, promTerminator } = server;
  await hapiServer.stop();
  if (promTerminator) {
    await promTerminator.terminate();
  }
};
/**
 * The default hapi plugin stack: config injection, pino request logging,
 * request-id propagation, a status endpoint, and promster metrics.
 * Registration order follows array order.
 */
export const defaultPlugins = <T extends IMetamigoConfig>(
  config: T
): string[] | Glue.PluginObject[] | Array<string | Glue.PluginObject> => {
  const {
    logRequestStart,
    logRequestComplete,
    logRequestPayload,
    logRequestQueryParams,
    level,
    redact,
    ignorePaths,
    ignoreTags,
    requestIdHeader,
  } = config.logging;
  const plugins = [
    { plugin: ConfigPlugin, options: { config } },
    {
      plugin: PinoPlugin,
      options: {
        prettyPrint: getPrettyPrint(config),
        level,
        logRequestStart,
        logRequestComplete,
        logPayload: logRequestPayload,
        logQueryParams: logRequestQueryParams,
        redact: {
          // remove: true strips the paths entirely instead of masking them.
          paths: redact,
          remove: true,
        },
        ignorePaths,
        ignoreTags,
      },
    },
    {
      plugin: RequestIdPlugin,
      options: {
        header: requestIdHeader,
      },
    },
    { plugin: StatusPlugin },
    { plugin: Promster.createPlugin() },
  ];
  // @ts-ignore
  return plugins;
};
/**
 * Prints a figlet banner with the service name to stdout.
 *
 * @param config reads `config.meta.name` and `config.meta.figletFont`
 * @returns resolves once the banner is printed; rejects on figlet errors
 */
export const announce = async <T extends IMetamigoConfig>(
  config: T
): Promise<void> =>
  new Promise((resolve, reject) => {
    // @ts-expect-error
    figlet.text(
      config.meta.name,
      { font: config.meta.figletFont },
      (err, text) => {
        // Bug fix: previously execution fell through after reject(err),
        // logging "undefined" and calling resolve() on a settled promise.
        if (err) {
          reject(err);
          return;
        }
        console.log(`${text}`);
        resolve();
      }
    );
  });

View file

@ -0,0 +1,44 @@
/**
 * Used by Flavor to mark a type in a readable way.
 */
export interface Flavoring<FlavorT> {
  // Phantom member: never assigned at runtime; it exists only so the type
  // checker can tell different flavors apart.
  _type?: FlavorT;
}
/**
 *
 * Create a "flavored" version of a type. TypeScript will disallow mixing
 * flavors, but will allow unflavored values of that type to be passed in where
 * a flavored version is expected. This is a less restrictive form of branding.
 *
 */
export type Flavor<T, FlavorT> = T & Flavoring<FlavorT>;
// A flavored string used as the base for the id types in ./records.
export type UUID = Flavor<string, "A UUID">;
/**
 * Recursively freezes an object (and every own object/function property)
 * in place, returning the same reference.
 *
 * Bug fix: previously `deepFreeze(null)`/`deepFreeze(undefined)` threw,
 * because Object.getOwnPropertyNames rejects nullish arguments; nullish
 * input is now returned unchanged.
 *
 * @param o the value to freeze; non-objects are returned as-is
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const deepFreeze = (o: unknown): any => {
  if (o === null || o === undefined) return o;
  Object.freeze(o);
  const oIsFunction = typeof o === "function";
  const hasOwnProp = Object.prototype.hasOwnProperty;
  Object.getOwnPropertyNames(o).forEach((prop) => {
    // Skip function meta-properties: accessing caller/callee/arguments on a
    // strict-mode function throws.
    if (
      hasOwnProp.call(o, prop) &&
      (oIsFunction
        ? prop !== "caller" && prop !== "callee" && prop !== "arguments"
        : true)
    ) {
      const value = (o as Record<string, unknown>)[prop];
      if (
        value !== null &&
        (typeof value === "object" || typeof value === "function") &&
        !Object.isFrozen(value)
      ) {
        deepFreeze(value);
      }
    }
  });
  return o;
};

View file

@ -0,0 +1,59 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import * as Boom from "@hapi/boom";
import * as Hapi from "@hapi/hapi";
// Request metadata echoed back in every response envelope.
interface IResponseMeta {
  // the request path, e.g. "/status/ping"
  operation?: string;
  // upper-cased HTTP verb
  method?: string;
  paging?: string | null;
}
// One entry per error, mapped from a Boom error's output payload.
interface IResponseError {
  code?: string | number;
  message?: string;
  error?: string;
}
// The standard { meta, data, errors } envelope all handlers return.
interface IResponse<T> {
  meta: IResponseMeta;
  data: T[];
  errors: IResponseError[];
}
// Inputs to createResponse: a success value and/or a Boom error.
interface IResponseOptions<T> {
  value?: T | null | undefined;
  boom?: Boom.Boom<any> | null | undefined;
}
/**
 * Builds the standard API response envelope ({ meta, data, errors }) for a
 * request, from a successful value, a Boom error, or both.
 *
 * @param request only `request.method` and `request.url.pathname` are read
 * @param options.value payload to return; arrays are spread into `data`
 * @param options.boom  a Boom error whose output payload becomes an `errors` entry
 */
export function createResponse<T>(
  request: Hapi.Request,
  { value = undefined, boom = undefined }: IResponseOptions<T>
): IResponse<T> {
  const errors: IResponseError[] = [];
  const data: any = [];
  if (boom) {
    errors.push({
      code: boom.output.payload.statusCode,
      error: boom.output.payload.error,
      message: boom.output.payload.message,
    });
  }
  // Bug fix: was `if (value && data)` — `data` is the freshly-created array
  // above and therefore always truthy, so the second operand was dead code.
  // NOTE(review): the truthiness check intentionally skips falsy values
  // (0, "", false) exactly as before — confirm that is the desired contract.
  if (value) {
    if (Array.isArray(value)) {
      data.push(...value);
    } else {
      data.push(value);
    }
  }
  return {
    meta: {
      method: request.method.toUpperCase(),
      operation: request.url.pathname,
    },
    data,
    errors,
  };
}

View file

@ -0,0 +1,62 @@
import process from "process";
import * as Hapi from "@hapi/hapi";
import * as Joi from "joi";
import Hoek from "@hapi/hoek";
import * as Boom from "@hapi/boom";
/**
 * The shape of the error hapi actually throws for Joi validation failures:
 * a Joi ValidationError augmented with hapi's Boom-style `output` field.
 */
export interface HapiValidationError extends Joi.ValidationError {
  output: {
    statusCode: number;
    headers: Hapi.Utils.Dictionary<string | string[]>;
    payload: {
      statusCode: number;
      error: string;
      message?: string;
      // hapi reports which request part and which keys failed validation.
      validation: {
        source: string;
        keys: string[];
      };
    };
  };
}
/**
 * Re-implements hapi's default validation error handler so newer Joi
 * versions keep producing the same `validation.keys` format as older ones.
 *
 * The Hapi code we're 'overwriting' can be found here:
 * https://github.com/hapijs/hapi/blob/master/lib/validation.js#L102
 *
 * @throws always rethrows `err` (possibly with its validation keys rewritten)
 */
export function defaultValidationErrorHandler(
  request: Hapi.Request,
  h: Hapi.ResponseToolkit,
  err?: Error
): Hapi.Lifecycle.ReturnValue {
  // Newer versions of Joi don't format the key for missing params the same way. This shim
  // provides backwards compatibility. Unfortunately, Joi doesn't export its own Error class
  // in JS so we have to rely on the `name` key before we can cast it.
  // Robustness fix: use the prototype-safe hasOwnProperty form so a
  // null-prototype or hasOwnProperty-shadowing error object cannot crash us.
  if (
    err &&
    err.name === "ValidationError" &&
    Object.prototype.hasOwnProperty.call(err, "output")
  ) {
    const validationError: HapiValidationError = err as HapiValidationError;
    const validationKeys: string[] = [];
    validationError.details.forEach((detail) => {
      if (detail.path.length > 0) {
        validationKeys.push(Hoek.escapeHtml(detail.path.join(".")));
      } else {
        // If no path, use the value sigil to signal the entire value had an issue.
        validationKeys.push("value");
      }
    });
    validationError.output.payload.validation.keys = validationKeys;
  }
  throw err;
}
/**
 * Route failAction: in production hide validation details from clients;
 * everywhere else delegate to the backwards-compatible handler (which throws
 * the original error so full details surface).
 */
export const validatingFailAction = async (
  request: Hapi.Request,
  h: Hapi.ResponseToolkit,
  err: Error
): Promise<void> => {
  if (process.env.NODE_ENV !== "production") {
    defaultValidationErrorHandler(request, h, err);
    return;
  }
  throw Boom.badRequest("Invalid request payload input");
};

23
metamigo-common/index.ts Normal file
View file

@ -0,0 +1,23 @@
export * from "./config";
export * from "./controllers/crud-controller";
export * from "./controllers/nextauth-adapter";
export * from "./hapi";
export * from "./helpers";
export * from "./helpers/response";
export * from "./helpers/validation-error";
export * from "./logger";
export * from "./records";
import * as pino from "pino";
declare module "@hapi/hapi" {
interface Server {
// @ts-ignore
logger: pino.Logger;
}
interface Request {
// @ts-ignore
logger: pino.Logger;
}
}

22
metamigo-common/logger.ts Normal file
View file

@ -0,0 +1,22 @@
import pino, { LoggerOptions } from "pino";
import { IMetamigoConfig } from "./config";
/**
 * Resolves the logging `prettyPrint` setting to a boolean.
 * "auto" means: pretty-print only in dev environments.
 *
 * @param config reads `config.logging.prettyPrint` and `config.isDev`
 */
export const getPrettyPrint = <T extends IMetamigoConfig>(config: T): boolean => {
  const { prettyPrint } = config.logging;
  // Idiom fixes: `config` is a required parameter, so the optional chain was
  // redundant; `??` only falls back when isDev is nullish (same result for
  // booleans, but states the intent precisely).
  if (prettyPrint === "auto") return config.isDev ?? false;
  return prettyPrint === true;
};
/**
 * Creates a standalone pino logger from the application's logging config,
 * with the configured redaction paths removed from log output entirely.
 */
export const configureLogger = <T extends IMetamigoConfig>(
  config: T
): pino.Logger => {
  const { level, redact } = config.logging;
  const loggerOptions: LoggerOptions = {
    level,
    redact: { paths: redact, remove: true },
  };
  return pino(loggerOptions);
};

View file

@ -0,0 +1,65 @@
{
"name": "common",
"version": "0.2.0",
"description": "",
"main": "build/main/index.js",
"types": "build/main/index.d.ts",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"private": false,
"scripts": {
"build": "tsc -p tsconfig.json",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"test": "yarn test:jest && yarn test:lint && yarn test:prettier",
"test:lint": "eslint src --ext .ts",
"test:prettier": "prettier \"src/**/*.ts\" --list-different",
"test:jest": "jest --coverage --forceExit --detectOpenHandles --reporters=default --reporters=jest-junit",
"doc": "yarn run doc:html",
"doc:html": "typedoc src/ --exclude '**/*.test.ts' --exclude '**/*.spec.ts' --name $npm_package_name --readme README.md --target es2019 --mode file --out build/docs",
"watch:build": "tsc -p tsconfig.json -w"
},
"devDependencies": {
"@types/figlet": "^1.5.5",
"@types/lodash": "^4.14.191",
"@types/node": "*",
"@types/uuid": "^9.0.0",
"camelcase-keys": "^8.0.2",
"pg-monitor": "^2.0.0",
"typedoc": "^0.23.24"
},
"dependencies": {
"@digiresilience/hapi-nextauth": "0.2.1",
"@hapi/boom": "^10.0.0",
"@hapi/glue": "^9.0.0",
"@hapi/hapi": "^21.2.0",
"@hapi/hoek": "^11.0.2",
"@hapi/inert": "^7.0.0",
"@hapi/vision": "^7.0.0",
"@hapipal/schmervice": "^2.1.0",
"@promster/hapi": "^8.0.6",
"@promster/server": "^7.0.8",
"@promster/types": "^3.2.5",
"@types/convict": "^6.1.1",
"@types/hapi__glue": "^6.1.6",
"@types/hapi__hapi": "^20.0.13",
"@types/hapi__inert": "^5.2.4",
"@types/hapi__vision": "^5.5.4",
"@types/hapi-pino": "^9.1.2",
"@types/hapipal__schmervice": "^2.0.3",
"@types/joi": "^17.2.3",
"chalk": "^5.2.0",
"commander": "^10.0.0",
"convict": "^6.2.4",
"decamelcase-keys": "^1.1.1",
"figlet": "^1.5.2",
"hapi-pino": "^11.0.1",
"http-terminator": "^3.2.0",
"joi": "^17.7.0",
"lodash": "^4.17.21",
"pg-promise": "^11.0.2",
"pino": "^8.8.0",
"prom-client": "^14.x.x",
"uuid": "^9.0.0"
}
}

View file

@ -0,0 +1,23 @@
import { Server } from "@hapi/hapi";
import cloneDeep from "lodash/cloneDeep";
import { deepFreeze } from "../helpers";
interface ConfigOptions {
  config: unknown;
}

/**
 * Exposes an immutable copy of the application config as `server.config()`.
 */
const register = async (
  server: Server,
  options: ConfigOptions
): Promise<void> => {
  // Deep-clone first so later mutations of the original object cannot leak
  // through, then freeze the clone so consumers cannot mutate it either.
  const frozenConfig = deepFreeze(cloneDeep(options.config));
  server.decorate("server", "config", () => frozenConfig);
};

const ConfigPlugin = {
  register,
  name: "config",
  version: "0.0.1",
};

export default ConfigPlugin;

View file

@ -0,0 +1,37 @@
import { Server } from "@hapi/hapi";
import { v4 as uuid } from "uuid";
interface RequestIdOptions {
  // header name to read/propagate; defaults to "x-request-id"
  header?: string;
}

/**
 * Ensures every response carries a request-id header: the id the client sent
 * is echoed back when present, otherwise a fresh UUID is generated.
 */
const register = async (
  server: Server,
  options?: RequestIdOptions
): Promise<void> => {
  const header = options?.header || "x-request-id";
  server.ext("onPreResponse", async (request, h) => {
    if (!request.response) {
      return h.continue;
    }
    if ("isBoom" in request.response) {
      // Boom responses only honor headers placed on their `output`.
      // Bug fix: previously the incoming request header was ignored here
      // (only response.output.headers was consulted, which is almost never
      // set), so error responses dropped the client's request id. Keep any
      // id already set on the response, else propagate the client's, else
      // mint a new one.
      const id =
        request.response.output.headers[header] ||
        request.headers[header] ||
        uuid();
      request.response.output.headers[header] = id;
    } else {
      const id = request.headers[header] || uuid();
      // @ts-ignore
      request.response.header(header, id);
    }
    return h.continue;
  });
};

const RequestIdPlugin = {
  register,
  name: "request-id",
  version: "0.0.1",
};

export default RequestIdPlugin;

View file

@ -0,0 +1,60 @@
import { Server, RouteOptionsAccess } from "@hapi/hapi";
import { Prometheus } from "@promster/hapi";
interface StatusOptions {
  // base path for the status routes; defaults to "/status"
  path?: string;
  // optional auth config applied to every status route
  auth?: RouteOptionsAccess;
}

// Handler factory: bumps the given counter and reports what it did.
const count = (statusCounter: any) => {
  return async (): Promise<string> => {
    statusCounter.inc();
    return "Incremented metamigo_status_test counter";
  };
};

// Liveness handler: always answers "OK".
const ping = async (): Promise<string> => "OK";
/**
 * Builds the status route table: a ping endpoint and a prometheus test
 * counter endpoint, both under the configured base path.
 */
const statusRoutes = (server: Server, opt?: StatusOptions) => {
  const basePath = opt?.path || "/status";
  const statusCounter = new Prometheus.Counter({
    name: "metamigo_status_test",
    help: "Test counter",
  });
  // Shared route factory so auth/tags/description wiring is written once.
  const route = (
    suffix: string,
    handler: () => Promise<string>,
    tags: string[],
    description: string
  ) => ({
    method: "GET",
    path: `${basePath}${suffix}`,
    handler,
    options: {
      auth: opt?.auth,
      tags,
      description,
    },
  });
  return [
    route(
      "/ping",
      ping,
      ["api", "status", "ping"],
      "Returns 200 and OK as the response."
    ),
    route(
      "/inc",
      count(statusCounter),
      ["api", "status", "prometheus"],
      "Increments a test counter, for testing prometheus."
    ),
  ];
};
// Plugin registration: mount the status route table on the server.
const register = async (
  server: Server,
  options: StatusOptions
): Promise<void> => {
  const routes = statusRoutes(server, options);
  server.route(routes);
};

const StatusPlugin = {
  register,
  name: "status",
  version: "0.0.1",
};

export default StatusPlugin;

View file

@ -0,0 +1,30 @@
import { recordInfo } from "./record-info";
import { RepositoryBase } from "./base";
import { Flavor, UUID } from "../helpers";
import { UserId } from "./user";
// Branded id type so AccountIds can't be mixed with other UUID flavors.
export type AccountId = Flavor<UUID, "Account Id">;
// An OAuth provider account row before it has been persisted
// (no id/timestamps yet).
export interface UnsavedAccount {
  compoundId: string;
  userId: UserId;
  providerType: string;
  providerId: string;
  providerAccountId: string;
  refreshToken: string;
  accessToken: string;
  accessTokenExpires: Date;
}
// A persisted account row; id and timestamps are filled in by the database.
export interface SavedAccount extends UnsavedAccount {
  id: AccountId;
  createdAt: Date;
  updatedAt: Date;
}
// Runtime descriptor: app_public.accounts with the default "id" primary key.
export const AccountRecord = recordInfo<UnsavedAccount, SavedAccount>(
  "app_public",
  "accounts"
);
// CRUD repository for accounts (inherits everything from the generic base).
export class AccountRecordRepository extends RepositoryBase(AccountRecord) {}

View file

@ -0,0 +1,57 @@
import { TableName } from "pg-promise";
import { IMain } from "../db/types";
import { CrudRepository } from "./crud-repository";
import { PgRecordInfo, UnsavedR, SavedR, KeyType } from "./record-info";
import type { IDatabase } from "pg-promise";
// A pg-promise database handle, possibly extended with extra helpers T.
export type PgProtocol<T> = IDatabase<T> & T;
/**
 * This function returns a constructor for a repository class for [[TRecordInfo]]
 *
 * @param aRecordType the record type runtime definition
 */
// haven't figured out a good return type for this function
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function unboundRepositoryBase<
  TRecordInfo extends PgRecordInfo,
  TDatabaseExtension
>(aRecordType: TRecordInfo) {
  return class Repository extends CrudRepository<
    UnsavedR<TRecordInfo>,
    SavedR<TRecordInfo>,
    KeyType<TRecordInfo>
  > {
    // Phantom field: carries the record-info type on the class type; never
    // assigned at runtime (hence the definite-assignment `!`).
    _recordType!: TRecordInfo;
    static readonly recordType = aRecordType;
    static readonly schemaName = aRecordType.schemaName;
    static readonly tableName = aRecordType.tableName;
    public readonly recordType = aRecordType;
    // Fully-qualified, safely-escaped schema.table name used in queries.
    public readonly schemaTable: TableName;
    public db: PgProtocol<TDatabaseExtension>;
    public pgp: IMain;
    constructor(db: PgProtocol<TDatabaseExtension>) {
      super();
      this.pgp = db.$config.pgp;
      this.schemaTable = new this.pgp.helpers.TableName({
        schema: aRecordType.schemaName,
        table: aRecordType.tableName,
      });
      this.db = db;
      if (!this.db) {
        throw new Error("Missing database in repository");
      }
    }
  };
}
/**
 * Convenience wrapper around [[unboundRepositoryBase]] with the database
 * extension type defaulted to `unknown`.
 */
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function RepositoryBase<
  Rec extends PgRecordInfo,
  TDatabaseExtension = unknown
>(recordType: Rec) {
  return unboundRepositoryBase<Rec, TDatabaseExtension>(recordType);
}

View file

@ -0,0 +1,321 @@
/* eslint-disable @typescript-eslint/ban-types,@typescript-eslint/no-explicit-any */
import { TableName } from "pg-promise";
import decamelcaseKeys from "decamelcase-keys";
import isObject from "lodash/isObject";
import isArray from "lodash/isArray";
import zipObject from "lodash/zipObject";
import isEmpty from "lodash/isEmpty";
import omit from "lodash/omit";
import { IDatabase, IMain, IResult } from "../db/types";
import { PgRecordInfo, idKeysOf } from "./record-info";
/**
 * Generic CRUD contract implemented by [[CrudRepository]].
 *
 * @typeParam TUnsavedR record shape before persistence
 * @typeParam TSavedR   record shape after persistence (unsaved + ids/timestamps)
 * @typeParam IdKeyT    object shape of the primary key(s)
 */
export interface ICrudRepository<
  TUnsavedR,
  TSavedR extends TUnsavedR & IdKeyT,
  IdKeyT extends object
> {
  findById(id: IdKeyT): Promise<TSavedR | null>;
  findBy(example: Partial<TSavedR>): Promise<TSavedR | null>;
  findAll(): Promise<TSavedR[]>;
  findAllBy(example: Partial<TSavedR>): Promise<TSavedR[]>;
  existsById(id: IdKeyT): Promise<boolean>;
  countBy(example: Partial<TSavedR>): Promise<number>;
  count(): Promise<number>;
  insert(record: TUnsavedR): Promise<TSavedR>;
  insertAll(toInsert: TUnsavedR[]): Promise<TSavedR[]>;
  updateById(id: IdKeyT, attrs: Partial<TSavedR>): Promise<TSavedR>;
  update(record: TSavedR): Promise<TSavedR>;
  updateAll(toUpdate: TSavedR[]): Promise<TSavedR[]>;
  remove(record: TSavedR): Promise<number>;
  removeAll(toRemove: TSavedR[]): Promise<number>;
  // NOTE(review): the implementation returns the raw pg-promise result here,
  // not TSavedR | null — confirm which return type is intended.
  removeBy(example: Partial<TSavedR>): Promise<TSavedR | null>;
  removeById(id: IdKeyT): Promise<number>;
}
// The snake cased object going into the db
type DatabaseRow = Record<string, unknown>;
/**
 * Base class for generic CRUD operations on a repository for a specific type.
 *
 * Several assumptions are made about your environment for this generic CRUD repository to work:
 *
 * - the underlying column names are snake_cased (this behavior can be changed, see [[columnize]])
 * - the rows have only a single primary key (composite keys are not supported)
 *   (NOTE(review): [[idsObj]] below appears to handle multi-key ids — confirm
 *   whether composite keys are in fact supported.)
 *
 * @typeParam ID The type of the id column
 * @typeParam T The type of the record
 */
export abstract class CrudRepository<
  TUnsavedR,
  TSavedR extends TUnsavedR & IdKeyT,
  IdKeyT extends object
> implements ICrudRepository<TUnsavedR, TSavedR, IdKeyT>
{
  /**
   * the fully qualified table name
   */
  abstract schemaTable: TableName;
  // runtime metadata for the record type (table, schema, id keys)
  abstract recordType: PgRecordInfo<TUnsavedR, TSavedR, IdKeyT>;
  // pg-promise database handle and root library (for formatting helpers)
  abstract db: IDatabase;
  abstract pgp: IMain;
  /**
   * Converts the record's columns into snake_case
   *
   * @param record the record of type T to convert
   */
  columnize(record: TSavedR | Partial<TSavedR>): DatabaseRow {
    return decamelcaseKeys(record);
  }
  /*
   * Creates a simple where clause with each key-value in `example` is
   * formatted as KEY=VALUE and all kv-pairs are ANDed together.
   *
   * Column names are escaped via pg-promise's :name filter; values are bound
   * as named parameters. The constant `1=1` keeps the clause valid when
   * `example` is empty.
   *
   * @param example key value pair of column names and values
   */
  where(example: Partial<TSavedR>): string {
    const snaked = this.columnize(example);
    const clauses = Object.keys(snaked).reduce((acc, cur) => {
      const colName = this.pgp.as.format("$1:name", cur);
      return `${acc} and ${colName} = $<${cur}>`;
    }, "");
    const where = this.pgp.as.format(`WHERE 1=1 ${clauses}`, { ...snaked }); // Pre-format WHERE condition
    return where;
  }
  /**
   * Converts a value containing the id of the record (which could be a primitive type, a composite object, or an array of values)
   * into an object which can be safely passed to [[where]].
   */
  idsObj(idValues: IdKeyT): IdKeyT {
    if (isEmpty(idValues)) {
      throw new Error(`idsObj(${this.schemaTable}): passed empty id(s)`);
    }
    let ids = {};
    const idKeys = idKeysOf(this.recordType as any);
    if (isArray(idValues)) {
      // Positional array: pair values with the declared id keys, in order.
      ids = zipObject(idKeys, idValues);
    } else if (isObject(idValues)) {
      ids = idValues;
    } else {
      // Primitive: only valid for single-key records.
      if (idKeys.length !== 1) {
        throw new Error(
          `idsObj(${this.schemaTable}): passed record has multiple primary keys. the ids must be passed as an object or array. ${idValues}`
        );
      }
      // @ts-ignore
      ids[idKeys[0]] = idValues;
    }
    // this is a sanity check so we don't do something like
    // deleting all the data if a WHERE slips in with no ids
    if (isEmpty(ids)) {
      throw new Error(`idsObj(${this.schemaTable}): passed empty ids`);
    }
    return ids as IdKeyT;
  }
  /**
   * Returns all rows in the table
   */
  async findAll(): Promise<TSavedR[]> {
    return this.db.any("SELECT * FROM $1", [this.schemaTable]);
  }
  /**
   * Returns the number of rows in the table
   */
  async count(): Promise<number> {
    return this.db.one(
      "SELECT count(*) FROM $1",
      [this.schemaTable],
      (a: { count: string }) => Number(a.count)
    );
  }
  /**
   * Returns the number of rows in the table matching the example
   */
  async countBy(example: Partial<TSavedR>): Promise<number> {
    return this.db.one(
      "SELECT count(*) FROM $1 $2:raw ",
      [this.schemaTable, this.where(example)],
      (a: { count: string }) => Number(a.count)
    );
  }
  /**
   * Find a single row where the example are true.
   * @param example key-value pairs of column names and values
   */
  async findBy(example: Partial<TSavedR>): Promise<TSavedR | null> {
    return this.db.oneOrNone("SELECT * FROM $1 $2:raw LIMIT 1", [
      this.schemaTable,
      this.where(example),
    ]);
  }
  /**
   * Retrieves a row by ID
   * @param id
   */
  async findById(id: IdKeyT): Promise<TSavedR | null> {
    const where = this.idsObj(id);
    return this.db.oneOrNone("SELECT * FROM $1 $2:raw", [
      this.schemaTable,
      this.where(where),
    ]);
  }
  /**
   * Returns whether a given row with id exists
   * @param id
   */
  async existsById(id: IdKeyT): Promise<boolean> {
    return this.db.one(
      "SELECT EXISTS(SELECT 1 FROM $1 $2:raw)",
      [this.schemaTable, this.where(this.idsObj(id))],
      (a: { exists: boolean }) => a.exists
    );
  }
  /**
   * Find all rows where the example are true.
   * @param example key-value pairs of column names and values
   */
  async findAllBy(example: Partial<TSavedR>): Promise<TSavedR[]> {
    return this.db.any("SELECT * FROM $1 $2:raw", [
      this.schemaTable,
      this.where(example),
    ]);
  }
  /**
   * Creates a new row
   * @param record
   * @return the new row
   */
  async insert(record: TUnsavedR): Promise<TSavedR> {
    // $2:name emits the column list, $2:csv the matching value list.
    return this.db.one("INSERT INTO $1 ($2:name) VALUES ($2:csv) RETURNING *", [
      this.schemaTable,
      this.columnize(record as any),
    ]);
  }
  /**
   * Like `insert` but will insert/update a batch of rows at once
   */
  async insertAll(toInsert: TUnsavedR[]): Promise<TSavedR[]> {
    // All inserts run inside one transaction; batch settles them together.
    return this.db.tx((t) => {
      const insertCommands: any[] = [];
      toInsert.forEach((record) => {
        insertCommands.push(this.insert(record));
      });
      return t.batch(insertCommands);
    });
  }
  /**
   * Deletes a row by id
   * @param id
   * @return the number of rows affected
   */
  async removeById(id: IdKeyT): Promise<number> {
    return this.db.result(
      "DELETE FROM $1 $2:raw",
      [this.schemaTable, this.where(this.idsObj(id))],
      (r: IResult) => r.rowCount
    );
  }
  /**
   * Delete records matching the query
   * @param example key-value pairs of column names and values
   *
   * NOTE(review): unlike [[removeById]], this returns the raw pg-promise
   * IResult (no `r.rowCount` transform), which does not match the declared
   * `Promise<TSavedR | null>` — confirm the intended return value.
   */
  async removeBy(example: Partial<TSavedR>): Promise<TSavedR | null> {
    // Empty example guard: prevents an accidental unfiltered DELETE.
    if (isEmpty(example))
      throw new Error(
        `removeBy(${this.schemaTable}): passed empty constraint!`
      );
    return this.db.result("DELETE FROM $1 $2:raw", [
      this.schemaTable,
      this.where(example),
    ]);
  }
  /**
   * Deletes the given row
   *
   * @param record to remove
   * @return the number of rows affected
   */
  async remove(record: TSavedR): Promise<number> {
    return this.removeById(this.recordType.idOf(record));
  }
  /**
   * Deletes all rows
   * @param toRemove a list of rows to remove, if empty, DELETES ALL ROWS
   * @return the number of rows affected
   */
  async removeAll(toRemove: TSavedR[] = []): Promise<number> {
    if (toRemove.length === 0) {
      return this.db.result(
        "DELETE FROM $1 WHERE 1=1;",
        [this.schemaTable],
        (r: IResult) => r.rowCount
      );
    }
    const results = await this.db.tx((t) => {
      const delCommands: any[] = [];
      toRemove.forEach((record) => {
        delCommands.push(this.remove(record));
      });
      return t.batch(delCommands);
    });
    return results.length;
  }
  /**
   * Updates an existing row
   * @param id
   * @param attrs
   * @return the updated row
   */
  async updateById(id: IdKeyT, attrs: Partial<TSavedR>): Promise<TSavedR> {
    // Strip primary-key fields so the id itself is never updated.
    const idKeys = idKeysOf(this.recordType as any);
    const attrsSafe = omit(attrs, idKeys);
    return this.db.one(
      "UPDATE $1 SET ($2:name) = ROW($2:csv) $3:raw RETURNING *",
      [this.schemaTable, this.columnize(attrsSafe), this.where(this.idsObj(id))]
    );
  }
  async update(record: TSavedR): Promise<TSavedR> {
    return this.updateById(this.recordType.idOf(record), record);
  }
  /**
   * Update a batch of records at once
   */
  async updateAll(toUpdate: TSavedR[]): Promise<TSavedR[]> {
    return this.db.tx((t) => {
      const updateCommands: any[] = [];
      toUpdate.forEach((record) => {
        updateCommands.push(this.update(record));
      });
      return t.batch(updateCommands);
    });
  }
}

View file

@ -0,0 +1,16 @@
export * from "./base";
export * from "./record-info";
export * from "./crud-repository";
export * from "./user";
export * from "./session";
export * from "./account";
import type { AccountRecordRepository } from "./account";
import type { UserRecordRepository } from "./user";
import type { SessionRecordRepository } from "./session";
/**
 * The set of record repositories every metamigo service is expected to wire
 * up against its database handle.
 */
export interface IMetamigoRepositories {
  users: UserRecordRepository;
  sessions: SessionRecordRepository;
  accounts: AccountRecordRepository;
}

View file

@ -0,0 +1,54 @@
export interface EntityType<TUnsaved = any, TSaved = any, TIds extends object = any> {
_saved: TSaved;
_unsaved: TUnsaved;
_idKeys: TIds;
idOf: (rec: TSaved) => TIds;
}
export declare type UnsavedR<T extends {
_unsaved: any;
}> = T["_unsaved"];
export declare type SavedR<T extends {
_saved: any;
}> = T["_saved"];
export declare type KeyType<R extends EntityType> = R["_idKeys"];
export interface PgRecordInfo<Unsaved = any, Saved extends Unsaved & IdType = any, IdType extends object = any> extends EntityType<Unsaved, Saved, IdType> {
tableName: string;
schemaName: string;
idKeys: (keyof Saved)[];
}
/**
* Extract the runtime key name from a recordInfo
*/
export declare function idKeysOf<RI extends PgRecordInfo>(recordInfoWithIdKey: RI): string[];
/**
* Turns a record type with possibly more fields than "id" into an array
*/
export declare function collectIdValues<RecordT extends PgRecordInfo>(idObj: KeyType<RecordT>, knexRecordType: RecordT): string[];
/**
*
* Creates a record descriptor that captures the table name, primary key name,
* unsaved type, and saved type of a database record type. Assumes "id" as the
* primary key name
*
*/
export declare function recordInfo<Unsaved, Saved extends Unsaved & {
id: any;
}>(schemaName: string, tableName: string): PgRecordInfo<Unsaved, Saved, Pick<Saved, "id">>;
export declare function recordInfo<Type extends {
id: string;
}>(schemaName: string, tableName: string): PgRecordInfo<Type, Type, Pick<Type, "id">>;
/**
*
* Creates a record descriptor that captures the table name, primary key name,
* unsaved type, and saved type of a database record type.
*
*/
export declare function recordInfo<Unsaved, Saved extends Unsaved, Id extends keyof Saved>(schemaName: string, tableName: string, idKey: Id[]): PgRecordInfo<Unsaved, Saved, Pick<Saved, Id>>;
/**
*
* Creates a record descriptor for records with composite primary keys
*
*/
export declare function compositeRecordType<TUnsaved, TSaved extends TUnsaved = TUnsaved>(schemaName: string, tableName: string): {
withCompositeKeys<TKeys extends keyof TSaved>(keys: TKeys[]): PgRecordInfo<TUnsaved, TSaved, Pick<TSaved, TKeys>>;
};

View file

@ -0,0 +1,133 @@
/* eslint-disable @typescript-eslint/ban-types,@typescript-eslint/no-explicit-any,@typescript-eslint/explicit-module-boundary-types */
import at from "lodash/at";
import pick from "lodash/pick";
/**
 * Compile-time description of a record type: its unsaved/saved shapes, the
 * shape of its id key(s), and how to extract the ids from a saved record.
 */
export interface EntityType<
  TUnsaved = any,
  TSaved = any,
  TIds extends object = any
> {
  // Phantom members: these carry types only and are never read at runtime.
  _saved: TSaved;
  _unsaved: TUnsaved;
  _idKeys: TIds;
  idOf: (rec: TSaved) => TIds;
}
// Helpers to extract the unsaved/saved/id types back out of an EntityType.
export type UnsavedR<T extends { _unsaved: any }> = T["_unsaved"];
export type SavedR<T extends { _saved: any }> = T["_saved"];
export type KeyType<R extends EntityType> = R["_idKeys"];
/**
 * An EntityType plus the runtime metadata needed to address its Postgres
 * table (schema, table name, and primary-key column names).
 */
export interface PgRecordInfo<
  Unsaved = any,
  Saved extends Unsaved & IdType = any,
  IdType extends object = any
> extends EntityType<Unsaved, Saved, IdType> {
  tableName: string;
  schemaName: string;
  idKeys: (keyof Saved)[];
}
/**
 * Extract the runtime primary-key names from a recordInfo.
 */
export function idKeysOf<RI extends PgRecordInfo>(
  recordInfoWithIdKey: RI
): string[] {
  const { idKeys } = recordInfoWithIdKey;
  return idKeys as any;
}
/**
 * Turns an id object (possibly holding more fields than "id") into an
 * ordered array of id values, following the record's declared key order.
 */
export function collectIdValues<RecordT extends PgRecordInfo>(
  idObj: KeyType<RecordT>,
  knexRecordType: RecordT
): string[] {
  const keys = idKeysOf(knexRecordType);
  return at(idObj, keys);
}
// Internal: brands plain runtime metadata as a PgRecordInfo. The phantom
// type members only exist at compile time, so a cast is all that's needed.
function castToRecordInfo(
  runtimeData: Omit<PgRecordInfo, "_idKeys" | "_saved" | "_unsaved">
): PgRecordInfo {
  return runtimeData as PgRecordInfo;
}
/**
 *
 * Creates a record descriptor that captures the table name, primary key name,
 * unsaved type, and saved type of a database record type. Assumes "id" as the
 * primary key name
 *
 */
export function recordInfo<Unsaved, Saved extends Unsaved & { id: any }>(
  schemaName: string,
  tableName: string
): PgRecordInfo<Unsaved, Saved, Pick<Saved, "id">>;
export function recordInfo<Type extends { id: string }>(
  schemaName: string,
  tableName: string
): PgRecordInfo<Type, Type, Pick<Type, "id">>;
/**
 *
 * Creates a record descriptor that captures the table name, primary key name,
 * unsaved type, and saved type of a database record type.
 *
 */
export function recordInfo<
  Unsaved,
  Saved extends Unsaved,
  Id extends keyof Saved
>(
  schemaName: string,
  tableName: string,
  idKey: Id[]
): PgRecordInfo<Unsaved, Saved, Pick<Saved, Id>>;
/**
 * Implementation signature — always call through an overload above so the
 * unsaved/saved types are captured.
 */
export function recordInfo(
  schemaName: string,
  tableName: string,
  idKeys?: string[]
) {
  const keys = idKeys || ["id"];
  return castToRecordInfo({
    schemaName,
    tableName,
    idKeys: keys,
    idOf: (rec) => pick(rec, keys as any),
  });
}
/**
 *
 * Creates a record descriptor for records with composite primary keys
 *
 */
export function compositeRecordType<
  TUnsaved,
  TSaved extends TUnsaved = TUnsaved
>(
  schemaName: string,
  tableName: string
): {
  withCompositeKeys<TKeys extends keyof TSaved>(
    keys: TKeys[]
  ): PgRecordInfo<TUnsaved, TSaved, Pick<TSaved, TKeys>>;
} {
  // Two-step builder: capture the table identity now, the key set later.
  const withCompositeKeys = <TKeys extends keyof TSaved>(keys: TKeys[]) =>
    castToRecordInfo({
      schemaName,
      tableName,
      idKeys: keys,
      idOf: (rec) => pick(rec, keys),
    });
  return { withCompositeKeys };
}

View file

@ -0,0 +1,26 @@
import { recordInfo } from "./record-info";
import { RepositoryBase } from "./base";
import { Flavor, UUID } from "../helpers";
import { UserId } from "./user";
// Branded id type so SessionIds can't be mixed with other UUID flavors.
export type SessionId = Flavor<UUID, "Session Id">;
// A session row before it has been persisted (no id/timestamps yet).
export interface UnsavedSession {
  userId: UserId;
  expires: Date;
  sessionToken: string;
  accessToken: string;
}
// A persisted session row; id and timestamps are filled in by the database.
export interface SavedSession extends UnsavedSession {
  id: SessionId;
  createdAt: Date;
  updatedAt: Date;
}
// Runtime descriptor: app_private.sessions with the default "id" primary key.
export const SessionRecord = recordInfo<UnsavedSession, SavedSession>(
  "app_private",
  "sessions"
);
// CRUD repository for sessions (inherits everything from the generic base).
export class SessionRecordRepository extends RepositoryBase(SessionRecord) {}

View file

@ -0,0 +1,40 @@
import { recordInfo } from "./record-info";
import { RepositoryBase } from "./base";
import { Flavor, UUID } from "../helpers";
// Branded id type so UserIds can't be mixed with other UUID flavors.
export type UserId = Flavor<UUID, "User Id">;
// A user row before it has been persisted (no id/timestamps yet).
export interface UnsavedUser {
  name: string;
  email: string;
  emailVerified: Date;
  avatar: string;
  isActive: boolean;
  userRole: string;
}
// A persisted user row; id and timestamps are filled in by the database.
export interface SavedUser extends UnsavedUser {
  id: UserId;
  createdAt: Date;
  updatedAt: Date;
}
// Runtime descriptor: app_public.users with the default "id" primary key.
export const UserRecord = recordInfo<UnsavedUser, SavedUser>(
  "app_public",
  "users"
);
export class UserRecordRepository extends RepositoryBase(UserRecord) {
  /**
   * Inserts the user, or — when a row with the same email already exists —
   * updates its name, avatar, and email_verified columns instead.
   * Returns the saved row either way.
   */
  async upsert(record: UnsavedUser | SavedUser): Promise<SavedUser> {
    return this.db.one(
      `INSERT INTO $1 ($2:name) VALUES ($2:csv)
       ON CONFLICT (email)
       DO UPDATE SET
         name = EXCLUDED.name,
         avatar = EXCLUDED.avatar,
         email_verified = EXCLUDED.email_verified
       RETURNING *`,
      [this.schemaTable, this.columnize(record)]
    );
  }
}

View file

@ -0,0 +1,13 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"incremental": true,
"outDir": "build/main",
"paths": {
"@hapipal/schmervice": ["vendor/hapipal__schmervice"],
"*": ["node_modules/@types/*", "*"]
}
},
"include": ["**/*.ts"],
"exclude": ["node_modules/**"]
}

View file

@ -0,0 +1,18 @@
// ESLint configuration for this package, built on the shared
// @digiresilience/eslint-config-metamigo profiles.
require('@digiresilience/eslint-config-metamigo/patch/modern-module-resolution');

module.exports = {
  extends: [
    "@digiresilience/eslint-config-metamigo/profile/node",
    "@digiresilience/eslint-config-metamigo/profile/typescript"
  ],
  parserOptions: { tsconfigRootDir: __dirname },
  rules: {
    "import/no-extraneous-dependencies": [
      // enable this when this is fixed
      // https://github.com/benmosher/eslint-plugin-import/pull/1696
      "off",
      { packageDir: [".", "node_modules/@digiresilience/metamigo", "node_modules/@digiresilience/metamigo-dev"] },
    ],
    // TODO: enable this after jest fixes this issue https://github.com/nodejs/node/issues/38343
    "unicorn/prefer-node-protocol": "off"
  }
};

376
metamigo-config/index.ts Normal file
View file

@ -0,0 +1,376 @@
import * as process from "process";
import * as convict from "convict";
import * as Metamigo from "common";
import { defState } from "@digiresilience/montar";
/**
 * convict schema for this application's configuration. Each leaf declares its
 * documentation, format, default value and (usually) the environment variable
 * that overrides it; `sensitive: true` redacts the value when the resolved
 * config is printed or logged.
 */
export const configSchema = {
  db: {
    connection: {
      doc: "The postgres connection url.",
      format: "uri",
      default: "postgresql://metamigo:metamigo@127.0.0.1:5435/metamigo_dev",
      env: "DATABASE_URL",
      sensitive: true,
    },
    name: {
      doc: "The name of the postgres database",
      format: String,
      default: "metamigo_dev",
      env: "DATABASE_NAME",
    },
    owner: {
      doc: "The username of the postgres database owner",
      format: String,
      default: "metamigo",
      env: "DATABASE_OWNER",
    },
  },
  worker: {
    connection: {
      doc: "The postgres connection url for the worker database.",
      format: "uri",
      default: "postgresql://metamigo:metamigo@127.0.0.1:5435/metamigo_dev",
      env: "WORKER_DATABASE_URL",
      // Fix: this url embeds a password, so redact it like db.connection.
      sensitive: true,
    },
    concurrency: {
      doc: "The number of jobs to run concurrently",
      default: 1,
      format: "positiveInt",
      env: "WORKER_CONCURRENT_JOBS",
    },
    pollInterval: {
      doc: "How long to wait between polling for jobs in milliseconds (for jobs scheduled in the future/retries)",
      default: 2000,
      format: "positiveInt",
      env: "WORKER_POLL_INTERVAL_MS",
    },
  },
  postgraphile: {
    auth: {
      doc: "The postgres role that postgraphile logs in with",
      format: String,
      default: "metamigo_graphile_auth",
      env: "DATABASE_AUTHENTICATOR",
    },
    appRootConnection: {
      doc: "The postgres root/superuser connection url for development mode so PG can watch the schema changes, this is strangely named in the postgraphile API 'ownerConnectionString'",
      format: String,
      default: "postgresql://postgres:metamigo@127.0.0.1:5435/metamigo_dev",
      env: "APP_ROOT_DATABASE_URL",
    },
    authConnection: {
      doc: "The postgres connection URL for postgraphile, must not be superuser and must have limited privs.",
      format: String,
      default:
        "postgresql://metamigo_graphile_auth:metamigo@127.0.0.1:5435/metamigo_dev",
      env: "DATABASE_AUTH_URL",
    },
    visitor: {
      doc: "The postgres role that postgraphile switches to",
      format: String,
      default: "app_postgraphile",
      env: "DATABASE_VISITOR",
    },
    schema: {
      doc: "The schema postgraphile should expose with graphql",
      format: String,
      default: "app_public",
    },
    enableGraphiql: {
      doc: "Whether to enable the graphiql web interface or not",
      format: "Boolean",
      default: false,
      env: "ENABLE_GRAPHIQL",
    },
  },
  dev: {
    shadowConnection: {
      // Typo fix in user-facing doc: "databse" -> "database".
      doc: "The shadow database connection url used by postgraphile-migrate. Not needed in production.",
      format: "uri",
      default: "postgresql://metamigo:metamigo@127.0.0.1:5435/metamigo_shadow",
      env: "SHADOW_DATABASE_URL",
      sensitive: true,
    },
    rootConnection: {
      doc: "The postgres root/superuser connection url for testing only, database must NOT be the app database. Not needed in production.",
      format: "uri",
      default: "postgresql://postgres:metamigo@127.0.0.1:5435/template1",
      env: "ROOT_DATABASE_URL",
      sensitive: true,
    },
  },
  frontend: {
    url: {
      doc: "The url the frontend can be accessed at",
      format: "url",
      default: "http://localhost:3000",
      env: "FRONTEND_URL",
    },
    apiUrl: {
      doc: "The url the api backend can be accessed at from the frontend server",
      format: "url",
      default: "http://localhost:3001",
      env: "API_URL",
    },
  },
  nextAuth: {
    secret: {
      // Typo fix in user-facing doc: "crytographic" -> "cryptographic".
      doc: "A random string used to hash tokens, sign cookies and generate cryptographic keys. Shared with the api backend.",
      format: String,
      default: undefined,
      env: "NEXTAUTH_SECRET",
      sensitive: true,
    },
    audience: {
      doc: "We will add this string as the `aud` claim to our JWT token, if empty or not present defaults to `frontend.url`",
      format: String,
      default: "",
      env: "NEXTAUTH_AUDIENCE",
    },
    signingKeyB64: {
      doc: "A base64 encoded JWK.Key used for JWT signing",
      format: String,
      default: undefined,
      env: "NEXTAUTH_SIGNING_KEY_B64",
      sensitive: true,
    },
    encryptionKeyB64: {
      doc: "A base64 encoded JWK.Key used for JWT encryption",
      format: String,
      default: undefined,
      env: "NEXTAUTH_ENCRYPTION_KEY_B64",
      sensitive: true,
    },
    // signingKey/encryptionKey are derived at load time from the *B64 fields
    // (see loadConfig), hence no env and skipGenerate.
    signingKey: {
      doc: "",
      format: String,
      default: undefined,
      sensitive: true,
      skipGenerate: true,
    },
    encryptionKey: {
      doc: "",
      format: String,
      default: undefined,
      sensitive: true,
      skipGenerate: true,
    },
    google: {
      id: {
        doc: "reference https://next-auth.js.org/providers/google",
        format: String,
        default: undefined,
        env: "GOOGLE_ID",
        sensitive: true,
      },
      secret: {
        doc: "reference https://next-auth.js.org/providers/google",
        format: String,
        default: undefined,
        env: "GOOGLE_SECRET",
        sensitive: true,
      },
    },
    github: {
      id: {
        doc: "reference https://next-auth.js.org/providers/github",
        format: String,
        default: undefined,
        env: "GITHUB_ID",
        sensitive: true,
      },
      secret: {
        doc: "reference https://next-auth.js.org/providers/github",
        format: String,
        default: undefined,
        env: "GITHUB_SECRET",
        sensitive: true,
      },
    },
    gitlab: {
      id: {
        doc: "reference https://next-auth.js.org/providers/gitlab",
        format: String,
        default: undefined,
        env: "GITLAB_ID",
        sensitive: true,
      },
      secret: {
        doc: "reference https://next-auth.js.org/providers/gitlab",
        format: String,
        default: undefined,
        env: "GITLAB_SECRET",
        sensitive: true,
      },
    },
    cognito: {
      id: {
        doc: "reference https://next-auth.js.org/providers/cognito",
        format: String,
        default: undefined,
        env: "COGNITO_ID",
        sensitive: true,
      },
      secret: {
        doc: "reference https://next-auth.js.org/providers/cognito",
        format: String,
        default: undefined,
        env: "COGNITO_SECRET",
        sensitive: true,
      },
      domain: {
        doc: "reference https://next-auth.js.org/providers/cognito",
        format: String,
        default: undefined,
        env: "COGNITO_DOMAIN",
        sensitive: true,
      },
    },
  },
  cfaccess: {
    audience: {
      doc: "the cloudflare access audience id",
      format: String,
      default: undefined,
      env: "CFACCESS_AUDIENCE",
    },
    domain: {
      doc: "the cloudflare access domain, something like `YOURAPP.cloudflareaccess.com`",
      format: String,
      default: undefined,
      env: "CFACCESS_DOMAIN",
    },
  },
  signald: {
    enabled: {
      doc: "Whether to enable the signald signal backend",
      format: "Boolean",
      default: false,
      env: "SIGNALD_ENABLED",
    },
    socket: {
      doc: "the unix domain socket signald is listening on",
      format: String,
      default: `${process.cwd()}/signald/signald.sock`,
      env: "SIGNALD_SOCKET",
    },
  },
};
// define the interfaces for the concrete config objects
/** Validated `db` section of the config. */
export interface IDBConfig {
  connection: string;
  name: string;
  owner: string;
}
/** Validated `worker` (background job runner) section. */
export interface IWorkerConfig {
  connection: string;
  concurrency: number;
  pollInterval: number;
}
/** Validated `postgraphile` section. */
export interface IPostgraphileConfig {
  auth: string;
  visitor: string;
  appRootConnection: string;
  authConnection: string;
  schema: string;
  enableGraphiql: boolean;
}
/** Development-only database URLs (shadow/root); not needed in production. */
export interface IDevConfig {
  shadowConnection: string;
  rootConnection: string;
}
/** Validated `frontend` section. */
export interface IFrontendConfig {
  url: string;
  apiUrl: string;
}
/** next-auth secrets/keys plus optional OAuth provider credentials. */
export interface INextAuthConfig {
  secret: string;
  audience: string;
  signingKey: string;
  encryptionKey: string;
  signingKeyB64: string;
  encryptionKeyB64: string;
  google?: { id: string; secret: string };
  github?: { id: string; secret: string };
  gitlab?: { id: string; secret: string };
  cognito?: { id: string; secret: string; domain: string };
}
/** Cloudflare Access verification settings. */
export interface ICFAccessConfig {
  audience: string;
  domain: string;
}
/** signald (Signal backend) settings. */
export interface ISignaldConifg {
  enabled: boolean;
  socket: string;
}
// Correctly-spelled alias for the typo'd ISignaldConifg above; the original
// name is kept so existing imports remain backward compatible.
export type ISignaldConfig = ISignaldConifg;
// Extend the metamigo base type to add your app's custom config along side the out
// of the box Metamigo config
export interface IAppConfig extends Metamigo.IMetamigoConfig {
  db: IDBConfig;
  worker: IWorkerConfig;
  postgraphile: IPostgraphileConfig;
  dev: IDevConfig;
  frontend: IFrontendConfig;
  nextAuth: INextAuthConfig;
  cfaccess: ICFAccessConfig;
  signald: ISignaldConifg;
}
// The raw convict accessor for the merged app config.
export type IAppConvict = Metamigo.ExtendedConvict<IAppConfig>;
// Merge the Metamigo base schema with your app's schema
// NOTE(review): prefer @ts-expect-error with a reason over @ts-ignore, which
// silently hides any future error on the next line.
// @ts-ignore
export const schema: convict.Schema<IAppConfig> = {
  ...Metamigo.configBaseSchema,
  ...configSchema,
};
/**
 * Load, validate and post-process the application configuration.
 *
 * Post-processing steps:
 *  - mirrors frontend.url into process.env.NEXTAUTH_URL (next-auth reads it
 *    from the environment, not from our config object)
 *  - decodes the base64 signing/encryption keys into their plain-text fields
 *  - defaults nextAuth.audience to frontend.url when unset
 *
 * Throws when frontend.url is missing or empty.
 */
export const loadConfig = async (): Promise<IAppConfig> => {
  const config = await Metamigo.loadConfiguration(schema);
  if (!config.frontend.url || config.frontend.url === "")
    throw new Error(
      "configuration value frontend.url is missing. Add to config or set NEXTAUTH_URL env var"
    );
  // nextauth expects the url to be provided with this environment variable, so we will munge it in place here
  process.env.NEXTAUTH_URL = config.frontend.url;
  if (config.nextAuth.signingKeyB64)
    config.nextAuth.signingKey = Buffer.from(
      config.nextAuth.signingKeyB64,
      "base64"
    ).toString("utf-8");
  if (config.nextAuth.encryptionKeyB64)
    config.nextAuth.encryptionKey = Buffer.from(
      config.nextAuth.encryptionKeyB64,
      "base64"
    ).toString("utf-8");
  if (!config.nextAuth.audience || config.nextAuth.audience === "")
    config.nextAuth.audience = config.frontend.url;
  // NOTE(review): `as any` papers over the gap between loadConfiguration's
  // return type and IAppConfig — confirm and tighten if possible.
  return config as any;
};
/** Load the configuration as a raw convict accessor, without post-processing. */
export const loadConfigRaw = async (): Promise<IAppConvict> => {
  return Metamigo.loadConfigurationRaw(schema);
};
// montar state: resolved once at startup via loadConfig and injected wherever
// "config" is depended upon.
const config = defState("config", {
  start: loadConfig,
});
export default config;

View file

@ -0,0 +1,35 @@
{
"name": "config",
"version": "0.2.0",
"main": "build/main/index.js",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@digiresilience/montar": "^0.1.6"
},
"devDependencies": {
"@babel/core": "7.20.12",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"eslint": "^8.32.0",
"pino-pretty": "^9.1.1",
"prettier": "^2.8.3",
"ts-node": "^10.9.1",
"typedoc": "^0.23.24",
"typescript": "4.9.4"
},
"files": ["build", "src"],
"scripts": {
"build": "tsc -p tsconfig.json",
"doc:html": "typedoc src/ --exclude '**/*.test.ts' --exclude '**/*.spec.ts' --name $npm_package_name --readme README.md --target es2019 --mode file --out build/docs",
"doc": "yarn run doc:html",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"lint:lint": "eslint src --ext .ts",
"lint:prettier": "prettier \"src/**/*.ts\" --list-different",
"test": "echo no tests",
"lint": "yarn lint:lint && yarn lint:prettier",
"watch:build": "tsc -p tsconfig.json -w"
}
}

View file

@ -0,0 +1,8 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "build/main"
},
"include": ["**/*.ts", "**/.*.ts", "index.ts"],
"exclude": ["node_modules", "**/*.spec.ts", "**/*.test.ts"]
}

1
metamigo-db/.eslintrc.js Normal file
View file

@ -0,0 +1 @@
// Re-export the parent directory's ESLint config. A bare `require` only
// evaluates the module for side effects and exports nothing, so ESLint would
// see an empty config for this package; the config object must be exported.
module.exports = require("../.eslintrc.js");

117
metamigo-db/.gmrc Normal file
View file

@ -0,0 +1,117 @@
/*
* Graphile Migrate configuration.
*
* MUST NOT CONTAIN SECRETS/PASSWORDS
* This file is in JSON5 format.
*/
{
/*
* Database connections strings are sourced from the DATABASE_URL,
* SHADOW_DATABASE_URL and ROOT_DATABASE_URL environmental variables.
*/
/*
* pgSettings: key-value settings to be automatically loaded into PostgreSQL
* before running migrations, using an equivalent of `SET LOCAL <key> TO
* <value>`
*/
"pgSettings": {
"search_path": "public",
},
/*
* placeholders: substituted in SQL files when compiled/executed. Placeholder
* keys should be prefixed with a colon and in all caps, like
* `:COLON_PREFIXED_ALL_CAPS`. Placeholder values should be strings. They
* will be replaced verbatim with NO ESCAPING AT ALL (this differs from how
* psql handles placeholders) so should only be used with "safe" values. This
* is useful for committing migrations where certain parameters can change
* between environments (development, staging, production) but you wish to
* use the same signed migration files for all.
*
* The special value "!ENV" can be used to indicate an environmental variable
* of the same name should be used.
*
* Graphile Migrate automatically sets the `:DATABASE_NAME` and
* `:DATABASE_OWNER` placeholders, and you should not attempt to override
* these.
*/
"placeholders": {
":DATABASE_VISITOR": "!ENV",
":DATABASE_AUTHENTICATOR": "!ENV",
},
/*
* Actions allow you to run scripts or commands at certain points in the
 * migration lifecycle. SQL files are run against the database directly.
 * "command" actions are run with the following environmental variables set:
*
* - GM_DBURL: the PostgreSQL URL of the database being migrated
* - GM_DBNAME: the name of the database from GM_DBURL
* - GM_DBUSER: the user from GM_DBURL
* - GM_SHADOW: set to 1 if the shadow database is being migrated, left unset
* otherwise
*
* If "shadow" is unspecified, the actions will run on events to both shadow
* and normal databases. If "shadow" is true the action will only run on
* actions to the shadow DB, and if false only on actions to the main DB.
*/
/*
* afterReset: actions executed after a `graphile-migrate reset` command.
*/
"afterReset": [
"!../scripts/afterReset.sql",
],
/*
* afterAllMigrations: actions executed once all migrations are complete.
*/
"afterAllMigrations": [
{
"_": "command",
"shadow": true,
"command": "node scripts/dump-db.js"
},
],
/*
* afterCurrent: actions executed once the current migration has been
* evaluated (i.e. in watch mode).
*/
"afterCurrent": [
{
"_": "command",
"command": "./scripts/afterCurrent.sh",
}
],
/*
* blankMigrationContent: content to be written to the current migration
* after commit. NOTE: this should only contain comments.
*/
// "blankMigrationContent": "-- Write your migration here\n",
/****************************************************************************\
*** ***
*** You probably don't want to edit anything below here. ***
*** ***
\****************************************************************************/
/*
* manageGraphileMigrateSchema: if you set this false, you must be sure to
* keep the graphile_migrate schema up to date yourself. We recommend you
* leave it at its default.
*/
// "manageGraphileMigrateSchema": true,
/*
* migrationsFolder: path to the folder in which to store your migrations.
*/
// migrationsFolder: "./migrations",
"//generatedWith": "1.0.2"
}

2
metamigo-db/Dockerfile Normal file
View file

@ -0,0 +1,2 @@
# Postgres 13 with the project bootstrap script baked in. Scripts placed in
# /docker-entrypoint-initdb.d run once, on first start with an empty data dir.
FROM postgres:13
COPY scripts/bootstrap.sh /docker-entrypoint-initdb.d/bootstrap.sh

67
metamigo-db/helpers.ts Normal file
View file

@ -0,0 +1,67 @@
import process from "process";
import { existsSync } from "fs";
import { exec } from "child_process";
import type { IAppConfig } from "config";
/**
 * We use graphile-migrate for managing database migrations.
 *
 * However we also use convict as the sole source of truth for our app's configuration. We do not want to have to configure
 * separate env files or config files for graphile-migrate and yet again others for convict.
 *
 * So we wrap the graphile-migrate cli tool here. We parse our convict config, set necessary env vars, and then shell out to
 * graphile-migrate.
 *
 * Commander eats all args starting with --, so you must use the -- escape to indicate the arguments have finished
 *
 * Example:
 *   ./cli db -- --help // will show graphile migrate help
 *   ./cli db -- watch // will watch the current sql for changes
 *   ./cli db -- watch --once // will apply the current sql once
 *
 * @param commands arguments passed straight through to graphile-migrate
 * @param config   the loaded application config, mapped onto the env vars .gmrc expects
 * @param silent   suppress the "executing" line and stdout logging (stderr still logs)
 * @returns resolves when graphile-migrate exits with code 0; rejects on a
 *          missing .gmrc, a spawn failure, or a non-zero exit code
 */
export const migrateWrapper = async (
  commands: string[],
  config: IAppConfig,
  silent = false
): Promise<void> => {
  // Environment variables graphile-migrate/.gmrc read (see .gmrc placeholders).
  const env = {
    DATABASE_URL: config.db.connection,
    SHADOW_DATABASE_URL: config.dev.shadowConnection,
    ROOT_DATABASE_URL: config.dev.rootConnection,
    DATABASE_NAME: config.db.name,
    DATABASE_OWNER: config.db.owner,
    DATABASE_AUTHENTICATOR: config.postgraphile.auth,
    DATABASE_VISITOR: config.postgraphile.visitor,
  };
  // NOTE(review): commands are interpolated into a shell string — callers must
  // only pass trusted CLI arguments (shell injection risk otherwise).
  const cmd = `npx --no-install graphile-migrate ${commands.join(" ")}`;
  const dbDir = `../../db`;
  const gmrc = `${dbDir}/.gmrc`;
  if (!existsSync(gmrc)) {
    throw new Error(`graphile migrate config not found at ${gmrc}`);
  }
  if (!silent) console.log("executing:", cmd);
  return new Promise((resolve, reject) => {
    const proc = exec(cmd, {
      env: { ...process.env, ...env },
      cwd: dbDir,
    });
    // stdout/stderr are typed as possibly null; exec pipes them by default,
    // but guard with optional chaining to satisfy strict null checks.
    proc.stdout?.on("data", (data) => {
      if (!silent) console.log("MIGRATE:", data);
    });
    proc.stderr?.on("data", (data) => {
      console.error("MIGRATE", data);
    });
    // Fix: without an 'error' handler the promise never settles (and the
    // error is unhandled) when the child fails to spawn, e.g. npx missing.
    proc.on("error", reject);
    proc.on("close", (code) => {
      if (code !== 0) {
        reject(new Error(`graphile-migrate exited with code ${code}`));
        return;
      }
      resolve();
    });
  });
};

89
metamigo-db/index.ts Normal file
View file

@ -0,0 +1,89 @@
import { IAppConfig } from "config";
import camelcaseKeys from "camelcase-keys";
import PgSimplifyInflectorPlugin from "@graphile-contrib/pg-simplify-inflector";
// import PgManyToManyPlugin from "@graphile-contrib/pg-many-to-many";
import * as ConnectionFilterPlugin from "postgraphile-plugin-connection-filter";
import type { PostGraphileCoreOptions } from "postgraphile-core";
import {
UserRecordRepository,
AccountRecordRepository,
SessionRecordRepository,
} from "common";
import {
SettingRecordRepository,
VoiceProviderRecordRepository,
VoiceLineRecordRepository,
WebhookRecordRepository,
WhatsappBotRecordRepository,
WhatsappMessageRecordRepository,
WhatsappAttachmentRecordRepository,
SignalBotRecordRepository,
} from "./records";
import type { IInitOptions, IDatabase } from "pg-promise";
/**
 * The custom repositories attached to every pg-promise database/task/tx
 * context via the protocol `extend` event (see dbInitOptions in this file).
 */
export interface IRepositories {
  users: UserRecordRepository;
  sessions: SessionRecordRepository;
  accounts: AccountRecordRepository;
  settings: SettingRecordRepository;
  voiceLines: VoiceLineRecordRepository;
  voiceProviders: VoiceProviderRecordRepository;
  webhooks: WebhookRecordRepository;
  whatsappBots: WhatsappBotRecordRepository;
  whatsappMessages: WhatsappMessageRecordRepository;
  whatsappAttachments: WhatsappAttachmentRecordRepository;
  signalBots: SignalBotRecordRepository;
}
// The pg-promise database handle, extended with our repositories.
export type AppDatabase = IDatabase<IRepositories> & IRepositories;
/**
 * Build the pg-promise initialization options: camel-cases all result rows
 * and attaches our repository objects to every db/task/tx context.
 */
export const dbInitOptions = (
  _config: IAppConfig
): IInitOptions<IRepositories> => {
  return {
    noWarnings: true,
    // Convert snake_case column names to camelCase on every result set.
    // NOTE(review): this matches the pg-promise v10 `receive(data, result)`
    // signature; v11 changed the event to a single object argument — verify
    // against the installed version.
    receive(data, result) {
      if (result) result.rows = camelcaseKeys(data);
    },
    // Extending the database protocol with our custom repositories;
    // API: http://vitaly-t.github.io/pg-promise/global.html#event:extend
    extend(obj: any, _dc) { // obj is effectively AppDatabase; typed `any` to allow property assignment below
      // Database Context (_dc) is mainly needed for extending multiple databases with different access API.
      // NOTE:
      // This event occurs for every task and transaction being executed (which could be every request!)
      // so it should be as fast as possible. Do not use 'require()' or do any other heavy lifting.
      obj.users = new UserRecordRepository(obj);
      obj.sessions = new SessionRecordRepository(obj);
      obj.accounts = new AccountRecordRepository(obj);
      obj.settings = new SettingRecordRepository(obj);
      obj.voiceLines = new VoiceLineRecordRepository(obj);
      obj.voiceProviders = new VoiceProviderRecordRepository(obj);
      obj.webhooks = new WebhookRecordRepository(obj);
      obj.whatsappBots = new WhatsappBotRecordRepository(obj);
      obj.whatsappMessages = new WhatsappMessageRecordRepository(obj);
      obj.whatsappAttachments = new WhatsappAttachmentRecordRepository(obj);
      obj.signalBots = new SignalBotRecordRepository(obj);
    },
  };
};
/**
 * PostGraphile options shared by the API server: RBAC and index hints are
 * honored (not ignored), JSON is exposed dynamically, and our inflector and
 * connection-filter plugins are appended.
 */
export const getPostGraphileOptions = (): PostGraphileCoreOptions => {
  const appendPlugins = [
    PgSimplifyInflectorPlugin,
    // PgManyToManyPlugin,
    ConnectionFilterPlugin as any,
  ];
  const options: PostGraphileCoreOptions = {
    ignoreRBAC: false,
    dynamicJson: true,
    ignoreIndexes: false,
    appendPlugins,
  };
  return options;
};
export * from "./helpers";
export * from "./records";

View file

@ -0,0 +1,650 @@
--! Previous: -
--! Hash: sha1:b13a5217288f5d349d8d9e3afbd7bb30c0dbad21
-- region Bootstrap
-- NOTE(review): this is a committed graphile-migrate migration (see the sha1
-- hash header above); editing its statements invalidates the hash — schema
-- changes belong in a new migration.
drop schema if exists app_public cascade;
-- Lock down default privileges so newly created objects are not world-usable.
alter default privileges revoke all on sequences from public;
alter default privileges revoke all on functions from public;
-- By default the public schema is owned by `postgres`; we need superuser privileges to change this :(
-- alter schema public owner to waterbear;
revoke all on schema public from public;
grant all on schema public to :DATABASE_OWNER;
create schema app_public;
-- app_public: the schema exposed through graphql (see postgraphile config).
grant usage on schema
public,
app_public
to
:DATABASE_VISITOR,
app_admin,
app_anonymous,
app_user;
/**********/
-- app_hidden: usable by the visitor role but not exposed via graphql.
drop schema if exists app_hidden cascade;
create schema app_hidden;
grant usage on schema app_hidden to :DATABASE_VISITOR;
alter default privileges in schema app_hidden grant usage, select on sequences to :DATABASE_VISITOR;
/**********/
alter default privileges in schema public, app_public, app_hidden grant usage, select on sequences to :DATABASE_VISITOR;
alter default privileges in schema public, app_public, app_hidden
grant execute on functions to
:DATABASE_VISITOR,
app_admin,
app_user;
/**********/
-- app_private: no usage granted here — only reachable via security definer functions.
drop schema if exists app_private cascade;
create schema app_private;
-- endregion
-- region UtilFunctions
-- Trigger helper: enqueue a graphile_worker job on insert/update. tg_argv[0]
-- is the task name; tg_argv[1] is presumably the queue name, defaulting to a
-- random uuid (see the comment-on below) — confirm against graphile-worker's
-- add_job signature.
create function app_private.tg__add_job() returns trigger as
$$
begin
perform graphile_worker.add_job(tg_argv[0], json_build_object('id', NEW.id),
coalesce(tg_argv[1], public.gen_random_uuid()::text));
return NEW;
end;
$$ language plpgsql volatile
security definer
set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__add_job() is
E'Useful shortcut to create a job on insert/update. Pass the task name as the first trigger argument, and optionally the queue name as the second argument. The record id will automatically be available on the JSON payload.';
/* ------------------------------------------------------------------ */
-- Trigger helper: maintain created_at/updated_at; guarantees updated_at is
-- strictly increasing even for multiple updates in the same millisecond.
create function app_private.tg__timestamps() returns trigger as
$$
begin
NEW.created_at = (case when TG_OP = 'INSERT' then NOW() else OLD.created_at end);
NEW.updated_at = (case
when TG_OP = 'UPDATE' and OLD.updated_at >= NOW()
then OLD.updated_at + interval '1 millisecond'
else NOW() end);
return NEW;
end;
$$ language plpgsql volatile
set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__timestamps() is
E'This trigger should be called on all tables with created_at, updated_at - it ensures that they cannot be manipulated and that updated_at will always be larger than the previous updated_at.';
-- endregion
-- region Users, Sessions, and Accounts
/* ------------------------------------------------------------------ */
-- Sessions live in app_private: tokens must never be exposed through graphql.
create table app_private.sessions
(
id uuid not null default gen_random_uuid() primary key,
user_id uuid not null,
expires timestamptz not null,
session_token text not null,
access_token text not null,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
last_active_at timestamptz not null default now()
);
create unique index session_token on app_private.sessions(session_token);
create unique index access_token on app_private.sessions(access_token);
-- RLS enabled with no policies defined here: rows are only reachable via the
-- security definer functions below.
alter table app_private.sessions
enable row level security;
/* ------------------------------------------------------------------ */
create function app_public.current_session_id() returns uuid as
$$
-- note the jwt.claims.session_id doesn't mean you have to use jwt, it is just where this function will always look for the session id.
select nullif(pg_catalog.current_setting('jwt.claims.session_id', true), '')::uuid;
$$ language sql stable;
comment on function app_public.current_session_id() is
E'Handy method to get the current session ID.';
/*
* A less secure but more performant version of this function would be just:
*
* select nullif(pg_catalog.current_setting('jwt.claims.user_id', true), '')::int;
*
* The increased security of this implementation is because even if someone gets
* the ability to run SQL within this transaction they cannot impersonate
* another user without knowing their session_id (which should be closely
* guarded).
*/
-- security definer: needs to read app_private.sessions on behalf of the caller.
create function app_public.current_user_id() returns uuid as
$$
select user_id
from app_private.sessions
where id = app_public.current_session_id();
$$ language sql stable
security definer
set search_path to pg_catalog, public, pg_temp;
comment on function app_public.current_user_id() is
E'Handy method to get the current user ID for use in RLS policies, etc; in GraphQL, use `currentUser{id}` instead.';
-- We've put this in public, but omitted it, because it's often useful for debugging auth issues.
/* ------------------------------------------------------------------ */
-- These are the user roles for our application
create type app_public.role_type as
ENUM ('none','admin', 'user');
/* ------------------------------------------------------------------ */
create table app_public.users
(
id uuid not null default uuid_generate_v1mc() primary key,
email citext not null,
email_verified timestamptz,
name text not null,
avatar text,
user_role app_public.role_type not null default 'none',
is_active boolean not null default false,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
created_by text not null,
constraint users_email_validity check (email ~* '^[A-Za-z0-9._%-]+@[A-Za-z0-9.-]+[.][A-Za-z]+$'),
constraint users_avatar_validity check (avatar ~ '^https?://[^/]+'),
constraint users_email_unique unique (email)
);
comment on table app_public.users is
E'A user who can log in to the application.';
comment on column app_public.users.id is
E'Unique identifier for the user.';
comment on column app_public.users.email is
E'The email address of the user.';
comment on column app_public.users.email_verified is
E'The time at which the email address was verified';
comment on column app_public.users.name is
E'Public-facing name (or pseudonym) of the user.';
comment on column app_public.users.avatar is
E'Optional avatar URL.';
comment on column app_public.users.user_role is
E'The role that defines the user''s privileges.';
comment on column app_public.users.is_active is
E'If false, the user is not allowed to login or access the application';
alter table app_public.users
enable row level security;
-- Sessions reference users; deleting a user deletes their sessions.
alter table app_private.sessions
add constraint sessions_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
create index on app_private.sessions (user_id);
-- app_public perms default
create policy access_self on app_public.users to app_anonymous using (id = app_public.current_user_id());
--create policy update_self on app_public.users for update using (id = app_public.current_user_id());
grant select on app_public.users to app_anonymous;
-- NOTE(review): update columns are granted below but the update_self policy
-- above is commented out — confirm app_user can actually update its own row
-- (e.g. via role inheritance from app_anonymous or a policy added later).
grant update (name, avatar) on app_public.users to :DATABASE_VISITOR, app_user;
-- app_public perms for app_admin
create policy access_all on app_public.users to app_admin using (true);
-- NOTE(review): this grant is subsumed by the broader update grant two
-- statements below (which adds created_by); grants are additive so this is
-- redundant but harmless.
grant update (email, name, avatar, is_active, user_role) on app_public.users to app_admin;
grant select on app_public.users to app_admin;
grant insert (email, name, avatar, user_role, is_active, created_by) on app_public.users to app_admin;
grant update (email, name, avatar, user_role, is_active, created_by) on app_public.users to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.users
for each row
execute procedure app_private.tg__timestamps();
/* ------------------------------------------------------------------ */
-- Returns the row of the currently authenticated user, resolved via
-- app_public.current_user_id(); yields no row when not logged in.
create function app_public.current_user() returns app_public.users as
$$
select users.*
from app_public.users
where id = app_public.current_user_id();
$$ language sql stable;
comment on function app_public.current_user() is
    E'The currently logged in user (or null if not logged in).';
/* ------------------------------------------------------------------ */
-- Ends the current session: removes the session row and blanks the
-- session id carried in the transaction-local JWT claims.
-- SECURITY DEFINER so it may delete from app_private; search_path pinned.
create function app_public.logout() returns void as
$$
begin
    -- Delete the session
    delete from app_private.sessions where id = app_public.current_session_id();
    -- Clear the identifier from the transaction
    perform set_config('jwt.claims.session_id', '', true);
end;
$$ language plpgsql security definer
    volatile
    set search_path to pg_catalog, public, pg_temp;
/* ------------------------------------------------------------------ */
-- Third-party login accounts linked to a user (compound_id identifies the
-- provider/account pair).
create table app_public.accounts
(
    id uuid not null default uuid_generate_v1mc() primary key,
    compound_id text not null,
    user_id uuid not null,
    provider_type text not null,
    provider_id text not null,
    provider_account_id text not null,
    refresh_token text,
    access_token text,
    access_token_expires timestamptz,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);
alter table app_public.accounts
    enable row level security;
alter table app_public.accounts
    add constraint accounts_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
create unique index accounts_compound_id on app_public.accounts(compound_id);
create index accounts_provider_account_id on app_public.accounts(provider_account_id);
create index accounts_provider_id on app_public.accounts(provider_id);
create index accounts_user_id on app_public.accounts (user_id);
-- Users can see only their own linked accounts.
create policy access_self on app_public.accounts to app_anonymous using (user_id = app_public.current_user_id());
grant select on app_public.accounts to app_anonymous;
grant update (compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_user;
-- Admins can manage all accounts.
create policy access_all on app_public.accounts to app_admin using (true);
grant update (compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_admin;
grant select on app_public.accounts to app_admin;
grant insert (user_id, compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_admin;
-- FIX: removed an exact duplicate of the "grant update ... to app_admin"
-- statement that previously followed the insert grant. (If this migration
-- is already committed, the graphile-migrate hash must be regenerated.)
create trigger _100_timestamps
    before insert or update
    on app_public.accounts
    for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Create first user function
-- Bootstrap helper: creates the very first user with the admin role.
-- Callable by anonymous clients, but refuses to run once any user exists.
-- NOTE(review): SECURITY DEFINER without "set search_path" -- consider
-- pinning search_path like the other definer functions in this migration.
-- NOTE(review): count-then-insert is not race-free; presumably acceptable
-- for a one-shot bootstrap call -- confirm.
create or replace function app_public.create_first_user (user_email text, user_name text)
    returns setof app_public.users
as
$$
declare
    user_count int;
begin
    user_count := (select count(id) from app_public.users);
    if (user_count != 0) then
        raise exception 'Admin user already created';
    end if;
    return query insert into app_public.users (email, email_verified, name, user_role, is_active, created_by)
        values (user_email, now(), user_name, 'admin', true, 'first user hook') returning *;
end ;
$$ LANGUAGE plpgsql VOLATILE
    SECURITY DEFINER;
comment on function app_public.create_first_user(user_email text, user_name text) is
    E'Creates the first user with an admin role. Only possible when there are no other users in the database.';
grant execute on function app_public.create_first_user(user_email text, user_name text) to app_anonymous;
-- Safety net: any row inserted into an empty users table is promoted to
-- the admin role, regardless of the role supplied by the caller.
create function app_private.tg__first_user() returns trigger as
$$
declare
    user_count int;
begin
    user_count := (select count(id) from app_public.users);
    if (user_count = 0) then
        NEW.user_role = 'admin';
    end if;
    return NEW;
end;
$$ language plpgsql volatile
    set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__first_user() is
    E'This trigger is called to ensure the first user created is an admin';
create trigger _101_first_user
    before insert
    on app_public.users
    for each row
execute procedure app_private.tg__first_user();
-- endregion
-- region Settings
-- Simple name -> jsonb key/value store for application configuration.
create table app_public.settings
(
    id uuid not null default uuid_generate_v1mc() primary key,
    name text not null,
    value jsonb,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);
-- Names are unique; this also backs the ON CONFLICT (name) upsert used by
-- the application layer.
create unique index setting_name on app_public.settings(name);
alter table app_public.settings
    enable row level security;
-- Admin-only table: no grants/policies for anonymous or regular users.
create policy access_all on app_public.settings to app_admin using (true);
grant update (name, value) on app_public.settings to app_admin;
grant select on app_public.settings to app_admin;
grant insert (name, value) on app_public.settings to app_admin;
create trigger _100_timestamps
    before insert or update
    on app_public.settings
    for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Provider
-- Companies providing incoming voice call services; `credentials` holds
-- provider-specific secrets as jsonb.
create table app_public.voice_providers
(
    id uuid not null default uuid_generate_v1mc() primary key,
    kind text not null,
    name text not null,
    credentials jsonb not null,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);
-- NOTE(review): index is on `name` but named voice_providers_number --
-- looks like a copy/paste name; renaming needs a follow-up migration.
create unique index voice_providers_number on app_public.voice_providers(name);
alter table app_public.voice_providers
    enable row level security;
-- Admin-only resource.
create policy access_all on app_public.voice_providers to app_admin using (true);
grant update (name, credentials) on app_public.voice_providers to app_admin;
grant select on app_public.voice_providers to app_admin;
grant insert (kind, name, credentials) on app_public.voice_providers to app_admin;
grant delete on app_public.voice_providers to app_admin;
create trigger _100_timestamps
    before insert or update
    on app_public.voice_providers
    for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Voice Line
-- An incoming phone line provisioned at a voice provider.
create table app_public.voice_lines
(
    id uuid not null default uuid_generate_v1mc() primary key,
    provider_id uuid not null,
    provider_line_sid text not null,
    number text not null,
    language text not null,
    voice text not null,
    prompt_text text,
    prompt_audio jsonb,
    audio_prompt_enabled boolean not null default false,
    audio_converted_at timestamptz,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);
alter table app_public.voice_lines
    add constraint voice_lines_provider_id_fkey foreign key ("provider_id") references app_public.voice_providers on delete cascade;
create index on app_public.voice_lines (provider_id);
create index on app_public.voice_lines (provider_line_sid);
-- At most one line per phone number.
create unique index voice_lines_number on app_public.voice_lines(number);
alter table app_public.voice_lines
    enable row level security;
-- Admin-only resource.
create policy access_all on app_public.voice_lines to app_admin using (true);
grant update (prompt_text, prompt_audio, audio_prompt_enabled, language, voice) on app_public.voice_lines to app_admin;
grant select on app_public.voice_lines to app_admin;
grant insert (provider_id, provider_line_sid, number, prompt_text, prompt_audio, audio_prompt_enabled, language, voice) on app_public.voice_lines to app_admin;
grant delete on app_public.voice_lines to app_admin;
create trigger _100_timestamps
    before insert or update
    on app_public.voice_lines
    for each row
execute procedure app_private.tg__timestamps();
-- Queues a graphile-worker job whenever a line's provider linkage changes
-- (insert / provider_line_sid update), or a cleanup job on delete.
create function app_private.tg__voice_line_provider_update() returns trigger as $$
begin
    if (TG_OP = 'DELETE') then
        perform graphile_worker.add_job('voice-line-delete', json_build_object('voiceLineId', OLD.id, 'providerId', OLD.provider_id, 'providerLineSid', OLD.provider_line_sid));
    else
        perform graphile_worker.add_job('voice-line-provider-update', json_build_object('voiceLineId', NEW.id));
    end if;
    -- AFTER trigger: the return value is ignored.
    return null;
end;
$$ language plpgsql volatile security definer set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__voice_line_provider_update() is
    E'This trigger is called to ensure a voice line is connected to twilio properly';
create trigger _101_voice_line_provider_update
    after insert or update of provider_line_sid or delete
    on app_public.voice_lines
    for each row
execute procedure app_private.tg__voice_line_provider_update();
-- Queues an audio-processing job whenever a line's prompt audio is added
-- or changed.
create function app_private.tg__voice_line_prompt_audio_update() returns trigger as $$
begin
    perform graphile_worker.add_job('voice-line-audio-update', json_build_object('voiceLineId', NEW.id));
    -- AFTER trigger: the return value is ignored.
    return null;
end;
$$ language plpgsql volatile security definer set search_path to pg_catalog, public, pg_temp;
-- FIX: the catalog comment was copy-pasted from the provider-update trigger
-- ("connected to twilio properly"); corrected to describe the audio job.
-- (If this migration is already committed, regenerate its hash.)
comment on function app_private.tg__voice_line_prompt_audio_update() is
    E'This trigger is called to queue processing of a voice line''s updated prompt audio';
create trigger _101_voice_line_prompt_audio_update
    after insert or update of prompt_audio
    on app_public.voice_lines
    for each row
execute procedure app_private.tg__voice_line_prompt_audio_update();
-- endregion
-- region Webhooks
-- Outbound HTTP callbacks registered against a backend entity;
-- (backend_type, backend_id) is a polymorphic reference.
create table app_public.webhooks
(
    id uuid not null default uuid_generate_v1mc() primary key,
    backend_type text not null,
    backend_id uuid not null,
    name text not null,
    endpoint_url text not null,
    http_method text not null default 'post',
    headers jsonb,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now(),
    constraint webhook_http_method_validity check (http_method in ('post', 'put')),
    -- Cheap sanity check only: requires an http(s) scheme plus a host part.
    constraint webhook_endpoint_url_validity check (endpoint_url ~ '^https?://[^/]+')
);
create index on app_public.webhooks (backend_type, backend_id);
alter table app_public.webhooks
    enable row level security;
-- Admin-only resource.
create policy access_all on app_public.webhooks to app_admin using (true);
grant update (name, endpoint_url, http_method, headers) on app_public.webhooks to app_admin;
grant select on app_public.webhooks to app_admin;
grant insert (backend_type, backend_id, name, endpoint_url, http_method, headers) on app_public.webhooks to app_admin;
grant delete on app_public.webhooks to app_admin;
create trigger _100_timestamps
    before insert or update
    on app_public.webhooks
    for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappBots
-- NOTE(review): transform_null_equals makes "= NULL" behave as "IS NULL";
-- it only affects the session running this migration -- confirm whether it
-- is needed at all.
set transform_null_equals to true;
-- A WhatsApp bot instance owned by a user.
create table app_public.whatsapp_bots
(
    id uuid not null default uuid_generate_v1mc() primary key,
    phone_number text not null,
    token uuid not null default uuid_generate_v1mc(),
    user_id uuid not null,
    description text,
    auth_info text,
    qr_code text,
    is_verified boolean not null default false,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);
-- token is presumably used by the bot runner as a credential -- it must be
-- unique per bot.
create unique index whatsapp_bot_token on app_public.whatsapp_bots(token);
alter table app_public.whatsapp_bots
    add constraint whatsapp_bots_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
alter table app_public.whatsapp_bots
    enable row level security;
-- Admin-only resource.
create policy access_all on app_public.whatsapp_bots to app_admin using (true);
grant update (phone_number, token, user_id, description, auth_info, qr_code, is_verified) on app_public.whatsapp_bots to app_admin;
grant select on app_public.whatsapp_bots to app_admin;
grant insert (phone_number, token, user_id, description, auth_info, qr_code, is_verified) on app_public.whatsapp_bots to app_admin;
create trigger _100_timestamps
    before insert or update
    on app_public.whatsapp_bots
    for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappMessages
-- Messages received/stored by a whatsapp bot.
create table app_public.whatsapp_messages
(
    id uuid not null default uuid_generate_v1mc() primary key,
    whatsapp_bot_id uuid not null,
    wa_message_id text,
    wa_message text,
    wa_timestamp timestamptz,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);
-- FIX: this index was previously UNIQUE, which would limit every bot to a
-- single stored message; a bot has many messages, so the FK column only
-- needs a plain lookup index. (If this migration is already committed,
-- regenerate its graphile-migrate hash.)
create index whatsapp_message_whatsapp_bot_id on app_public.whatsapp_messages(whatsapp_bot_id);
alter table app_public.whatsapp_messages
    add constraint whatsapp_messages_whatsapp_bot_id_fkey foreign key ("whatsapp_bot_id") references app_public.whatsapp_bots on delete cascade;
alter table app_public.whatsapp_messages
    enable row level security;
-- Admin-only resource.
create policy access_all on app_public.whatsapp_messages to app_admin using (true);
grant update (whatsapp_bot_id, wa_message_id, wa_message, wa_timestamp) on app_public.whatsapp_messages to app_admin;
grant select on app_public.whatsapp_messages to app_admin;
grant insert (whatsapp_bot_id, wa_message_id, wa_message, wa_timestamp) on app_public.whatsapp_messages to app_admin;
create trigger _100_timestamps
    before insert or update
    on app_public.whatsapp_messages
    for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappAttachments
-- Binary attachments belonging to whatsapp messages.
create table app_public.whatsapp_attachments
(
    id uuid not null default uuid_generate_v1mc() primary key,
    whatsapp_bot_id uuid not null,
    whatsapp_message_id uuid,
    attachment bytea,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);
-- FIX: this index was previously UNIQUE, which would limit every bot to a
-- single attachment across all of its messages; it is a plain FK lookup
-- index. (If this migration is already committed, regenerate its hash.)
create index whatsapp_attachment_whatsapp_bot_id on app_public.whatsapp_attachments(whatsapp_bot_id);
-- Kept UNIQUE: assumes at most one attachment per message -- TODO confirm
-- against the whatsapp ingestion code before relying on this.
create unique index whatsapp_attachment_whatsapp_message_id on app_public.whatsapp_attachments(whatsapp_message_id);
alter table app_public.whatsapp_attachments
    add constraint whatsapp_attachments_whatsapp_bot_id_fkey foreign key ("whatsapp_bot_id") references app_public.whatsapp_bots on delete cascade;
alter table app_public.whatsapp_attachments
    add constraint whatsapp_attachments_whatsapp_message_id_fkey foreign key ("whatsapp_message_id") references app_public.whatsapp_messages on delete cascade;
alter table app_public.whatsapp_attachments
    enable row level security;
-- Admin-only resource.
create policy access_all on app_public.whatsapp_attachments to app_admin using (true);
grant update (whatsapp_bot_id, whatsapp_message_id, attachment) on app_public.whatsapp_attachments to app_admin;
grant select on app_public.whatsapp_attachments to app_admin;
grant insert (whatsapp_bot_id, whatsapp_message_id, attachment) on app_public.whatsapp_attachments to app_admin;
create trigger _100_timestamps
    before insert or update
    on app_public.whatsapp_attachments
    for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region SignalBots
-- Re-issued for this section; already set earlier in this migration.
set transform_null_equals to true;
-- A Signal bot instance owned by a user (mirror of whatsapp_bots without
-- qr_code).
create table app_public.signal_bots
(
    id uuid not null default uuid_generate_v1mc() primary key,
    phone_number text not null,
    token uuid not null default uuid_generate_v1mc(),
    user_id uuid not null,
    description text,
    auth_info text,
    is_verified boolean not null default false,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);
-- token is presumably used by the bot runner as a credential -- unique per bot.
create unique index signal_bot_token on app_public.signal_bots(token);
alter table app_public.signal_bots
    add constraint signal_bots_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
alter table app_public.signal_bots
    enable row level security;
-- Admin-only resource.
create policy access_all on app_public.signal_bots to app_admin using (true);
grant update (phone_number, token, user_id, description, auth_info, is_verified) on app_public.signal_bots to app_admin;
grant select on app_public.signal_bots to app_admin;
grant insert (phone_number, token, user_id, description, auth_info, is_verified) on app_public.signal_bots to app_admin;
create trigger _100_timestamps
    before insert or update
    on app_public.signal_bots
    for each row
execute procedure app_private.tg__timestamps();
-- endregion

View file

@ -0,0 +1,10 @@
--! Previous: sha1:b13a5217288f5d349d8d9e3afbd7bb30c0dbad21
--! Hash: sha1:8659f815ff013a793f2e01113a9a61a98c7bd8d5
-- Enter migration here
-- Removes the whatsapp message/attachment store and lets admins delete
-- bots. NOTE(review): editing a committed graphile-migrate file invalidates
-- the Hash header above -- regenerate it if these comments are kept.
drop table if exists app_public.whatsapp_attachments cascade;
drop table if exists app_public.whatsapp_messages cascade;
grant delete on app_public.whatsapp_bots to app_admin;
grant delete on app_public.signal_bots to app_admin;

View file

@ -0,0 +1 @@
-- Enter migration here

39
metamigo-db/package.json Normal file
View file

@ -0,0 +1,39 @@
{
"name": "db",
"private": true,
"version": "0.2.0",
"main": "build/main/db/src/index.js",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"graphile-migrate": "^1.4.1"
},
"devDependencies": {
"common": "0.2.5",
"@babel/core": "7.20.12",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"@types/jest": "^29.2.5",
"eslint": "^8.32.0",
"jest": "^29.3.1",
"jest-junit": "^15.0.0",
"pino-pretty": "^9.1.1",
"prettier": "^2.8.3",
"ts-node": "^10.9.1",
"typedoc": "^0.23.24",
"typescript": "4.9.4"
},
"scripts": {
"build": "tsc -p tsconfig.json",
"build-test": "tsc -p tsconfig.json",
"doc:html": "typedoc src/ --exclude '**/*.test.ts' --exclude '**/*.spec.ts' --name $npm_package_name --readme README.md --target es2019 --mode file --out build/docs",
"doc": "yarn run doc:html",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"worker": "NODE_ENV=development yarn cli worker",
"lint:lint": "eslint src --ext .ts",
"lint:prettier": "prettier \"src/**/*.ts\" --list-different",
"lint": "npm run lint:lint && npm run lint:prettier",
"watch:build": "tsc -p tsconfig.json -w"
}
}

View file

@ -0,0 +1,9 @@
// Barrel file re-exporting all repository modules.
// FIX: "./settings" was exported twice; the duplicate has been removed.
export * from "./settings";
export * from "./signal/bots";
export * from "./whatsapp/bots";
export * from "./whatsapp/messages";
export * from "./whatsapp/attachments";
export * from "./voice/voice-line";
export * from "./voice/voice-provider";
export * from "./webhooks";

View file

@ -0,0 +1,104 @@
/* eslint-disable @typescript-eslint/explicit-module-boundary-types,@typescript-eslint/no-unused-vars,@typescript-eslint/no-explicit-any,prefer-destructuring */
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
// Branded UUID so setting ids cannot be confused with other entity ids.
export type SettingId = Flavor<UUID, "Setting Id">;
// A setting prior to insertion; `value` is stored as jsonb, so T should be
// JSON-serializable.
export interface UnsavedSetting<T> {
  name: string;
  value: T;
}
// A setting as read back from the database.
export interface SavedSetting<T> extends UnsavedSetting<T> {
  id: SettingId;
  createdAt: Date;
  updatedAt: Date;
}
// Binds the record type to the app_public.settings table.
export const SettingRecord = recordInfo<UnsavedSetting<any>, SavedSetting<any>>(
  "app_public",
  "settings"
);
export class SettingRecordRepository extends RepositoryBase(SettingRecord) {
  /**
   * Look up a single setting by its unique name.
   * Resolves to null when no row matches.
   */
  async findByName<T>(name: string): Promise<SavedSetting<T> | null> {
    // $1 is the schema-qualified table; $2:raw splices in the pre-formatted
    // WHERE clause from RepositoryBase.where(). NOTE(review): assumes
    // where() escapes its values -- confirm in the common package.
    return this.db.oneOrNone("SELECT * FROM $1 $2:raw LIMIT 1", [
      this.schemaTable,
      this.where({ name }),
    ]);
  }
  /**
   * Insert the setting, or overwrite its value when the name already
   * exists (backed by the unique index on settings.name).
   */
  async upsert<T>(name: string, value: T): Promise<SavedSetting<T>> {
    return this.db.one(
      `INSERT INTO $1 ($2:name) VALUES ($2:csv)
       ON CONFLICT (name)
       DO UPDATE SET value = EXCLUDED.value RETURNING *`,
      [this.schemaTable, this.columnize({ name, value })]
    );
  }
}
// these helpers let us create type safe setting constants
// Phantom-typed carrier: `_type` never exists at runtime; it only records
// the value type T for the compiler.
export interface SettingType<T = any> {
  _type: T;
}
// A named, typed setting constant.
export interface SettingInfo<T = any> extends SettingType<T> {
  name: string;
}
// Brands plain runtime data as a SettingInfo. Safe because `_type` is
// never read at runtime.
export function castToSettingInfo(
  runtimeData: Omit<SettingInfo, "_type">
): SettingInfo {
  return runtimeData as SettingInfo;
}
export function settingInfo<T>(name: string): SettingInfo<T>;
// don't use this signature, use the explicit typed signature
export function settingInfo(name: string) {
  return castToSettingInfo({ name });
}
// Service-level facade over the settings repository.
export interface ISettingsService {
  name: string;
  lookup<T>(settingInfo: SettingInfo<T>): Promise<T>;
  save<T>(settingInfo: SettingInfo<T>, value: T): Promise<T>;
}
export const SettingsService = (
  repo: SettingRecordRepository
): ISettingsService => ({
  name: "settingService",
  // Resolve a setting's stored value by its typed constant.
  lookup: async <T>(settingInfo: SettingInfo<T>): Promise<T> => {
    const s = await repo.findByName<T>(settingInfo.name);
    // FIX: findByName resolves to null when the setting is absent; this
    // previously dereferenced null and crashed with an opaque TypeError.
    if (s === null) {
      throw new Error(`Setting not found: ${settingInfo.name}`);
    }
    return s.value;
  },
  // Insert-or-update the setting and echo back the stored value.
  save: async <T>(settingInfo: SettingInfo<T>, value: T): Promise<T> => {
    const s = await repo.upsert(settingInfo.name, value);
    return s.value;
  },
});
const _test = async () => {
  // here is an example of how to use this module
  // it also serves as a compile-time test case
  const repo = new SettingRecordRepository({} as any);
  // create your own custom setting types!
  // the value is serialized as json in the database
  type Custom = { foo: string; bar: string };
  type CustomUnsavedSetting = UnsavedSetting<Custom>;
  type CustomSetting = SavedSetting<Custom>;
  // FIX: findByName resolves to null when no row matches, so the declared
  // type must admit null -- the previous `CustomSetting` annotation did not
  // compile under strictNullChecks.
  const s3: CustomSetting | null = await repo.findByName("test");
  const customValue = { foo: "monkeys", bar: "eggplants" };
  let customSetting = { name: "custom", value: customValue };
  customSetting = await repo.insert(customSetting);
  const value: Custom = customSetting.value;
  const MySetting = settingInfo<string>("my-setting");
};

View file

@ -0,0 +1,35 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
// Branded UUID for signal bot ids.
export type SignalBotId = Flavor<UUID, "Signal Bot Id">;
// A signal bot prior to insertion; token/is_verified come from database
// defaults.
export interface UnsavedSignalBot {
  phoneNumber: string;
  userId: string;
  description: string;
}
// A signal bot as read back from the database.
// NOTE(review): description and auth_info are nullable columns but typed
// as required strings here -- confirm the intended contract.
export interface SavedSignalBot extends UnsavedSignalBot {
  id: SignalBotId;
  createdAt: Date;
  updatedAt: Date;
  token: string;
  authInfo: string;
  isVerified: boolean;
}
// Binds the record type to the app_public.signal_bots table.
export const SignalBotRecord = recordInfo<UnsavedSignalBot, SavedSignalBot>(
  "app_public",
  "signal_bots"
);
export class SignalBotRecordRepository extends RepositoryBase(SignalBotRecord) {
  /**
   * Persist new auth credentials for the bot and flag it as verified.
   * An `undefined` authInfo is presumably stored as NULL (pg-promise maps
   * undefined to null) -- confirm if that matters to callers.
   */
  async updateAuthInfo(
    bot: SavedSignalBot,
    authInfo: string | undefined
  ): Promise<SavedSignalBot> {
    const sql =
      "UPDATE $1 SET (auth_info, is_verified) = ROW($2, true) WHERE id = $3 RETURNING *";
    return this.db.one(sql, [this.schemaTable, authInfo, bot.id]);
  }
}

View file

@ -0,0 +1,62 @@
import {
  RepositoryBase,
  recordInfo,
  UUID,
  Flavor,
} from "common";
// NOTE(review): empty type-only import -- appears to be a leftover and can
// likely be deleted; confirm nothing relies on module augmentation.
import type { } from "pg-promise";
// Branded UUID for voice line ids.
export type VoiceLineId = Flavor<UUID, "VoiceLine Id">;
// Prompt audio keyed by MIME type; "audio/mpeg" and checksum are
// presumably filled in by the conversion worker -- confirm.
export type VoiceLineAudio = {
  "audio/webm": string;
  "audio/mpeg"?: string;
  checksum?: string;
};
// A voice line prior to insertion; mirrors app_public.voice_lines.
export interface UnsavedVoiceLine {
  providerId: string;
  providerLineSid: string;
  number: string;
  language: string;
  voice: string;
  promptText?: string;
  promptAudio?: VoiceLineAudio;
  audioPromptEnabled: boolean;
  audioConvertedAt?: Date;
}
// A voice line as read back from the database.
export interface SavedVoiceLine extends UnsavedVoiceLine {
  id: VoiceLineId;
  createdAt: Date;
  updatedAt: Date;
}
// Binds the record type to the app_public.voice_lines table.
export const VoiceLineRecord = recordInfo<UnsavedVoiceLine, SavedVoiceLine>(
  "app_public",
  "voice_lines"
);
export class VoiceLineRecordRepository extends RepositoryBase(VoiceLineRecord) {
  /**
   * Fetch all voice lines given the numbers
   * @param numbers non-empty list of phone numbers; an empty list would
   *   render invalid SQL (`in ()`) -- callers must guard against it.
   */
  async findAllByNumbers(numbers: string[]): Promise<SavedVoiceLine[]> {
    // FIX: previously selected only (id, provider_id, provider_line_sid,
    // number), which did not satisfy the declared SavedVoiceLine return
    // type (missing language/voice/createdAt/...); select the full row.
    return this.db.any("SELECT * FROM $1 WHERE number in ($2:csv)", [
      this.schemaTable,
      numbers,
    ]);
  }
  /**
   * Fetch all voice lines given a list of provider line ids
   * @param ids non-empty list of provider line SIDs (same empty-list caveat
   *   as findAllByNumbers).
   */
  async findAllByProviderLineSids(ids: string[]): Promise<SavedVoiceLine[]> {
    // FIX: same partial-select/type mismatch as findAllByNumbers.
    return this.db.any("SELECT * FROM $1 WHERE provider_line_sid in ($2:csv)", [
      this.schemaTable,
      ids,
    ]);
  }
}

View file

@ -0,0 +1,52 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
/*
 * VoiceProvider
 *
 * A provider is a company that provides incoming voice call services
 */
// Branded UUID for provider ids.
export type VoiceProviderId = Flavor<UUID, "VoiceProvider Id">;
// Supported provider backends.
export enum VoiceProviderKinds {
  TWILIO = "TWILIO",
}
// Twilio API credentials; persisted as jsonb in voice_providers.credentials.
export type TwilioCredentials = {
  accountSid: string;
  apiKeySid: string;
  apiKeySecret: string;
};
// expand this type later when we support more providers
export type VoiceProviderCredentials = TwilioCredentials;
// A provider prior to insertion.
export interface UnsavedVoiceProvider {
  kind: VoiceProviderKinds;
  name: string;
  credentials: VoiceProviderCredentials;
}
// A provider as read back from the database.
export interface SavedVoiceProvider extends UnsavedVoiceProvider {
  id: VoiceProviderId;
  createdAt: Date;
  updatedAt: Date;
}
// Binds the record type to the app_public.voice_providers table.
export const VoiceProviderRecord = recordInfo<
  UnsavedVoiceProvider,
  SavedVoiceProvider
>("app_public", "voice_providers");
export class VoiceProviderRecordRepository extends RepositoryBase(
  VoiceProviderRecord
) {
  /**
   * Find the provider whose stored Twilio credentials carry the given
   * account SID; resolves to null when none matches.
   */
  async findByTwilioAccountSid(
    accountSid: string
  ): Promise<SavedVoiceProvider | null> {
    const query = "select * from $1 where credentials->>'accountSid' = $2";
    const values = [this.schemaTable, accountSid];
    return this.db.oneOrNone(query, values);
  }
}

View file

@ -0,0 +1,50 @@
import {
  RepositoryBase,
  recordInfo,
  UUID,
  Flavor,
} from "common";
/*
 * Webhook
 *
 * A webhook allows external services to be notified when a recorded call is available
 */
// Branded UUID for webhook ids.
export type WebhookId = Flavor<UUID, "Webhook Id">;
// A single request header name/value pair.
export interface HttpHeaders {
  header: string;
  value: string;
}
// A webhook prior to insertion.
// NOTE(review): the app_public.webhooks table has backend_type/backend_id
// columns and no voice_line_id -- `voiceLineId` looks stale, and
// backendType/backendId appear to be missing (findAllByBackendId queries
// those columns). Confirm against callers before changing this shape.
export interface UnsavedWebhook {
  name: string;
  voiceLineId: string;
  endpointUrl: string;
  httpMethod: "post" | "put";
  headers?: HttpHeaders[];
}
// A webhook as read back from the database.
export interface SavedWebhook extends UnsavedWebhook {
  id: WebhookId;
  createdAt: Date;
  updatedAt: Date;
}
// Binds the record type to the app_public.webhooks table.
export const WebhookRecord = recordInfo<UnsavedWebhook, SavedWebhook>(
  "app_public",
  "webhooks"
);
export class WebhookRecordRepository extends RepositoryBase(WebhookRecord) {
  /**
   * All webhooks registered for one backend entity, identified by its
   * polymorphic (backend_type, backend_id) pair.
   */
  async findAllByBackendId(
    backendType: string,
    backendId: string
  ): Promise<SavedWebhook[]> {
    const query = "select * from $1 where backend_type = $2 and backend_id = $3";
    const values = [this.schemaTable, backendType, backendId];
    return this.db.any(query, values);
  }
}

View file

@ -0,0 +1,24 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
// Branded UUID for whatsapp attachment ids.
export type WhatsappAttachmentId = Flavor<UUID, "Whatsapp Attachment Id">;
// An attachment prior to insertion.
// NOTE(review): whatsapp_message_id and attachment are nullable columns
-- but are required fields here -- confirm which contract is intended.
export interface UnsavedWhatsappAttachment {
  whatsappBotId: string;
  whatsappMessageId: string;
  attachment: Buffer;
}
// An attachment as read back from the database.
export interface SavedWhatsappAttachment extends UnsavedWhatsappAttachment {
  id: WhatsappAttachmentId;
  createdAt: Date;
  updatedAt: Date;
}
// Binds the record type to the app_public.whatsapp_attachments table.
export const WhatsappAttachmentRecord = recordInfo<
  UnsavedWhatsappAttachment,
  SavedWhatsappAttachment
>("app_public", "whatsapp_attachments");
// Plain CRUD repository; no custom queries yet.
export class WhatsappAttachmentRecordRepository extends RepositoryBase(
  WhatsappAttachmentRecord
) { }

View file

@ -0,0 +1,48 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
// Branded UUID for whatsapp bot ids.
export type WhatsappBotId = Flavor<UUID, "Whatsapp Bot Id">;
// A whatsapp bot prior to insertion; token/is_verified come from database
// defaults.
export interface UnsavedWhatsappBot {
  phoneNumber: string;
  userId: string;
  description: string;
}
// A whatsapp bot as read back from the database.
// NOTE(review): description, auth_info and qr_code are nullable columns
// but typed as required strings here -- confirm the intended contract.
export interface SavedWhatsappBot extends UnsavedWhatsappBot {
  id: WhatsappBotId;
  createdAt: Date;
  updatedAt: Date;
  token: string;
  authInfo: string;
  qrCode: string;
  isVerified: boolean;
}
// Binds the record type to the app_public.whatsapp_bots table.
export const WhatsappBotRecord = recordInfo<
  UnsavedWhatsappBot,
  SavedWhatsappBot
>("app_public", "whatsapp_bots");
export class WhatsappBotRecordRepository extends RepositoryBase(
  WhatsappBotRecord
) {
  /**
   * Store the latest pairing QR code on the bot row.
   */
  async updateQR(
    bot: SavedWhatsappBot,
    qrCode: string | undefined
  ): Promise<SavedWhatsappBot> {
    const sql = "UPDATE $1 SET (qr_code) = ROW($2) WHERE id = $3 RETURNING *";
    return this.db.one(sql, [this.schemaTable, qrCode, bot.id]);
  }
  /**
   * Persist new auth credentials for the bot and flag it as verified.
   */
  async updateAuthInfo(
    bot: SavedWhatsappBot,
    authInfo: string | undefined
  ): Promise<SavedWhatsappBot> {
    const sql =
      "UPDATE $1 SET (auth_info, is_verified) = ROW($2, true) WHERE id = $3 RETURNING *";
    return this.db.one(sql, [this.schemaTable, authInfo, bot.id]);
  }
}

Some files were not shown because too many files have changed in this diff Show more