Move packages/apps back

Darren Clarke 2023-03-10 08:26:51 +00:00
parent 6eaaf8e9be
commit 5535d6b575
348 changed files with 0 additions and 0 deletions

@@ -0,0 +1 @@
require("../.eslintrc.js");

@@ -0,0 +1,18 @@
import { generateConfig, printConfigOptions } from "common";
import { loadConfigRaw } from "config";
export const genConf = async (): Promise<void> => {
const c = await loadConfigRaw();
const generated = generateConfig(c);
console.log(generated);
};
export const genSchema = async (): Promise<void> => {
const c = await loadConfigRaw();
console.log(c.getSchemaString());
};
export const listConfig = async (): Promise<void> => {
const c = await loadConfigRaw();
printConfigOptions(c);
};

@@ -0,0 +1,67 @@
#!/usr/bin/env node
import { Command } from "commander";
import { startWithout } from "@digiresilience/montar";
import { migrateWrapper } from "db";
import { loadConfig } from "config";
import { genConf, listConfig } from "./config";
import { createTokenForTesting, generateJwks } from "./jwks";
import { exportGraphqlSchema } from "./postgraphile";
import "api/build/main/server";
import "api/build/main/logger";
import "worker/build/main";
const program = new Command();
export async function runServer(): Promise<void> {
await startWithout(["worker"]);
}
export async function runWorker(): Promise<void> {
await startWithout(["server"]);
}
program
.command("config-generate")
.description("Generate a sample JSON configuration file (to stdout)")
.action(genConf);
program
.command("config-help")
.description("Prints the entire convict config ")
.action(listConfig);
program
.command("api")
.description("Run the application api server")
.action(runServer);
program
.command("worker")
.description("Run the worker to process jobs")
.action(runWorker);
program
.command("db <commands...>")
.description("Run graphile-migrate commands with your app's config loaded.")
.action(async (args) => {
const config = await loadConfig();
return migrateWrapper(args, config);
});
program
.command("gen-jwks")
.description("Generate the JWKS")
.action(generateJwks);
program
.command("gen-testing-jwt")
.description("Generate a JWT for the test suite")
.action(createTokenForTesting);
program
.command("export-graphql-schema")
.description("Export the graphql schema")
.action(exportGraphqlSchema);
program.parse(process.argv);
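// Example invocations via this package's "cli" script (illustrative; assumes a
// built workspace and a loadable config):
//   yarn cli config-generate > metamigo.json
//   yarn cli api
//   yarn cli db migrate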

@@ -0,0 +1,68 @@
import jose from "node-jose";
import * as jwt from "jsonwebtoken";
const generateKeystore = async () => {
const keystore = jose.JWK.createKeyStore();
await keystore.generate("oct", 256, {
alg: "A256GCM",
use: "enc",
});
await keystore.generate("oct", 256, {
alg: "HS512",
use: "sig",
});
return keystore;
};
const safeString = (input: unknown): string => {
return Buffer.from(JSON.stringify(input)).toString("base64");
};
const stringify = (v: unknown): string => JSON.stringify(v, undefined, 2);
const _generateJwks = async () => {
const keystore = await generateKeystore();
const encryption = keystore.all({ use: "enc" })[0].toJSON(true);
const signing = keystore.all({ use: "sig" })[0].toJSON(true);
return {
nextAuth: {
signingKeyB64: safeString(signing),
encryptionKeyB64: safeString(encryption),
},
};
};
export const generateJwks = async (): Promise<void> => {
console.log(stringify(await _generateJwks()));
};
export const createTokenForTesting = async (): Promise<void> => {
const keys = await _generateJwks();
const signingKey = Buffer.from(
JSON.parse(
Buffer.from(keys.nextAuth.signingKeyB64, "base64").toString("utf-8")
).k,
"base64"
);
const token = jwt.sign(
{
iss: "Test Env",
iat: 1606893960,
aud: "metamigo",
sub: "abel@guardianproject.info",
name: "Abel Luck",
email: "abel@guardianproject.info",
userRole: "admin",
},
signingKey,
{ expiresIn: "100y", algorithm: "HS512" }
);
console.log("CONFIG");
console.log(stringify(keys));
console.log();
console.log("TOKEN");
console.log(token);
console.log();
};
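// A token minted above can be checked with the same signing key (illustrative):
//   jwt.verify(token, signingKey, { algorithms: ["HS512"] });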

@@ -0,0 +1,39 @@
{
"name": "cli",
"version": "0.2.0",
"main": "build/main/index.js",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@digiresilience/montar": "^0.1.6",
"commander": "^10.0.0",
"graphile-migrate": "^1.4.1",
"graphile-worker": "^0.13.0",
"node-jose": "^2.1.1"
},
"devDependencies": {
"@babel/core": "7.20.12",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"@types/jest": "^29.2.5",
"eslint": "^8.32.0",
"jest": "^29.3.1",
"jest-junit": "^15.0.0",
"pino-pretty": "^9.1.1",
"prettier": "^2.8.3",
"ts-node": "^10.9.1",
"typedoc": "^0.23.24",
"typescript": "4.9.4",
"nodemon": "^2.0.20"
},
"scripts": {
"build": "tsc -p tsconfig.json",
"cli": "NODE_ENV=development node --unhandled-rejections=strict build/main/index.js",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"lint:lint": "eslint src --ext .ts",
"lint:prettier": "prettier \"src/**/*.ts\" --list-different",
"test": "echo no tests",
"lint": "yarn lint:lint && yarn lint:prettier"
}
}

@@ -0,0 +1,37 @@
import { writeFileSync } from "fs";
import {
getIntrospectionQuery,
graphqlSync,
lexicographicSortSchema,
printSchema,
} from "graphql";
import { createPostGraphileSchema } from "postgraphile";
import { Pool } from "pg";
import { loadConfig } from "config";
import { getPostGraphileOptions } from "db";
export const exportGraphqlSchema = async (): Promise<void> => {
const config = await loadConfig();
const rootPgPool = new Pool({
connectionString: config.db.connection,
});
const exportSchema = `../../data/schema.graphql`;
const exportJson = `../../frontend/lib/graphql-schema.json`;
try {
const schema = await createPostGraphileSchema(
config.postgraphile.authConnection,
"app_public",
getPostGraphileOptions()
);
const sorted = lexicographicSortSchema(schema);
const json = graphqlSync(schema, getIntrospectionQuery());
writeFileSync(exportSchema, printSchema(sorted));
writeFileSync(exportJson, JSON.stringify(json));
console.log(`GraphQL schema exported to ${exportSchema}`);
console.log(`GraphQL schema json exported to ${exportJson}`);
} finally {
rootPgPool.end();
}
};

@@ -0,0 +1,8 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "build/main"
},
"include": ["**/*.ts", "**/.*.ts", "config.ts"],
"exclude": ["node_modules", "**/*.spec.ts", "**/*.test.ts"]
}

@@ -0,0 +1,13 @@
require('@digiresilience/eslint-config-metamigo/patch/modern-module-resolution');
module.exports = {
extends: [
"@digiresilience/eslint-config-metamigo/profile/node",
"@digiresilience/eslint-config-metamigo/profile/typescript"
],
rules: {
// TODO: enable this after jest fixes this issue https://github.com/nodejs/node/issues/38343
"unicorn/prefer-node-protocol": "off"
},
parserOptions: { tsconfigRootDir: __dirname }
};

packages/metamigo-common/.gitignore (vendored, new file)

@@ -0,0 +1,13 @@
.idea/*
.nyc_output
build
node_modules
test
src/*/*.js
coverage
*.log
package-lock.json
.npmrc
.yalc
yalc.lock
junit.xml

@@ -0,0 +1,13 @@
.eslintrc.js
.editorconfig
.prettierignore
.versionrc
Makefile
.gitlab-ci.yml
coverage
jest*
tsconfig*
*.log
test*
.yalc
yalc.lock

@@ -0,0 +1,2 @@
# package.json is formatted by package managers, so we ignore it here
package.json

@@ -0,0 +1,5 @@
{
"presets": [
"@digiresilience/babel-preset-metamigo"
]
}

@@ -0,0 +1,31 @@
import { ConvictSchema } from "./types";
export interface IAppMetaConfig {
name: string;
version: string;
figletFont: string;
}
export const AppMetaConfig: ConvictSchema<IAppMetaConfig> = {
version: {
doc: "The current application version",
format: String,
env: "npm_package_version",
default: null,
skipGenerate: true,
},
name: {
doc: "Application name",
format: String,
env: "npm_package_name",
default: null,
skipGenerate: true,
},
figletFont: {
doc: "The figlet font name used to print the site name on boot",
format: String,
env: "FIGLET_FONT",
default: "Sub-Zero",
skipGenerate: true,
},
};

@@ -0,0 +1,23 @@
import { ConvictSchema } from "./types";
export interface ISessionConfig {
sessionMaxAgeSeconds: number;
sessionUpdateAgeSeconds: number;
}
export const SessionConfig: ConvictSchema<ISessionConfig> = {
sessionMaxAgeSeconds: {
doc: "How long in seconds until an idle session expires and is no longer valid.",
format: "positiveInt",
default: 30 * 24 * 60 * 60, // 30 days
env: "SESSION_MAX_AGE_SECONDS",
},
sessionUpdateAgeSeconds: {
doc: `Throttle how frequently in seconds to write to database to extend a session.
Use it to limit write operations. Set to 0 to always update the database.
Note: This option is ignored if using JSON Web Tokens`,
format: "positiveInt",
default: 24 * 60 * 60, // 24 hours
env: "SESSION_UPDATE_AGE_SECONDS",
},
};

@@ -0,0 +1,32 @@
import { ConvictSchema } from "./types";
export interface ICorsConfig {
allowedMethods: Array<string>;
allowedOrigins: Array<string>;
allowedHeaders: Array<string>;
}
export const CorsConfig: ConvictSchema<ICorsConfig> = {
allowedMethods: {
doc: "The allowed CORS methods",
format: "Array",
env: "CORS_ALLOWED_METHODS",
default: ["GET", "PUT", "POST", "PATCH", "DELETE", "HEAD", "OPTIONS"],
},
allowedOrigins: {
doc: "The allowed origins",
format: "Array",
env: "CORS_ALLOWED_ORIGINS",
default: [],
},
allowedHeaders: {
doc: "The allowed headers",
format: "Array",
env: "CORS_ALLOWED_HEADERS",
default: [
"content-type",
"authorization",
"cf-access-authenticated-user-email",
],
},
};

@@ -0,0 +1,58 @@
import * as Joi from "joi";
import type { Format } from "convict";
const coerceString = (v: any): string => v.toString();
const validator = (s: any) => (v: any) => Joi.assert(v, s);
const url = Joi.string().uri({
scheme: ["http", "https"],
});
const ip = Joi.string().ip({ version: ["ipv4", "ipv6"], cidr: "optional" });
/**
* Additional configuration value formats for convict.
*
* You can use these to achieve richer validation for your configuration.
*/
export const MetamigoConvictFormats: { [index: string]: Format } = {
positiveInt: {
name: "positveInt",
coerce: (n: string): number => Number.parseInt(n, 10),
validate: validator(Joi.number().positive().integer()),
},
port: {
name: "port",
coerce: (n: string): number => Number.parseInt(n, 10),
validate: validator(Joi.number().port()),
},
ipaddress: {
name: "ipaddress",
coerce: coerceString,
validate: validator(ip),
},
url: {
name: "url",
coerce: coerceString,
validate: validator(url),
},
uri: {
name: "uri",
coerce: coerceString,
validate: validator(Joi.string().uri()),
},
optionalUri: {
name: "uri",
coerce: coerceString,
validate: validator(Joi.string().uri().allow("")),
},
email: {
name: "email",
coerce: coerceString,
validate: validator(Joi.string().email()),
},
uuid: {
name: "uuid",
coerce: coerceString,
validate: validator(Joi.string().guid()),
},
};
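// These formats are registered with convict before a schema is parsed, e.g.
// convict.addFormats(MetamigoConvictFormats) (as loadConfigurationRaw in
// common does), after which schema entries may use format: "port",
// "positiveInt", and so on.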

@@ -0,0 +1,44 @@
import convict from "convict";
const visitLeaf = (acc: any, key: any, leaf: any) => {
if (leaf.skipGenerate) {
return;
}
if (leaf.default === undefined) {
acc[key] = undefined;
} else {
acc[key] = leaf.default;
}
};
const visitNode = (acc: any, node: any, key = "") => {
if (node._cvtProperties) {
const keys = Object.keys(node._cvtProperties);
let subacc: any;
if (key === "") {
subacc = acc;
} else {
subacc = {};
acc[key] = subacc;
}
keys.forEach((key) => {
visitNode(subacc, node._cvtProperties[key], key);
});
// In the case that the entire sub-tree specified skipGenerate, remove the empty node
if (Object.keys(subacc).length === 0) {
delete acc[key];
}
} else {
visitLeaf(acc, key, node);
}
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const generateConfig = (conf: convict.Config<any>): unknown => {
const schema = conf.getSchema();
const generated = {};
visitNode(generated, schema);
return JSON.stringify(generated, undefined, 1);
};
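// Illustrative output shape for the base schema (leaves marked skipGenerate,
// such as the meta.* entries, are omitted entirely):
// {
//  "env": "development",
//  "server": { "address": "0.0.0.0", "port": 3001 },
//  ...
// }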

@@ -0,0 +1,142 @@
import process from "process";
import convict, { SchemaObj } from "convict";
import { IServerConfig, ServerConfig } from "./server";
import { IMetricsConfig, MetricsConfig } from "./metrics-server";
import { IAppMetaConfig, AppMetaConfig } from "./app-meta";
import { ICorsConfig, CorsConfig } from "./cors";
import { ILoggingConfig, LoggingConfig } from "./logging";
import { ExtendedConvict } from "./types";
import { MetamigoConvictFormats } from "./formats";
type IEnvConfig = "production" | "development" | "test";
const EnvConfig: SchemaObj<IEnvConfig> = {
doc: "The application environment",
format: ["production", "development", "test"],
default: "development",
env: "NODE_ENV",
};
export const configBaseSchema = {
env: EnvConfig,
server: ServerConfig,
meta: AppMetaConfig,
cors: CorsConfig,
metrics: MetricsConfig,
logging: LoggingConfig,
};
/**
*
* The metamigo base configuration object. Use this for easy typed access to your
* config.
*
*/
interface IMetamigoConfig {
env: IEnvConfig;
server: IServerConfig;
meta: IAppMetaConfig;
cors: ICorsConfig;
metrics: IMetricsConfig;
logging: ILoggingConfig;
isProd?: boolean;
isTest?: boolean;
isDev?: boolean;
frontend: any;
nextAuth: any;
}
export type IMetamigoConvict = ExtendedConvict<IMetamigoConfig>;
export type {
IServerConfig,
IMetricsConfig,
IAppMetaConfig,
ICorsConfig,
ILoggingConfig,
IMetamigoConfig,
};
export * from "./formats";
export * from "./generate";
export * from "./print";
export * from "./types";
/**
* Loads your application's configuration from environment variables and configuration files (see METAMIGO_CONFIG).
*
* @param schema your schema definition
* @param override an optional object with config values that will override defaults but not config files and env vars (see [convict precedence docs](https://github.com/mozilla/node-convict/tree/master/packages/convict#precedence-order))
* @returns the raw convict config object
*/
export const loadConfigurationRaw = async <T extends IMetamigoConfig>(
schema: convict.Schema<T>,
override?: Partial<T>
): Promise<ExtendedConvict<T>> => {
convict.addFormats(MetamigoConvictFormats);
const config: ExtendedConvict<T> = convict(schema);
const env = config.get("env");
config.isProd = env === "production";
config.isTest = env === "test";
config.isDev = env === "development";
try {
if (process.env.METAMIGO_CONFIG) {
config.loadFile(process.env.METAMIGO_CONFIG);
}
} catch (error) {
const msg = `
🚫 Your application's configuration is invalid JSON. 🚫
${error}
`;
throw new Error(msg);
}
if (override) {
config.load(override);
}
try {
config.validate({ allowed: "strict" });
} catch (error: any) {
const msg = `
🚫 Your application's configuration is invalid. 🚫
${error.message}
`;
throw new Error(msg);
}
// set our helpers
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const configDirty = config as any;
configDirty.set("isProd", config.isProd);
configDirty.set("isTest", config.isTest);
configDirty.set("isDev", config.isDev);
return config;
};
/**
* Loads your application's configuration from environment variables and configuration files (see METAMIGO_CONFIG).
*
* @param schema your schema definition
* @param override an optional object with config values that will override defaults but not config files and env vars (see [convict precedence docs](https://github.com/mozilla/node-convict/tree/master/packages/convict#precedence-order))
* @returns a vanilla javascript object with the config loaded values
*/
export const loadConfiguration = async <T extends IMetamigoConfig>(
schema: convict.Schema<T>,
override?: Partial<T>
): Promise<T> => {
const c = await loadConfigurationRaw(schema, override);
return c.getProperties();
};
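// Typical call site (illustrative; the frontend/nextAuth schema entries are
// app-specific):
//   const config = await loadConfiguration({ ...configBaseSchema, frontend, nextAuth });
//   if (config.isProd) { /* production-only wiring */ }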

@@ -0,0 +1,90 @@
import { ConvictSchema } from "./types";
export interface ILoggingConfig {
level: string;
sql: boolean;
redact: string[];
ignorePaths: string[];
ignoreTags: string[];
requestIdHeader: string;
logRequestStart: boolean;
logRequestComplete: boolean;
logRequestPayload: boolean;
logRequestQueryParams: boolean;
prettyPrint: boolean | "auto";
}
export const LoggingConfig: ConvictSchema<ILoggingConfig> = {
level: {
doc: "The logging level",
format: ["trace", "debug", "info", "warn", "error"],
default: "info",
env: "LOG_LEVEL",
},
sql: {
doc: "Whether to log sql statements",
format: "Boolean",
default: false,
env: "LOG_SQL",
},
redact: {
doc: "Pino redaction array. These are always redacted. see https://getpino.io/#/docs/redaction",
format: "Array",
default: [
"req.remoteAddress",
"req.headers.authorization",
`req.headers["cf-access-jwt-assertion"]`,
`req.headers["cf-access-authenticated-user-email"]`,
`req.headers["cf-connecting-ip"]`,
`req.headers["cf-ipcountry"]`,
`req.headers["x-forwarded-for"]`,
"req.headers.cookie",
],
},
ignorePaths: {
doc: "Ignore http paths (exact) when logging requests",
format: "Array",
default: ["/graphql"],
},
ignoreTags: {
doc: "Ignore routes tagged with these tags when logging requests",
format: "Array",
default: ["status", "swagger", "nolog"],
},
requestIdHeader: {
doc: "The header where the request id lives",
format: String,
default: "x-request-id",
env: "REQUEST_ID_HEADER",
},
logRequestStart: {
doc: "Whether hapi-pino should add a log.info() at the beginning of Hapi requests for the given Request.",
format: "Boolean",
default: false,
env: "LOG_REQUEST_START",
},
logRequestComplete: {
doc: "Whether hapi-pino should add a log.info() at the completion of Hapi requests for the given Request.",
format: "Boolean",
default: true,
env: "LOG_REQUEST_COMPLETE",
},
logRequestPayload: {
doc: "When enabled, add the request payload as payload to the response event log.",
format: "Boolean",
default: false,
env: "LOG_REQUEST_PAYLOAD",
},
logRequestQueryParams: {
doc: "When enabled, add the request query as queryParams to the response event log.",
format: "Boolean",
default: false,
env: "LOG_REQUEST_QUERY_PARAMS",
},
prettyPrint: {
doc: "Pretty print the logs",
format: ["auto", true, false],
default: "auto",
env: "LOG_PRETTY_PRINT",
},
};

@@ -0,0 +1,22 @@
import { ConvictSchema } from "./types";
export interface IMetricsConfig {
address: string;
port: number;
}
export const MetricsConfig: ConvictSchema<IMetricsConfig> = {
address: {
doc: "The ip address to bind the prometheus metrics to",
format: "ipaddress",
default: "127.0.0.1",
env: "METRICS_ADDRESS",
},
port: {
doc: "The port to bind the prometheus metrics to",
format: "port",
default: 3002,
env: "METRICS_PORT",
arg: "port",
},
};

@@ -0,0 +1,41 @@
import chalk from "chalk";
import convict from "convict";
const visitLeaf = (path: any, key: any, leaf: any) => {
if (leaf.skipGenerate) {
return;
}
let name = `${path}.${key}`;
if (path.length === 0) name = key;
console.log(chalk.green(name));
console.log(leaf.doc);
if (leaf.default === undefined) {
console.log(chalk.red("\t required"));
} else {
console.log(`\tdefault: ${JSON.stringify(leaf.default)}`);
}
console.log(`\tformat: ${leaf.format}`);
console.log(`\tenv: ${leaf.env}`);
};
const visitNode = (path: any, node: any, key = "") => {
if (node._cvtProperties) {
const keys = Object.keys(node._cvtProperties);
const subpath = key === "" ? path : `${key}`;
keys.forEach((key) => {
visitNode(subpath, node._cvtProperties[key], key);
});
console.log();
} else {
visitLeaf(path, key, node);
}
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const printConfigOptions = (conf: convict.Config<any>): void => {
const schema = conf.getSchema();
visitNode("", schema);
};

@@ -0,0 +1,21 @@
import { ConvictSchema } from "./types";
export interface IServerConfig {
address: string;
port: number;
}
export const ServerConfig: ConvictSchema<IServerConfig> = {
address: {
doc: "The IP address to bind the server to",
format: "ipaddress",
default: "0.0.0.0",
env: "SERVER_ADDRESS",
},
port: {
doc: "The port to bind the server to",
format: "port",
default: 3001,
env: "SERVER_PORT",
},
};

@@ -0,0 +1,26 @@
import convict from "convict";
/*
interface SSMObj {
path: string;
}
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
interface ConvictSchemaObj<T = any> extends convict.SchemaObj<T> {
// ssm?: SSMObj;
/**
* The config item will be ignored for purposes of config file generation
*/
skipGenerate?: boolean;
}
export type ConvictSchema<T> = {
[P in keyof T]: convict.Schema<T[P]> | ConvictSchemaObj<T[P]>;
};
export interface ExtendedConvict<T> extends convict.Config<T> {
isProd?: boolean;
isTest?: boolean;
isDev?: boolean;
}

@@ -0,0 +1,295 @@
/* eslint-disable @typescript-eslint/ban-types,@typescript-eslint/no-explicit-any,max-params */
import * as Boom from "@hapi/boom";
import * as Hapi from "@hapi/hapi";
import { CrudRepository } from "../records/crud-repository";
import { createResponse } from "../helpers/response";
import {
PgRecordInfo,
UnsavedR,
SavedR,
KeyType,
} from "../records/record-info";
/**
*
* A generic controller that exposes a [[CrudRepository]] as HTTP
* endpoints with full POST, PUT, GET, DELETE semantics.
*
* The controller yanks the instance of the crud repository out of the request at runtime.
* This assumes you're following the pattern exposed with the hapi-pg-promise plugin.
*
* @typeParam ID The type of the id column
* @typeParam T The type of the record
*/
export abstract class AbstractCrudController<
TUnsavedR,
TSavedR extends TUnsavedR & IdKeyT,
IdKeyT extends object
> {
/**
* @param repoName the key at which the repository for the record can be accessed (that is, request.db[repoName])
* @param paramsIdField the placeholder used in the Hapi route for the id of the record
* @param dbDecoration the decorated function on the request to use (defaults to request.db())
*/
abstract repoName: string;
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
abstract paramsIdField = "id";
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
abstract dbDecoration = "db";
abstract recordType: PgRecordInfo<TUnsavedR, TSavedR, IdKeyT>;
repo(request: Hapi.Request): CrudRepository<TUnsavedR, TSavedR, IdKeyT> {
// @ts-expect-error
const db = request[this.dbDecoration];
if (!db)
throw Boom.badImplementation(
`CrudController for table ${this.recordType.tableName} could not find request decoration '${this.dbDecoration}'`
);
const repo = db()[this.repoName];
if (!repo)
throw Boom.badImplementation(
`CrudController for table ${this.recordType.tableName} could not find repository for '${this.dbDecoration}().${this.repoName}'`
);
return repo;
}
/**
* Creates a new record
*/
public create = async (
request: Hapi.Request,
toolkit: Hapi.ResponseToolkit
): Promise<any> => {
try {
// would love to know how to get rid of this double cast hack
const payload: TSavedR = <TSavedR>(<any>request.payload);
const data: TSavedR = await this.repo(request).insert(payload);
return toolkit.response(
createResponse(request, {
value: data,
})
);
} catch (error: any) {
return toolkit.response(
createResponse(request, {
boom: Boom.badImplementation(error),
})
);
}
};
/**
* Updates a record by ID. This method can accept partial updates.
*/
public updateById = async (
request: Hapi.Request,
toolkit: Hapi.ResponseToolkit
): Promise<any> => {
try {
const payload: Partial<TSavedR> = <any>request.payload;
const id: IdKeyT = request.params[this.paramsIdField];
const updatedRow: TSavedR = await this.repo(request).updateById(
id,
payload
);
if (!updatedRow) {
return toolkit.response(
createResponse(request, {
boom: Boom.notFound(),
})
);
}
return toolkit.response(
createResponse(request, {
value: updatedRow,
})
);
} catch (error: any) {
return toolkit.response(
createResponse(request, {
boom: Boom.badImplementation(error),
})
);
}
};
/**
* Return a record given its id.
*/
public getById = async (
request: Hapi.Request,
toolkit: Hapi.ResponseToolkit
): Promise<any> => {
try {
const id: IdKeyT = request.params[this.paramsIdField];
// @ts-expect-error
const row: TSavedR = await this.repo(request).findById(id);
if (!row) {
return toolkit.response(
createResponse(request, {
boom: Boom.notFound(),
})
);
}
return toolkit.response(
createResponse(request, {
value: row,
})
);
} catch (error: any) {
return toolkit.response(
createResponse(request, {
boom: Boom.badImplementation(error),
})
);
}
};
/**
* Return all records.
*/
public getAll = async (
request: Hapi.Request,
toolkit: Hapi.ResponseToolkit
): Promise<any> => {
try {
const rows: TSavedR[] = await this.repo(request).findAll();
return toolkit.response(
createResponse(request, {
value: rows,
})
);
} catch (error: any) {
return toolkit.response(
createResponse(request, {
boom: Boom.badImplementation(error),
})
);
}
};
/**
* Delete a record given its id.
*/
public deleteById = async (
request: Hapi.Request,
toolkit: Hapi.ResponseToolkit
): Promise<any> => {
try {
const id: IdKeyT = request.params[this.paramsIdField];
const count = await this.repo(request).removeById(id);
if (count === 0) {
return createResponse(request, { boom: Boom.notFound() });
}
return toolkit.response(
createResponse(request, {
value: { id },
})
);
} catch (error: any) {
return toolkit.response(
createResponse(request, {
boom: Boom.badImplementation(error),
})
);
}
};
}
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function unboundCrudController<TRecordInfo extends PgRecordInfo>(
aRecordType: TRecordInfo
) {
return class CrudController extends AbstractCrudController<
UnsavedR<TRecordInfo>,
SavedR<TRecordInfo>,
KeyType<TRecordInfo>
> {
public readonly repoName: string;
public readonly paramsIdField;
public readonly dbDecoration;
public readonly recordType = aRecordType;
constructor(repoName: string, paramsIdField = "id", dbDecoration = "db") {
super();
this.repoName = repoName;
this.paramsIdField = paramsIdField;
this.dbDecoration = dbDecoration;
}
};
}
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function CrudControllerBase<Rec extends PgRecordInfo>(recordType: Rec) {
return unboundCrudController<Rec>(recordType);
}
export const crudRoutesFor = (
name: string,
path: string,
controller: AbstractCrudController<any, any, any>,
idParam: string,
validate: Record<string, Hapi.RouteOptionsValidate>
): Hapi.ServerRoute[] => [
{
method: "POST",
path: `${path}`,
options: {
handler: controller.create,
validate: validate.create,
description: `Method that creates a new ${name}.`,
tags: ["api", name],
},
},
{
method: "PUT",
path: `${path}/{${idParam}}`,
options: {
handler: controller.updateById,
validate: validate.updateById,
description: `Method that updates a ${name} by its id.`,
tags: ["api", name],
},
},
{
method: "GET",
path: `${path}/{${idParam}}`,
options: {
handler: controller.getById,
validate: validate.getById,
description: `Method that gets a ${name} by its id.`,
tags: ["api", name],
},
},
{
method: "GET",
path: `${path}`,
options: {
handler: controller.getAll,
description: `Method that gets all ${name}s.`,
tags: ["api", name],
},
},
{
method: "DELETE",
path: `${path}/{${idParam}}`,
options: {
handler: controller.deleteById,
validate: validate.deleteById,
description: `Method that deletes a ${name} by its id.`,
tags: ["api", name],
},
},
];
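// Illustrative wiring (route validators are app-specific):
//   const AccountController = CrudControllerBase(AccountRecord);
//   const controller = new AccountController("accounts");
//   server.route(crudRoutesFor("account", "/accounts", controller, "id", validators));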

@@ -0,0 +1,185 @@
/* eslint-disable unicorn/no-null,max-params */
import { createHash, randomBytes } from "crypto";
import type { AdapterInstance } from "next-auth/adapters";
import omit from "lodash/omit";
import type { IMetamigoRepositories } from "../records";
import type { UnsavedAccount, SavedAccount } from "../records/account";
import type { UserId, UnsavedUser, SavedUser } from "../records/user";
import type { UnsavedSession, SavedSession } from "../records/session";
// Sessions expire after 30 days of being idle
export const defaultSessionMaxAge = 30 * 24 * 60 * 60 * 1000;
// Sessions are written back only if they were last updated more than this long ago (0 = always update)
export const defaultSessionUpdateAge = 24 * 60 * 60 * 1000;
const getCompoundId = (providerId: any, providerAccountId: any) =>
createHash("sha256")
.update(`${providerId}:${providerAccountId}`)
.digest("hex");
const randomToken = () => randomBytes(32).toString("hex");
export class NextAuthAdapter<TRepositories extends IMetamigoRepositories>
implements AdapterInstance<SavedUser, UnsavedUser, SavedSession>
{
constructor(
private repos: TRepositories,
private readonly sessionMaxAge = defaultSessionMaxAge,
private readonly sessionUpdateAge = defaultSessionUpdateAge
) { }
async createUser(profile: UnsavedUser): Promise<SavedUser> {
// @ts-expect-error
return this.repos.users.upsert(omit(profile, ["isActive", "id"]));
}
async getUser(id: UserId): Promise<SavedUser | null> {
const user = await this.repos.users.findById({ id });
if (!user) return null;
// if a user has no linked accounts, then we do not return it
// see: https://github.com/nextauthjs/next-auth/issues/876
const accounts = await this.repos.accounts.findAllBy({
userId: user.id,
});
if (!accounts || accounts.length === 0) return null;
return user;
}
async getUserByEmail(email: string): Promise<SavedUser | null> {
const user = await this.repos.users.findBy({ email });
if (!user) return null;
// if a user has no linked accounts, then we do not return it
// see: https://github.com/nextauthjs/next-auth/issues/876
const accounts = await this.repos.accounts.findAllBy({
userId: user.id,
});
if (!accounts || accounts.length === 0) return null;
return user;
}
async getUserByProviderAccountId(
providerId: string,
providerAccountId: string
): Promise<SavedUser | null> {
const account = await this.repos.accounts.findBy({
compoundId: getCompoundId(providerId, providerAccountId),
});
if (!account) return null;
return this.repos.users.findById({ id: account.userId });
}
async updateUser(user: SavedUser): Promise<SavedUser> {
return this.repos.users.update(user);
}
// @ts-expect-error
async linkAccount(
userId: string,
providerId: string,
providerType: string,
providerAccountId: string,
refreshToken: string,
accessToken: string,
accessTokenExpires: number
): Promise<void> {
const exists = await this.repos.users.existsById({ id: userId });
if (!exists) return;
const account: UnsavedAccount = {
accessToken,
refreshToken,
compoundId: getCompoundId(providerId, providerAccountId),
providerAccountId,
providerId,
providerType,
accessTokenExpires: accessTokenExpires
? new Date(accessTokenExpires)
: new Date(),
userId,
};
await this.repos.accounts.insert(account);
}
async unlinkAccount(
userId: string,
providerId: string,
providerAccountId: string
): Promise<void> {
await this.repos.accounts.removeBy({
userId,
compoundId: getCompoundId(providerId, providerAccountId),
});
}
createSession(user: SavedUser): Promise<SavedSession> {
let expires;
if (this.sessionMaxAge) {
const dateExpires = new Date(Date.now() + this.sessionMaxAge);
expires = dateExpires.toISOString();
}
const session: UnsavedSession = {
// @ts-expect-error
expires,
userId: user.id,
sessionToken: randomToken(),
accessToken: randomToken(),
};
return this.repos.sessions.insert(session);
}
async getSession(sessionToken: string): Promise<SavedSession | null> {
const session = await this.repos.sessions.findBy({ sessionToken });
if (session && session.expires && new Date() > session.expires) {
await this.repos.sessions.remove(session);
return null;
}
return session;
}
async updateSession(
session: SavedSession,
force?: boolean
): Promise<SavedSession | null> {
if (
this.sessionMaxAge &&
(this.sessionUpdateAge || this.sessionUpdateAge === 0) &&
session.expires
) {
// Calculate last updated date, to throttle write updates to database
// Formula: ({expiry date} - sessionMaxAge) + sessionUpdateAge
// e.g. ({expiry date} - 30 days) + 24 hours
//
// Default here for sessionMaxAge is 30 days.
// Default here for sessionUpdateAge is 24 hours, so an active session's
// row is rewritten at most once per day.
const dateSessionIsDueToBeUpdated = new Date(
session.expires.getTime() - this.sessionMaxAge + this.sessionUpdateAge
);
// Trigger update of session expiry date and write to database, only
// if the session was last updated more than {sessionUpdateAge} ago
if (new Date() > dateSessionIsDueToBeUpdated) {
const newExpiryDate = new Date();
newExpiryDate.setTime(newExpiryDate.getTime() + this.sessionMaxAge);
session.expires = newExpiryDate;
} else if (!force) {
return null;
}
} else if (!force) {
// If session MaxAge, session UpdateAge or session.expires are
// missing then don't even try to save changes, unless force is set.
return null;
}
const { expires } = session;
return this.repos.sessions.update({ ...session, expires });
}
async deleteSession(sessionToken: string): Promise<void> {
await this.repos.sessions.removeBy({ sessionToken });
}
}

@@ -0,0 +1,8 @@
import * as PGP from "pg-promise";
import * as PGPTS from "pg-promise/typescript/pg-subset";
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type IDatabase = PGP.IDatabase<any>;
export type IMain = PGP.IMain;
export type IResult = PGPTS.IResult;
export type IInitOptions = PGP.IInitOptions;

@@ -0,0 +1,123 @@
import * as Hapi from "@hapi/hapi";
import * as http from "http";
import type { HttpTerminator } from "http-terminator";
import * as Glue from "@hapi/glue";
import * as Promster from "@promster/hapi";
import figlet from "figlet";
import PinoPlugin from "hapi-pino";
import { createServer as createPrometheusServer } from "@promster/server";
import { createHttpTerminator } from "http-terminator";
import { getPrettyPrint } from "./logger";
import RequestIdPlugin from "./plugins/request-id";
import StatusPlugin from "./plugins/status";
import ConfigPlugin from "./plugins/config";
import { IMetamigoConfig } from "./config";
export interface Server {
hapiServer: Hapi.Server;
promServer?: http.Server;
promTerminator?: HttpTerminator;
}
export const deployment = async <T extends IMetamigoConfig>(
manifest: Glue.Manifest,
config: T,
start = false
): Promise<Server> => {
const hapiServer: Hapi.Server = await Glue.compose(manifest);
await hapiServer.initialize();
if (!start) return { hapiServer };
await announce(config);
await hapiServer.start();
const { port, address } = config.metrics;
const promServer = await createPrometheusServer({
port,
hostname: address,
});
const promTerminator = createHttpTerminator({
server: promServer,
});
console.log(`
🚀 Server listening on http://${hapiServer.info.address}:${hapiServer.info.port}
Metrics listening on http://${address}:${port}
`);
return {
hapiServer,
promServer,
promTerminator,
};
};
export const stopDeployment = async (server: Server): Promise<void> => {
await server.hapiServer.stop();
if (server.promTerminator) await server.promTerminator.terminate();
};
export const defaultPlugins = <T extends IMetamigoConfig>(
config: T
): string[] | Glue.PluginObject[] | Array<string | Glue.PluginObject> => {
const {
logRequestStart,
logRequestComplete,
logRequestPayload,
logRequestQueryParams,
level,
redact,
ignorePaths,
ignoreTags,
requestIdHeader,
} = config.logging;
const plugins = [
{ plugin: ConfigPlugin, options: { config } },
{
plugin: PinoPlugin,
options: {
prettyPrint: getPrettyPrint(config),
level,
logRequestStart,
logRequestComplete,
logPayload: logRequestPayload,
logQueryParams: logRequestQueryParams,
redact: {
paths: redact,
remove: true,
},
ignorePaths,
ignoreTags,
},
},
{
plugin: RequestIdPlugin,
options: {
header: requestIdHeader,
},
},
{ plugin: StatusPlugin },
{ plugin: Promster.createPlugin() },
];
// @ts-ignore
return plugins;
};
export const announce = async <T extends IMetamigoConfig>(
config: T
): Promise<void> =>
new Promise((resolve, reject) => {
// @ts-expect-error
figlet.text(
config.meta.name,
{ font: config.meta.figletFont },
(err, text) => {
if (err) {
reject(err);
return;
}
console.log(text);
resolve();
}
);
});

@@ -0,0 +1,44 @@
/**
* Used by Flavor to mark a type in a readable way.
*/
export interface Flavoring<FlavorT> {
_type?: FlavorT;
}
/**
*
* Create a "flavored" version of a type. TypeScript will disallow mixing
* flavors, but will allow unflavored values of that type to be passed in where
* a flavored version is expected. This is a less restrictive form of branding.
*
*/
export type Flavor<T, FlavorT> = T & Flavoring<FlavorT>;
export type UUID = Flavor<string, "A UUID">;
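// Illustrative example: unflavored strings assign into a flavored type, but
// two different flavors will not assign to each other.
//   type UserId = Flavor<string, "User Id">;
//   type AccountId = Flavor<string, "Account Id">;
//   const userId: UserId = "u-123"; // ok: plain string accepted
//   declare const accountId: AccountId;
//   const wrong: UserId = accountId; // compile error: flavors don't mix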
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const deepFreeze = (o: unknown): any => {
Object.freeze(o);
const oIsFunction = typeof o === "function";
const hasOwnProp = Object.prototype.hasOwnProperty;
Object.getOwnPropertyNames(o).forEach((prop) => {
if (
hasOwnProp.call(o, prop) &&
(oIsFunction
? prop !== "caller" && prop !== "callee" && prop !== "arguments"
: true) &&
// @ts-expect-error
o[prop] !== null &&
// @ts-expect-error
(typeof o[prop] === "object" || typeof o[prop] === "function") &&
// @ts-expect-error
!Object.isFrozen(o[prop])
) {
// @ts-expect-error
deepFreeze(o[prop]);
}
});
return o;
};

@@ -0,0 +1,59 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import * as Boom from "@hapi/boom";
import * as Hapi from "@hapi/hapi";
interface IResponseMeta {
operation?: string;
method?: string;
paging?: string | null;
}
interface IResponseError {
code?: string | number;
message?: string;
error?: string;
}
interface IResponse<T> {
meta: IResponseMeta;
data: T[];
errors: IResponseError[];
}
interface IResponseOptions<T> {
value?: T | null | undefined;
boom?: Boom.Boom<any> | null | undefined;
}
export function createResponse<T>(
request: Hapi.Request,
{ value = undefined, boom = undefined }: IResponseOptions<T>
): IResponse<T> {
const errors: IResponseError[] = [];
const data: any = [];
if (boom) {
errors.push({
code: boom.output.payload.statusCode,
error: boom.output.payload.error,
message: boom.output.payload.message,
});
}
if (value && data) {
if (Array.isArray(value)) {
data.push(...value);
} else {
data.push(value);
}
}
return {
meta: {
method: request.method.toUpperCase(),
operation: request.url.pathname,
},
data,
errors,
};
}

@@ -0,0 +1,62 @@
import process from "process";
import * as Hapi from "@hapi/hapi";
import * as Joi from "joi";
import Hoek from "@hapi/hoek";
import * as Boom from "@hapi/boom";
export interface HapiValidationError extends Joi.ValidationError {
output: {
statusCode: number;
headers: Hapi.Utils.Dictionary<string | string[]>;
payload: {
statusCode: number;
error: string;
message?: string;
validation: {
source: string;
keys: string[];
};
};
};
}
export function defaultValidationErrorHandler(
request: Hapi.Request,
h: Hapi.ResponseToolkit,
err?: Error
): Hapi.Lifecycle.ReturnValue {
// Newer versions of Joi don't format the key for missing params the same way. This shim
// provides backwards compatibility. Unfortunately, Joi doesn't export its own Error class
// in JS, so we have to rely on the `name` key before we can cast it.
//
// The Hapi code we're 'overwriting' can be found here:
// https://github.com/hapijs/hapi/blob/master/lib/validation.js#L102
if (err && err.name === "ValidationError" && err.hasOwnProperty("output")) {
const validationError: HapiValidationError = err as HapiValidationError;
const validationKeys: string[] = [];
validationError.details.forEach((detail) => {
if (detail.path.length > 0) {
validationKeys.push(Hoek.escapeHtml(detail.path.join(".")));
} else {
// If no path, use the value sigil to signal the entire value had an issue.
validationKeys.push("value");
}
});
validationError.output.payload.validation.keys = validationKeys;
}
throw err;
}
export const validatingFailAction = async (
request: Hapi.Request,
h: Hapi.ResponseToolkit,
err: Error
): Promise<void> => {
if (process.env.NODE_ENV === "production") {
throw Boom.badRequest("Invalid request payload input");
} else {
defaultValidationErrorHandler(request, h, err);
}
};

@@ -0,0 +1,23 @@
export * from "./config";
export * from "./controllers/crud-controller";
export * from "./controllers/nextauth-adapter";
export * from "./hapi";
export * from "./helpers";
export * from "./helpers/response";
export * from "./helpers/validation-error";
export * from "./logger";
export * from "./records";
import * as pino from "pino";
declare module "@hapi/hapi" {
interface Server {
// @ts-ignore
logger: pino.Logger;
}
interface Request {
// @ts-ignore
logger: pino.Logger;
}
}

@@ -0,0 +1,22 @@
import pino, { LoggerOptions } from "pino";
import { IMetamigoConfig } from "./config";
export const getPrettyPrint = <T extends IMetamigoConfig>(config: T): boolean => {
const { prettyPrint } = config.logging;
if (prettyPrint === "auto") return config?.isDev || false;
return prettyPrint === true;
};
export const configureLogger = <T extends IMetamigoConfig>(
config: T
): pino.Logger => {
const { level, redact } = config.logging;
const options: LoggerOptions = {
level,
redact: {
paths: redact,
remove: true,
},
};
return pino(options);
};

@@ -0,0 +1,65 @@
{
"name": "common",
"version": "0.2.0",
"description": "",
"main": "build/main/index.js",
"types": "build/main/index.d.ts",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"private": false,
"scripts": {
"build": "tsc -p tsconfig.json",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"test": "yarn test:jest && yarn test:lint && yarn test:prettier",
"test:lint": "eslint src --ext .ts",
"test:prettier": "prettier \"src/**/*.ts\" --list-different",
"test:jest": "jest --coverage --forceExit --detectOpenHandles --reporters=default --reporters=jest-junit",
"doc": "yarn run doc:html",
"doc:html": "typedoc src/ --exclude '**/*.test.ts' --exclude '**/*.spec.ts' --name $npm_package_name --readme README.md --target es2019 --mode file --out build/docs",
"watch:build": "tsc -p tsconfig.json -w"
},
"devDependencies": {
"@types/figlet": "^1.5.5",
"@types/lodash": "^4.14.191",
"@types/node": "*",
"@types/uuid": "^9.0.0",
"camelcase-keys": "^8.0.2",
"pg-monitor": "^2.0.0",
"typedoc": "^0.23.24"
},
"dependencies": {
"@digiresilience/hapi-nextauth": "0.2.1",
"@hapi/boom": "^10.0.0",
"@hapi/glue": "^9.0.0",
"@hapi/hapi": "^21.2.0",
"@hapi/hoek": "^11.0.2",
"@hapi/inert": "^7.0.0",
"@hapi/vision": "^7.0.0",
"@hapipal/schmervice": "^2.1.0",
"@promster/hapi": "^8.0.6",
"@promster/server": "^7.0.8",
"@promster/types": "^3.2.5",
"@types/convict": "^6.1.1",
"@types/hapi__glue": "^6.1.6",
"@types/hapi__hapi": "^20.0.13",
"@types/hapi__inert": "^5.2.4",
"@types/hapi__vision": "^5.5.4",
"@types/hapi-pino": "^9.1.2",
"@types/hapipal__schmervice": "^2.0.3",
"@types/joi": "^17.2.3",
"chalk": "^5.2.0",
"commander": "^10.0.0",
"convict": "^6.2.4",
"decamelcase-keys": "^1.1.1",
"figlet": "^1.5.2",
"hapi-pino": "^11.0.1",
"http-terminator": "^3.2.0",
"joi": "^17.7.0",
"lodash": "^4.17.21",
"pg-promise": "^11.0.2",
"pino": "^8.8.0",
"prom-client": "^14.x.x",
"uuid": "^9.0.0"
}
}

@@ -0,0 +1,23 @@
import { Server } from "@hapi/hapi";
import cloneDeep from "lodash/cloneDeep";
import { deepFreeze } from "../helpers";
interface ConfigOptions {
config: unknown;
}
const register = async (
server: Server,
options: ConfigOptions
): Promise<void> => {
const safeConfig = deepFreeze(cloneDeep(options.config));
server.decorate("server", "config", () => safeConfig);
};
const ConfigPlugin = {
register,
name: "config",
version: "0.0.1",
};
export default ConfigPlugin;

@@ -0,0 +1,37 @@
import { Server } from "@hapi/hapi";
import { v4 as uuid } from "uuid";
interface RequestIdOptions {
header?: string;
}
const register = async (
server: Server,
options?: RequestIdOptions
): Promise<void> => {
const header = options?.header || "x-request-id";
server.ext("onPreResponse", async (request, h) => {
if (!request.response) {
return h.continue;
}
if ("isBoom" in request.response) {
const id = request.response.output.headers[header] || uuid();
request.response.output.headers[header] = id;
} else {
const id = request.headers[header] || uuid();
// @ts-ignore
request.response.header(header, id);
}
return h.continue;
});
};
const RequestIdPlugin = {
register,
name: "request-id",
version: "0.0.1",
};
export default RequestIdPlugin;

@@ -0,0 +1,60 @@
import { Server, RouteOptionsAccess } from "@hapi/hapi";
import { Prometheus } from "@promster/hapi";
interface StatusOptions {
path?: string;
auth?: RouteOptionsAccess;
}
const count = (statusCounter: any) => async () => {
statusCounter.inc();
return "Incremented metamigo_status_test counter";
};
const ping = async () => "OK";
const statusRoutes = (server: Server, opt?: StatusOptions) => {
const path = opt?.path || "/status";
const statusCounter = new Prometheus.Counter({
name: "metamigo_status_test",
help: "Test counter",
});
return [
{
method: "GET",
path: `${path}/ping`,
handler: ping,
options: {
auth: opt?.auth,
tags: ["api", "status", "ping"],
description: "Returns 200 and OK as the response.",
},
},
{
method: "GET",
path: `${path}/inc`,
handler: count(statusCounter),
options: {
auth: opt?.auth,
tags: ["api", "status", "prometheus"],
description: "Increments a test counter, for testing prometheus.",
},
},
];
};
const register = async (
server: Server,
options: StatusOptions
): Promise<void> => {
server.route(statusRoutes(server, options));
};
const StatusPlugin = {
register,
name: "status",
version: "0.0.1",
};
export default StatusPlugin;

@@ -0,0 +1,30 @@
import { recordInfo } from "./record-info";
import { RepositoryBase } from "./base";
import { Flavor, UUID } from "../helpers";
import { UserId } from "./user";
export type AccountId = Flavor<UUID, "Account Id">;
export interface UnsavedAccount {
compoundId: string;
userId: UserId;
providerType: string;
providerId: string;
providerAccountId: string;
refreshToken: string;
accessToken: string;
accessTokenExpires: Date;
}
export interface SavedAccount extends UnsavedAccount {
id: AccountId;
createdAt: Date;
updatedAt: Date;
}
export const AccountRecord = recordInfo<UnsavedAccount, SavedAccount>(
"app_public",
"accounts"
);
export class AccountRecordRepository extends RepositoryBase(AccountRecord) {}

@@ -0,0 +1,57 @@
import { TableName } from "pg-promise";
import { IMain } from "../db/types";
import { CrudRepository } from "./crud-repository";
import { PgRecordInfo, UnsavedR, SavedR, KeyType } from "./record-info";
import type { IDatabase } from "pg-promise";
export type PgProtocol<T> = IDatabase<T> & T;
/**
* This function returns a constructor for a repository class for [[TRecordInfo]]
*
* @param aRecordType the record type runtime definition
*/
// haven't figured out a good return type for this function
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function unboundRepositoryBase<
TRecordInfo extends PgRecordInfo,
TDatabaseExtension
>(aRecordType: TRecordInfo) {
return class Repository extends CrudRepository<
UnsavedR<TRecordInfo>,
SavedR<TRecordInfo>,
KeyType<TRecordInfo>
> {
_recordType!: TRecordInfo;
static readonly recordType = aRecordType;
static readonly schemaName = aRecordType.schemaName;
static readonly tableName = aRecordType.tableName;
public readonly recordType = aRecordType;
public readonly schemaTable: TableName;
public db: PgProtocol<TDatabaseExtension>;
public pgp: IMain;
constructor(db: PgProtocol<TDatabaseExtension>) {
super();
this.pgp = db.$config.pgp;
this.schemaTable = new this.pgp.helpers.TableName({
schema: aRecordType.schemaName,
table: aRecordType.tableName,
});
this.db = db;
if (!this.db) {
throw new Error("Missing database in repository");
}
}
};
}
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function RepositoryBase<
Rec extends PgRecordInfo,
TDatabaseExtension = unknown
>(recordType: Rec) {
return unboundRepositoryBase<Rec, TDatabaseExtension>(recordType);
}

@@ -0,0 +1,321 @@
/* eslint-disable @typescript-eslint/ban-types,@typescript-eslint/no-explicit-any */
import { TableName } from "pg-promise";
import decamelcaseKeys from "decamelcase-keys";
import isObject from "lodash/isObject";
import isArray from "lodash/isArray";
import zipObject from "lodash/zipObject";
import isEmpty from "lodash/isEmpty";
import omit from "lodash/omit";
import { IDatabase, IMain, IResult } from "../db/types";
import { PgRecordInfo, idKeysOf } from "./record-info";
export interface ICrudRepository<
TUnsavedR,
TSavedR extends TUnsavedR & IdKeyT,
IdKeyT extends object
> {
findById(id: IdKeyT): Promise<TSavedR | null>;
findBy(example: Partial<TSavedR>): Promise<TSavedR | null>;
findAll(): Promise<TSavedR[]>;
findAllBy(example: Partial<TSavedR>): Promise<TSavedR[]>;
existsById(id: IdKeyT): Promise<boolean>;
countBy(example: Partial<TSavedR>): Promise<number>;
count(): Promise<number>;
insert(record: TUnsavedR): Promise<TSavedR>;
insertAll(toInsert: TUnsavedR[]): Promise<TSavedR[]>;
updateById(id: IdKeyT, attrs: Partial<TSavedR>): Promise<TSavedR>;
update(record: TSavedR): Promise<TSavedR>;
updateAll(toUpdate: TSavedR[]): Promise<TSavedR[]>;
remove(record: TSavedR): Promise<number>;
removeAll(toRemove: TSavedR[]): Promise<number>;
removeBy(example: Partial<TSavedR>): Promise<number>;
removeById(id: IdKeyT): Promise<number>;
}
// The snake cased object going into the db
type DatabaseRow = Record<string, unknown>;
/**
* Base class for generic CRUD operations on a repository for a specific type.
*
* Several assumptions are made about your environment for this generic CRUD repository to work:
*
* - the underlying column names are snake_cased (this behavior can be changed, see [[columnize]])
* - the rows have only a single primary key (composite keys are not supported)
*
* @typeParam ID The type of the id column
* @typeParam T The type of the record
*/
export abstract class CrudRepository<
TUnsavedR,
TSavedR extends TUnsavedR & IdKeyT,
IdKeyT extends object
> implements ICrudRepository<TUnsavedR, TSavedR, IdKeyT>
{
/**
* the fully qualified table name
*/
abstract schemaTable: TableName;
abstract recordType: PgRecordInfo<TUnsavedR, TSavedR, IdKeyT>;
abstract db: IDatabase;
abstract pgp: IMain;
/**
* Converts the record's columns into snake_case
*
* @param record the record of type T to convert
*/
columnize(record: TSavedR | Partial<TSavedR>): DatabaseRow {
return decamelcaseKeys(record);
}
/*
* Creates a simple where clause with each key-value in `example` is
* formatted as KEY=VALUE and all kv-pairs are ANDed together.
*
* @param example key value pair of column names and values
*/
where(example: Partial<TSavedR>): string {
const snaked = this.columnize(example);
const clauses = Object.keys(snaked).reduce((acc, cur) => {
const colName = this.pgp.as.format("$1:name", cur);
return `${acc} and ${colName} = $<${cur}>`;
}, "");
const where = this.pgp.as.format(`WHERE 1=1 ${clauses}`, { ...snaked }); // Pre-format WHERE condition
return where;
}
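// For example (illustrative), where({ userId: "u1" }) snake-cases the key and
// yields roughly: WHERE 1=1 and "user_id" = 'u1'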
/**
* Converts a value containing the id of the record (which could be a primitive type, a composite object, or an array of values)
* into an object which can be safely passed to [[where]].
*/
idsObj(idValues: IdKeyT): IdKeyT {
if (isEmpty(idValues)) {
throw new Error(`idsObj(${this.schemaTable}): passed empty id(s)`);
}
let ids = {};
const idKeys = idKeysOf(this.recordType as any);
if (isArray(idValues)) {
ids = zipObject(idKeys, idValues);
} else if (isObject(idValues)) {
ids = idValues;
} else {
if (idKeys.length !== 1) {
throw new Error(
`idsObj(${this.schemaTable}): passed record has multiple primary keys. the ids must be passed as an object or array. ${idValues}`
);
}
// @ts-ignore
ids[idKeys[0]] = idValues;
}
// this is a sanity check so we don't do something like
// deleting all the data if a WHERE slips in with no ids
if (isEmpty(ids)) {
throw new Error(`idsObj(${this.schemaTable}): passed empty ids`);
}
return ids as IdKeyT;
}
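// For example (illustrative), with a single-key record idsObj("abc") yields
// { id: "abc" }; for a composite-key record, idsObj(["a", "b"]) zips the
// values against the declared idKeys.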
/**
* Returns all rows in the table
*/
async findAll(): Promise<TSavedR[]> {
return this.db.any("SELECT * FROM $1", [this.schemaTable]);
}
/**
* Returns the number of rows in the table
*/
async count(): Promise<number> {
return this.db.one(
"SELECT count(*) FROM $1",
[this.schemaTable],
(a: { count: string }) => Number(a.count)
);
}
/**
* Returns the number of rows in the table matching the example
*/
async countBy(example: Partial<TSavedR>): Promise<number> {
return this.db.one(
"SELECT count(*) FROM $1 $2:raw ",
[this.schemaTable, this.where(example)],
(a: { count: string }) => Number(a.count)
);
}
/**
* Find a single row where the example are true.
* @param example key-value pairs of column names and values
*/
async findBy(example: Partial<TSavedR>): Promise<TSavedR | null> {
return this.db.oneOrNone("SELECT * FROM $1 $2:raw LIMIT 1", [
this.schemaTable,
this.where(example),
]);
}
/**
* Retrieves a row by ID
* @param id
*/
async findById(id: IdKeyT): Promise<TSavedR | null> {
const where = this.idsObj(id);
return this.db.oneOrNone("SELECT * FROM $1 $2:raw", [
this.schemaTable,
this.where(where),
]);
}
/**
* Returns whether a given row with id exists
* @param id
*/
async existsById(id: IdKeyT): Promise<boolean> {
return this.db.one(
"SELECT EXISTS(SELECT 1 FROM $1 $2:raw)",
[this.schemaTable, this.where(this.idsObj(id))],
(a: { exists: boolean }) => a.exists
);
}
/**
* Find all rows where the example are true.
* @param example key-value pairs of column names and values
*/
async findAllBy(example: Partial<TSavedR>): Promise<TSavedR[]> {
return this.db.any("SELECT * FROM $1 $2:raw", [
this.schemaTable,
this.where(example),
]);
}
/**
* Creates a new row
* @param record
* @return the new row
*/
async insert(record: TUnsavedR): Promise<TSavedR> {
return this.db.one("INSERT INTO $1 ($2:name) VALUES ($2:csv) RETURNING *", [
this.schemaTable,
this.columnize(record as any),
]);
}
/**
* Like `insert` but will insert/update a batch of rows at once
*/
async insertAll(toInsert: TUnsavedR[]): Promise<TSavedR[]> {
return this.db.tx((t) => {
const insertCommands: any[] = [];
toInsert.forEach((record) => {
insertCommands.push(this.insert(record));
});
return t.batch(insertCommands);
});
}
/**
* Deletes a row by id
* @param id
* @return the number of rows affected
*/
async removeById(id: IdKeyT): Promise<number> {
return this.db.result(
"DELETE FROM $1 $2:raw",
[this.schemaTable, this.where(this.idsObj(id))],
(r: IResult) => r.rowCount
);
}
/**
* Delete records matching the query
* @param example key-value pairs of column names and values
* @return the number of rows affected
*/
async removeBy(example: Partial<TSavedR>): Promise<number> {
if (isEmpty(example))
throw new Error(
`removeBy(${this.schemaTable}): passed empty constraint!`
);
return this.db.result(
"DELETE FROM $1 $2:raw",
[this.schemaTable, this.where(example)],
(r: IResult) => r.rowCount
);
}
/**
* Deletes the given row
*
* @param record to remove
* @return the number of rows affected
*/
async remove(record: TSavedR): Promise<number> {
return this.removeById(this.recordType.idOf(record));
}
/**
* Deletes all rows
* @param toRemove a list of rows to remove, if empty, DELETES ALL ROWS
* @return the number of rows affected
*/
async removeAll(toRemove: TSavedR[] = []): Promise<number> {
if (toRemove.length === 0) {
return this.db.result(
"DELETE FROM $1 WHERE 1=1;",
[this.schemaTable],
(r: IResult) => r.rowCount
);
}
const results = await this.db.tx((t) => {
const delCommands: any[] = [];
toRemove.forEach((record) => {
delCommands.push(this.remove(record));
});
return t.batch(delCommands);
});
return results.reduce((sum: number, n: number) => sum + n, 0);
}
/**
* Updates an existing row
* @param id
* @param attrs
* @return the updated row
*/
async updateById(id: IdKeyT, attrs: Partial<TSavedR>): Promise<TSavedR> {
const idKeys = idKeysOf(this.recordType as any);
const attrsSafe = omit(attrs, idKeys);
return this.db.one(
"UPDATE $1 SET ($2:name) = ROW($2:csv) $3:raw RETURNING *",
[this.schemaTable, this.columnize(attrsSafe), this.where(this.idsObj(id))]
);
}
async update(record: TSavedR): Promise<TSavedR> {
return this.updateById(this.recordType.idOf(record), record);
}
/**
* Update a batch of records at once
*/
async updateAll(toUpdate: TSavedR[]): Promise<TSavedR[]> {
return this.db.tx((t) => {
const updateCommands: any[] = [];
toUpdate.forEach((record) => {
updateCommands.push(this.update(record));
});
return t.batch(updateCommands);
});
}
}

@@ -0,0 +1,16 @@
export * from "./base";
export * from "./record-info";
export * from "./crud-repository";
export * from "./user";
export * from "./session";
export * from "./account";
import type { AccountRecordRepository } from "./account";
import type { UserRecordRepository } from "./user";
import type { SessionRecordRepository } from "./session";
export interface IMetamigoRepositories {
users: UserRecordRepository;
sessions: SessionRecordRepository;
accounts: AccountRecordRepository;
}

@@ -0,0 +1,54 @@
export interface EntityType<TUnsaved = any, TSaved = any, TIds extends object = any> {
_saved: TSaved;
_unsaved: TUnsaved;
_idKeys: TIds;
idOf: (rec: TSaved) => TIds;
}
export declare type UnsavedR<T extends {
_unsaved: any;
}> = T["_unsaved"];
export declare type SavedR<T extends {
_saved: any;
}> = T["_saved"];
export declare type KeyType<R extends EntityType> = R["_idKeys"];
export interface PgRecordInfo<Unsaved = any, Saved extends Unsaved & IdType = any, IdType extends object = any> extends EntityType<Unsaved, Saved, IdType> {
tableName: string;
schemaName: string;
idKeys: (keyof Saved)[];
}
/**
* Extract the runtime key name from a recordInfo
*/
export declare function idKeysOf<RI extends PgRecordInfo>(recordInfoWithIdKey: RI): string[];
/**
* Turns a record type with possibly more fields than "id" into an array
*/
export declare function collectIdValues<RecordT extends PgRecordInfo>(idObj: KeyType<RecordT>, knexRecordType: RecordT): string[];
/**
*
* Creates a record descriptor that captures the table name, primary key name,
* unsaved type, and saved type of a database record type. Assumes "id" as the
* primary key name
*
*/
export declare function recordInfo<Unsaved, Saved extends Unsaved & {
id: any;
}>(schemaName: string, tableName: string): PgRecordInfo<Unsaved, Saved, Pick<Saved, "id">>;
export declare function recordInfo<Type extends {
id: string;
}>(schemaName: string, tableName: string): PgRecordInfo<Type, Type, Pick<Type, "id">>;
/**
*
* Creates a record descriptor that captures the table name, primary key name,
* unsaved type, and saved type of a database record type.
*
*/
export declare function recordInfo<Unsaved, Saved extends Unsaved, Id extends keyof Saved>(schemaName: string, tableName: string, idKey: Id[]): PgRecordInfo<Unsaved, Saved, Pick<Saved, Id>>;
/**
*
* Creates a record descriptor for records with composite primary keys
*
*/
export declare function compositeRecordType<TUnsaved, TSaved extends TUnsaved = TUnsaved>(schemaName: string, tableName: string): {
withCompositeKeys<TKeys extends keyof TSaved>(keys: TKeys[]): PgRecordInfo<TUnsaved, TSaved, Pick<TSaved, TKeys>>;
};

View file

@ -0,0 +1,133 @@
/* eslint-disable @typescript-eslint/ban-types,@typescript-eslint/no-explicit-any,@typescript-eslint/explicit-module-boundary-types */
import at from "lodash/at";
import pick from "lodash/pick";
export interface EntityType<
TUnsaved = any,
TSaved = any,
TIds extends object = any
> {
_saved: TSaved;
_unsaved: TUnsaved;
_idKeys: TIds;
idOf: (rec: TSaved) => TIds;
}
export type UnsavedR<T extends { _unsaved: any }> = T["_unsaved"];
export type SavedR<T extends { _saved: any }> = T["_saved"];
export type KeyType<R extends EntityType> = R["_idKeys"];
export interface PgRecordInfo<
Unsaved = any,
Saved extends Unsaved & IdType = any,
IdType extends object = any
> extends EntityType<Unsaved, Saved, IdType> {
tableName: string;
schemaName: string;
idKeys: (keyof Saved)[];
}
/**
* Extract the runtime key name from a recordInfo
*/
export function idKeysOf<RI extends PgRecordInfo>(
recordInfoWithIdKey: RI
): string[] {
return recordInfoWithIdKey.idKeys as any;
}
/**
* Turns a record type with possibly more fields than "id" into an array
*/
export function collectIdValues<RecordT extends PgRecordInfo>(
idObj: KeyType<RecordT>,
knexRecordType: RecordT
): string[] {
return at(idObj, idKeysOf(knexRecordType));
}
function castToRecordInfo(
runtimeData: Omit<PgRecordInfo, "_idKeys" | "_saved" | "_unsaved">
): PgRecordInfo {
return runtimeData as PgRecordInfo;
}
/**
*
* Creates a record descriptor that captures the table name, primary key name,
* unsaved type, and saved type of a database record type. Assumes "id" as the
* primary key name
*
*/
export function recordInfo<Unsaved, Saved extends Unsaved & { id: any }>(
schemaName: string,
tableName: string
): PgRecordInfo<Unsaved, Saved, Pick<Saved, "id">>;
export function recordInfo<Type extends { id: string }>(
schemaName: string,
tableName: string
): PgRecordInfo<Type, Type, Pick<Type, "id">>;
/**
*
* Creates a record descriptor that captures the table name, primary key name,
* unsaved type, and saved type of a database record type.
*
*/
export function recordInfo<
Unsaved,
Saved extends Unsaved,
Id extends keyof Saved
>(
schemaName: string,
tableName: string,
idKey: Id[]
): PgRecordInfo<Unsaved, Saved, Pick<Saved, Id>>;
/**
*
* Don't use this signature be sure to provide unsaved and saved types.
*
*/
export function recordInfo(
schemaName: string,
tableName: string,
idKeys?: string[]
) {
idKeys = idKeys || ["id"];
return castToRecordInfo({
schemaName,
tableName,
idKeys,
idOf: (rec) => pick(rec, idKeys as any),
});
}
/**
*
* Creates a record descriptor for records with composite primary keys
*
*/
export function compositeRecordType<
TUnsaved,
TSaved extends TUnsaved = TUnsaved
>(
schemaName: string,
tableName: string
): {
withCompositeKeys<TKeys extends keyof TSaved>(
keys: TKeys[]
): PgRecordInfo<TUnsaved, TSaved, Pick<TSaved, TKeys>>;
} {
return {
withCompositeKeys(keys) {
return castToRecordInfo({
schemaName,
tableName,
idKeys: keys,
idOf: (rec) => pick(rec, keys),
});
},
};
}

View file

@ -0,0 +1,26 @@
import { recordInfo } from "./record-info";
import { RepositoryBase } from "./base";
import { Flavor, UUID } from "../helpers";
import { UserId } from "./user";
export type SessionId = Flavor<UUID, "Session Id">;
export interface UnsavedSession {
userId: UserId;
expires: Date;
sessionToken: string;
accessToken: string;
}
export interface SavedSession extends UnsavedSession {
id: SessionId;
createdAt: Date;
updatedAt: Date;
}
export const SessionRecord = recordInfo<UnsavedSession, SavedSession>(
"app_private",
"sessions"
);
export class SessionRecordRepository extends RepositoryBase(SessionRecord) {}

View file

@ -0,0 +1,40 @@
import { recordInfo } from "./record-info";
import { RepositoryBase } from "./base";
import { Flavor, UUID } from "../helpers";
export type UserId = Flavor<UUID, "User Id">;
export interface UnsavedUser {
name: string;
email: string;
emailVerified: Date;
avatar: string;
isActive: boolean;
userRole: string;
}
export interface SavedUser extends UnsavedUser {
id: UserId;
createdAt: Date;
updatedAt: Date;
}
export const UserRecord = recordInfo<UnsavedUser, SavedUser>(
"app_public",
"users"
);
export class UserRecordRepository extends RepositoryBase(UserRecord) {
async upsert(record: UnsavedUser | SavedUser): Promise<SavedUser> {
return this.db.one(
`INSERT INTO $1 ($2:name) VALUES ($2:csv)
ON CONFLICT (email)
DO UPDATE SET
name = EXCLUDED.name,
avatar = EXCLUDED.avatar,
email_verified = EXCLUDED.email_verified
RETURNING *`,
[this.schemaTable, this.columnize(record)]
);
}
}

View file

@ -0,0 +1,13 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"incremental": true,
"outDir": "build/main",
"paths": {
"@hapipal/schmervice": ["vendor/hapipal__schmervice"],
"*": ["node_modules/@types/*", "*"]
}
},
"include": ["**/*.ts"],
"exclude": ["node_modules/**"]
}

View file

@ -0,0 +1,18 @@
require('@digiresilience/eslint-config-metamigo/patch/modern-module-resolution');
module.exports = {
extends: [
"@digiresilience/eslint-config-metamigo/profile/node",
"@digiresilience/eslint-config-metamigo/profile/typescript"
],
parserOptions: { tsconfigRootDir: __dirname },
rules: {
"import/no-extraneous-dependencies": [
// enable this when this is fixed
// https://github.com/benmosher/eslint-plugin-import/pull/1696
"off",
{ packageDir: [".", "node_modules/@digiresilience/metamigo", "node_modules/@digiresilience/metamigo-dev"] },
],
// TODO: enable this after jest fixes this issue https://github.com/nodejs/node/issues/38343
"unicorn/prefer-node-protocol": "off"
}
};

View file

@ -0,0 +1,376 @@
import * as process from "process";
import * as convict from "convict";
import * as Metamigo from "common";
import { defState } from "@digiresilience/montar";
export const configSchema = {
db: {
connection: {
doc: "The postgres connection url.",
format: "uri",
default: "postgresql://metamigo:metamigo@127.0.0.1:5435/metamigo_dev",
env: "DATABASE_URL",
sensitive: true,
},
name: {
doc: "The name of the postgres database",
format: String,
default: "metamigo_dev",
env: "DATABASE_NAME",
},
owner: {
doc: "The username of the postgres database owner",
format: String,
default: "metamigo",
env: "DATABASE_OWNER",
},
},
worker: {
connection: {
doc: "The postgres connection url for the worker database.",
format: "uri",
default: "postgresql://metamigo:metamigo@127.0.0.1:5435/metamigo_dev",
env: "WORKER_DATABASE_URL",
},
concurrency: {
doc: "The number of jobs to run concurrently",
default: 1,
format: "positiveInt",
env: "WORKER_CONCURRENT_JOBS",
},
pollInterval: {
doc: "How long to wait between polling for jobs in milliseconds (for jobs scheduled in the future/retries)",
default: 2000,
format: "positiveInt",
env: "WORKER_POLL_INTERVAL_MS",
},
},
postgraphile: {
auth: {
doc: "The postgres role that postgraphile logs in with",
format: String,
default: "metamigo_graphile_auth",
env: "DATABASE_AUTHENTICATOR",
},
appRootConnection: {
doc: "The postgres root/superuser connection url for development mode so PG can watch the schema changes, this is strangely named in the postgraphile API 'ownerConnectionString'",
format: String,
default: "postgresql://postgres:metamigo@127.0.0.1:5435/metamigo_dev",
env: "APP_ROOT_DATABASE_URL",
},
authConnection: {
doc: "The postgres connection URL for postgraphile, must not be superuser and must have limited privs.",
format: String,
default:
"postgresql://metamigo_graphile_auth:metamigo@127.0.0.1:5435/metamigo_dev",
env: "DATABASE_AUTH_URL",
},
visitor: {
doc: "The postgres role that postgraphile switches to",
format: String,
default: "app_postgraphile",
env: "DATABASE_VISITOR",
},
schema: {
doc: "The schema postgraphile should expose with graphql",
format: String,
default: "app_public",
},
enableGraphiql: {
doc: "Whether to enable the graphiql web interface or not",
format: "Boolean",
default: false,
env: "ENABLE_GRAPHIQL",
},
},
dev: {
shadowConnection: {
doc: "The shadow databse connection url used by postgraphile-migrate. Not needed in production.",
format: "uri",
default: "postgresql://metamigo:metamigo@127.0.0.1:5435/metamigo_shadow",
env: "SHADOW_DATABASE_URL",
sensitive: true,
},
rootConnection: {
doc: "The postgres root/superuser connection url for testing only, database must NOT be the app database. Not needed in production.",
format: "uri",
default: "postgresql://postgres:metamigo@127.0.0.1:5435/template1",
env: "ROOT_DATABASE_URL",
sensitive: true,
},
},
frontend: {
url: {
doc: "The url the frontend can be accessed at",
format: "url",
default: "http://localhost:3000",
env: "FRONTEND_URL",
},
apiUrl: {
doc: "The url the api backend can be accessed at from the frontend server",
format: "url",
default: "http://localhost:3001",
env: "API_URL",
},
},
nextAuth: {
secret: {
doc: "A random string used to hash tokens, sign cookies and generate crytographic keys. Shared with the api backend.",
format: String,
default: undefined,
env: "NEXTAUTH_SECRET",
sensitive: true,
},
audience: {
doc: "We will add this string as the `aud` claim to our JWT token, if empty or not present defaults to `frontend.url`",
format: String,
default: "",
env: "NEXTAUTH_AUDIENCE",
},
signingKeyB64: {
doc: "A base64 encoded JWK.Key used for JWT signing",
format: String,
default: undefined,
env: "NEXTAUTH_SIGNING_KEY_B64",
sensitive: true,
},
encryptionKeyB64: {
doc: "A base64 encoded JWK.Key used for JWT encryption",
format: String,
default: undefined,
env: "NEXTAUTH_ENCRYPTION_KEY_B64",
sensitive: true,
},
signingKey: {
doc: "",
format: String,
default: undefined,
sensitive: true,
skipGenerate: true,
},
encryptionKey: {
doc: "",
format: String,
default: undefined,
sensitive: true,
skipGenerate: true,
},
google: {
id: {
doc: "reference https://next-auth.js.org/providers/google",
format: String,
default: undefined,
env: "GOOGLE_ID",
sensitive: true,
},
secret: {
doc: "reference https://next-auth.js.org/providers/google",
format: String,
default: undefined,
env: "GOOGLE_SECRET",
sensitive: true,
},
},
github: {
id: {
doc: "reference https://next-auth.js.org/providers/github",
format: String,
default: undefined,
env: "GITHUB_ID",
sensitive: true,
},
secret: {
doc: "reference https://next-auth.js.org/providers/github",
format: String,
default: undefined,
env: "GITHUB_SECRET",
sensitive: true,
},
},
gitlab: {
id: {
doc: "reference https://next-auth.js.org/providers/gitlab",
format: String,
default: undefined,
env: "GITLAB_ID",
sensitive: true,
},
secret: {
doc: "reference https://next-auth.js.org/providers/gitlab",
format: String,
default: undefined,
env: "GITLAB_SECRET",
sensitive: true,
},
},
cognito: {
id: {
doc: "reference https://next-auth.js.org/providers/cognito",
format: String,
default: undefined,
env: "COGNITO_ID",
sensitive: true,
},
secret: {
doc: "reference https://next-auth.js.org/providers/cognito",
format: String,
default: undefined,
env: "COGNITO_SECRET",
sensitive: true,
},
domain: {
doc: "reference https://next-auth.js.org/providers/cognito",
format: String,
default: undefined,
env: "COGNITO_DOMAIN",
sensitive: true,
},
},
},
cfaccess: {
audience: {
doc: "the cloudflare access audience id",
format: String,
default: undefined,
env: "CFACCESS_AUDIENCE",
},
domain: {
doc: "the cloudflare access domain, something like `YOURAPP.cloudflareaccess.com`",
format: String,
default: undefined,
env: "CFACCESS_DOMAIN",
},
},
signald: {
enabled: {
doc: "Whether to enable the signald signal backend",
format: "Boolean",
default: false,
env: "SIGNALD_ENABLED",
},
socket: {
doc: "the unix domain socket signald is listening on",
format: String,
default: `${process.cwd()}/signald/signald.sock`,
env: "SIGNALD_SOCKET",
},
},
};
// define the interfaces for the concrete config objects
export interface IDBConfig {
connection: string;
name: string;
owner: string;
}
export interface IWorkerConfig {
connection: string;
concurrency: number;
pollInterval: number;
}
export interface IPostgraphileConfig {
auth: string;
visitor: string;
appRootConnection: string;
authConnection: string;
schema: string;
enableGraphiql: boolean;
}
export interface IDevConfig {
shadowConnection: string;
rootConnection: string;
}
export interface IFrontendConfig {
url: string;
apiUrl: string;
}
export interface INextAuthConfig {
secret: string;
audience: string;
signingKey: string;
encryptionKey: string;
signingKeyB64: string;
encryptionKeyB64: string;
google?: { id: string; secret: string };
github?: { id: string; secret: string };
gitlab?: { id: string; secret: string };
cognito?: { id: string; secret: string; domain: string };
}
export interface ICFAccessConfig {
audience: string;
domain: string;
}
export interface ISignaldConifg {
enabled: boolean;
socket: string;
}
// Extend the metamigo base type to add your app's custom config along side the out
// of the box Metamigo config
export interface IAppConfig extends Metamigo.IMetamigoConfig {
db: IDBConfig;
worker: IWorkerConfig;
postgraphile: IPostgraphileConfig;
dev: IDevConfig;
frontend: IFrontendConfig;
nextAuth: INextAuthConfig;
cfaccess: ICFAccessConfig;
signald: ISignaldConifg;
}
export type IAppConvict = Metamigo.ExtendedConvict<IAppConfig>;
// Merge the Metamigo base schema with your app's schmea
// @ts-ignore
export const schema: convict.Schema<IAppConfig> = {
...Metamigo.configBaseSchema,
...configSchema,
};
export const loadConfig = async (): Promise<IAppConfig> => {
const config = await Metamigo.loadConfiguration(schema);
if (!config.frontend.url || config.frontend.url === "")
throw new Error(
"configuration value frontend.url is missing. Add to config or set NEXTAUTH_URL env var"
);
// nextauth expects the url to be provided with this environment variable, so we will munge it in place here
process.env.NEXTAUTH_URL = config.frontend.url;
if (config.nextAuth.signingKeyB64)
config.nextAuth.signingKey = Buffer.from(
config.nextAuth.signingKeyB64,
"base64"
).toString("utf-8");
if (config.nextAuth.encryptionKeyB64)
config.nextAuth.encryptionKey = Buffer.from(
config.nextAuth.encryptionKeyB64,
"base64"
).toString("utf-8");
if (!config.nextAuth.audience || config.nextAuth.audience === "")
config.nextAuth.audience = config.frontend.url;
return config as any;
};
export const loadConfigRaw = async (): Promise<IAppConvict> => {
return Metamigo.loadConfigurationRaw(schema);
};
const config = defState("config", {
start: loadConfig,
});
export default config;

View file

@ -0,0 +1,35 @@
{
"name": "config",
"version": "0.2.0",
"main": "build/main/index.js",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@digiresilience/montar": "^0.1.6"
},
"devDependencies": {
"@babel/core": "7.20.12",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"eslint": "^8.32.0",
"pino-pretty": "^9.1.1",
"prettier": "^2.8.3",
"ts-node": "^10.9.1",
"typedoc": "^0.23.24",
"typescript": "4.9.4"
},
"files": ["build", "src"],
"scripts": {
"build": "tsc -p tsconfig.json",
"doc:html": "typedoc src/ --exclude '**/*.test.ts' --exclude '**/*.spec.ts' --name $npm_package_name --readme README.md --target es2019 --mode file --out build/docs",
"doc": "yarn run doc:html",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"lint:lint": "eslint src --ext .ts",
"lint:prettier": "prettier \"src/**/*.ts\" --list-different",
"test": "echo no tests",
"lint": "yarn lint:lint && yarn lint:prettier",
"watch:build": "tsc -p tsconfig.json -w"
}
}

View file

@ -0,0 +1,8 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "build/main"
},
"include": ["**/*.ts", "**/.*.ts", "index.ts"],
"exclude": ["node_modules", "**/*.spec.ts", "**/*.test.ts"]
}

View file

@ -0,0 +1 @@
require("../.eslintrc.js");

117
packages/metamigo-db/.gmrc Normal file
View file

@ -0,0 +1,117 @@
/*
* Graphile Migrate configuration.
*
* MUST NOT CONTAIN SECRETS/PASSWORDS
* This file is in JSON5 format.
*/
{
/*
* Database connections strings are sourced from the DATABASE_URL,
* SHADOW_DATABASE_URL and ROOT_DATABASE_URL environmental variables.
*/
/*
* pgSettings: key-value settings to be automatically loaded into PostgreSQL
* before running migrations, using an equivalent of `SET LOCAL <key> TO
* <value>`
*/
"pgSettings": {
"search_path": "public",
},
/*
* placeholders: substituted in SQL files when compiled/executed. Placeholder
* keys should be prefixed with a colon and in all caps, like
* `:COLON_PREFIXED_ALL_CAPS`. Placeholder values should be strings. They
* will be replaced verbatim with NO ESCAPING AT ALL (this differs from how
* psql handles placeholders) so should only be used with "safe" values. This
* is useful for committing migrations where certain parameters can change
* between environments (development, staging, production) but you wish to
* use the same signed migration files for all.
*
* The special value "!ENV" can be used to indicate an environmental variable
* of the same name should be used.
*
* Graphile Migrate automatically sets the `:DATABASE_NAME` and
* `:DATABASE_OWNER` placeholders, and you should not attempt to override
* these.
*/
"placeholders": {
":DATABASE_VISITOR": "!ENV",
":DATABASE_AUTHENTICATOR": "!ENV",
},
/*
* Actions allow you to run scripts or commands at certain points in the
* migration lifecycle. SQL files are ran against the database directly.
* "command" actions are ran with the following environmental variables set:
*
* - GM_DBURL: the PostgreSQL URL of the database being migrated
* - GM_DBNAME: the name of the database from GM_DBURL
* - GM_DBUSER: the user from GM_DBURL
* - GM_SHADOW: set to 1 if the shadow database is being migrated, left unset
* otherwise
*
* If "shadow" is unspecified, the actions will run on events to both shadow
* and normal databases. If "shadow" is true the action will only run on
* actions to the shadow DB, and if false only on actions to the main DB.
*/
/*
* afterReset: actions executed after a `graphile-migrate reset` command.
*/
"afterReset": [
"!../scripts/afterReset.sql",
],
/*
* afterAllMigrations: actions executed once all migrations are complete.
*/
"afterAllMigrations": [
{
"_": "command",
"shadow": true,
"command": "node scripts/dump-db.js"
},
],
/*
* afterCurrent: actions executed once the current migration has been
* evaluated (i.e. in watch mode).
*/
"afterCurrent": [
{
"_": "command",
"command": "./scripts/afterCurrent.sh",
}
],
/*
* blankMigrationContent: content to be written to the current migration
* after commit. NOTE: this should only contain comments.
*/
// "blankMigrationContent": "-- Write your migration here\n",
/****************************************************************************\
*** ***
*** You probably don't want to edit anything below here. ***
*** ***
\****************************************************************************/
/*
* manageGraphileMigrateSchema: if you set this false, you must be sure to
* keep the graphile_migrate schema up to date yourself. We recommend you
* leave it at its default.
*/
// "manageGraphileMigrateSchema": true,
/*
* migrationsFolder: path to the folder in which to store your migrations.
*/
// migrationsFolder: "./migrations",
"//generatedWith": "1.0.2"
}

View file

@ -0,0 +1,2 @@
FROM postgres:13
COPY scripts/bootstrap.sh /docker-entrypoint-initdb.d/bootstrap.sh

View file

@ -0,0 +1,67 @@
import process from "process";
import { existsSync } from "fs";
import { exec } from "child_process";
import type { IAppConfig } from "config";
/**
* We use graphile-migrate for managing database migrations.
*
* However we also use convict as the sole source of truth for our app's configuration. We do not want to have to configure
* separate env files or config files for graphile-migrate and yet again others for convict.
*
* So we wrap the graphile-migrate cli tool here. We parse our convict config, set necessary env vars, and then shell out to
* graphile-migrate.
*
* Commander eats all args starting with --, so you must use the -- escape to indicate the arguments have finished
*
* Example:
* ./cli db -- --help // will show graphile migrate help
* ./cli db -- watch // will watch the current sql for changes
* ./cli db -- watch --once // will apply the current sql once
*/
export const migrateWrapper = async (
commands: string[],
config: IAppConfig,
silent = false
): Promise<void> => {
const env = {
DATABASE_URL: config.db.connection,
SHADOW_DATABASE_URL: config.dev.shadowConnection,
ROOT_DATABASE_URL: config.dev.rootConnection,
DATABASE_NAME: config.db.name,
DATABASE_OWNER: config.db.owner,
DATABASE_AUTHENTICATOR: config.postgraphile.auth,
DATABASE_VISITOR: config.postgraphile.visitor,
};
const cmd = `npx --no-install graphile-migrate ${commands.join(" ")}`;
const dbDir = `../../db`;
const gmrc = `${dbDir}/.gmrc`;
if (!existsSync(gmrc)) {
throw new Error(`graphile migrate config not found at ${gmrc}`);
}
if (!silent) console.log("executing:", cmd);
return new Promise((resolve, reject) => {
const proc = exec(cmd, {
env: { ...process.env, ...env },
cwd: dbDir,
});
proc.stdout.on("data", (data) => {
if (!silent) console.log("MIGRATE:", data);
});
proc.stderr.on("data", (data) => {
console.error("MIGRATE", data);
});
proc.on("close", (code) => {
if (code !== 0) {
reject(new Error(`graphile-migrate exited with code ${code}`));
return;
}
resolve();
});
});
};

View file

@ -0,0 +1,89 @@
import { IAppConfig } from "config";
import camelcaseKeys from "camelcase-keys";
import PgSimplifyInflectorPlugin from "@graphile-contrib/pg-simplify-inflector";
// import PgManyToManyPlugin from "@graphile-contrib/pg-many-to-many";
import * as ConnectionFilterPlugin from "postgraphile-plugin-connection-filter";
import type { PostGraphileCoreOptions } from "postgraphile-core";
import {
UserRecordRepository,
AccountRecordRepository,
SessionRecordRepository,
} from "common";
import {
SettingRecordRepository,
VoiceProviderRecordRepository,
VoiceLineRecordRepository,
WebhookRecordRepository,
WhatsappBotRecordRepository,
WhatsappMessageRecordRepository,
WhatsappAttachmentRecordRepository,
SignalBotRecordRepository,
} from "./records";
import type { IInitOptions, IDatabase } from "pg-promise";
export interface IRepositories {
users: UserRecordRepository;
sessions: SessionRecordRepository;
accounts: AccountRecordRepository;
settings: SettingRecordRepository;
voiceLines: VoiceLineRecordRepository;
voiceProviders: VoiceProviderRecordRepository;
webhooks: WebhookRecordRepository;
whatsappBots: WhatsappBotRecordRepository;
whatsappMessages: WhatsappMessageRecordRepository;
whatsappAttachments: WhatsappAttachmentRecordRepository;
signalBots: SignalBotRecordRepository;
}
export type AppDatabase = IDatabase<IRepositories> & IRepositories;
export const dbInitOptions = (
_config: IAppConfig
): IInitOptions<IRepositories> => {
return {
noWarnings: true,
receive(data, result) {
if (result) result.rows = camelcaseKeys(data);
},
// Extending the database protocol with our custom repositories;
// API: http://vitaly-t.github.io/pg-promise/global.html#event:extend
extend(obj: any, _dc) { // AppDatase was obj type
// Database Context (_dc) is mainly needed for extending multiple databases with different access API.
// NOTE:
// This event occurs for every task and transaction being executed (which could be every request!)
// so it should be as fast as possible. Do not use 'require()' or do any other heavy lifting.
obj.users = new UserRecordRepository(obj);
obj.sessions = new SessionRecordRepository(obj);
obj.accounts = new AccountRecordRepository(obj);
obj.settings = new SettingRecordRepository(obj);
obj.voiceLines = new VoiceLineRecordRepository(obj);
obj.voiceProviders = new VoiceProviderRecordRepository(obj);
obj.webhooks = new WebhookRecordRepository(obj);
obj.whatsappBots = new WhatsappBotRecordRepository(obj);
obj.whatsappMessages = new WhatsappMessageRecordRepository(obj);
obj.whatsappAttachments = new WhatsappAttachmentRecordRepository(obj);
obj.signalBots = new SignalBotRecordRepository(obj);
},
};
};
export const getPostGraphileOptions = (): PostGraphileCoreOptions => {
return {
ignoreRBAC: false,
dynamicJson: true,
ignoreIndexes: false,
appendPlugins: [
PgSimplifyInflectorPlugin,
// PgManyToManyPlugin,
ConnectionFilterPlugin as any,
],
};
};
export * from "./helpers";
export * from "./records";

View file

@ -0,0 +1,650 @@
--! Previous: -
--! Hash: sha1:b13a5217288f5d349d8d9e3afbd7bb30c0dbad21
-- region Bootstrap
drop schema if exists app_public cascade;
alter default privileges revoke all on sequences from public;
alter default privileges revoke all on functions from public;
-- By default the public schema is owned by `postgres`; we need superuser privileges to change this :(
-- alter schema public owner to waterbear;
revoke all on schema public from public;
grant all on schema public to :DATABASE_OWNER;
create schema app_public;
grant usage on schema
public,
app_public
to
:DATABASE_VISITOR,
app_admin,
app_anonymous,
app_user;
/**********/
drop schema if exists app_hidden cascade;
create schema app_hidden;
grant usage on schema app_hidden to :DATABASE_VISITOR;
alter default privileges in schema app_hidden grant usage, select on sequences to :DATABASE_VISITOR;
/**********/
alter default privileges in schema public, app_public, app_hidden grant usage, select on sequences to :DATABASE_VISITOR;
alter default privileges in schema public, app_public, app_hidden
grant execute on functions to
:DATABASE_VISITOR,
app_admin,
app_user;
/**********/
drop schema if exists app_private cascade;
create schema app_private;
-- endregion
-- region UtilFunctions
create function app_private.tg__add_job() returns trigger as
$$
begin
perform graphile_worker.add_job(tg_argv[0], json_build_object('id', NEW.id),
coalesce(tg_argv[1], public.gen_random_uuid()::text));
return NEW;
end;
$$ language plpgsql volatile
security definer
set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__add_job() is
E'Useful shortcut to create a job on insert/update. Pass the task name as the first trigger argument, and optionally the queue name as the second argument. The record id will automatically be available on the JSON payload.';
/* ------------------------------------------------------------------ */
create function app_private.tg__timestamps() returns trigger as
$$
begin
NEW.created_at = (case when TG_OP = 'INSERT' then NOW() else OLD.created_at end);
NEW.updated_at = (case
when TG_OP = 'UPDATE' and OLD.updated_at >= NOW()
then OLD.updated_at + interval '1 millisecond'
else NOW() end);
return NEW;
end;
$$ language plpgsql volatile
set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__timestamps() is
E'This trigger should be called on all tables with created_at, updated_at - it ensures that they cannot be manipulated and that updated_at will always be larger than the previous updated_at.';
-- endregion
-- region Users, Sessions, and Accounts
/* ------------------------------------------------------------------ */
create table app_private.sessions
(
id uuid not null default gen_random_uuid() primary key,
user_id uuid not null,
expires timestamptz not null,
session_token text not null,
access_token text not null,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
last_active_at timestamptz not null default now()
);
create unique index session_token on app_private.sessions(session_token);
create unique index access_token on app_private.sessions(access_token);
alter table app_private.sessions
enable row level security;
/* ------------------------------------------------------------------ */
create function app_public.current_session_id() returns uuid as
$$
-- note the jwt.claims.session_id doesn't mean you have to use jwt, it is just where this function will always look for the session id.
select nullif(pg_catalog.current_setting('jwt.claims.session_id', true), '')::uuid;
$$ language sql stable;
comment on function app_public.current_session_id() is
E'Handy method to get the current session ID.';
/*
* A less secure but more performant version of this function would be just:
*
* select nullif(pg_catalog.current_setting('jwt.claims.user_id', true), '')::int;
*
* The increased security of this implementation is because even if someone gets
* the ability to run SQL within this transaction they cannot impersonate
* another user without knowing their session_id (which should be closely
* guarded).
*/
create function app_public.current_user_id() returns uuid as
$$
select user_id
from app_private.sessions
where id = app_public.current_session_id();
$$ language sql stable
security definer
set search_path to pg_catalog, public, pg_temp;
comment on function app_public.current_user_id() is
E'Handy method to get the current user ID for use in RLS policies, etc; in GraphQL, use `currentUser{id}` instead.';
-- We've put this in public, but omitted it, because it's often useful for debugging auth issues.
/* ------------------------------------------------------------------ */
-- These are the user roles for our application
create type app_public.role_type as
ENUM ('none','admin', 'user');
/* ------------------------------------------------------------------ */
create table app_public.users
(
id uuid not null default uuid_generate_v1mc() primary key,
email citext not null,
email_verified timestamptz,
name text not null,
avatar text,
user_role app_public.role_type not null default 'none',
is_active boolean not null default false,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
created_by text not null,
constraint users_email_validity check (email ~* '^[A-Za-z0-9._%-]+@[A-Za-z0-9.-]+[.][A-Za-z]+$'),
constraint users_avatar_validity check (avatar ~ '^https?://[^/]+'),
constraint users_email_unique unique (email)
);
comment on table app_public.users is
E'A user who can log in to the application.';
comment on column app_public.users.id is
E'Unique identifier for the user.';
comment on column app_public.users.email is
E'The email address of the user.';
comment on column app_public.users.email_verified is
E'The time at which the email address was verified';
comment on column app_public.users.name is
E'Public-facing name (or pseudonym) of the user.';
comment on column app_public.users.avatar is
E'Optional avatar URL.';
comment on column app_public.users.user_role is
E'The role that defines the user''s privileges.';
comment on column app_public.users.is_active is
E'If false, the user is not allowed to login or access the application';
alter table app_public.users
enable row level security;
alter table app_private.sessions
add constraint sessions_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
create index on app_private.sessions (user_id);
-- app_public perms default
create policy access_self on app_public.users to app_anonymous using (id = app_public.current_user_id());
--create policy update_self on app_public.users for update using (id = app_public.current_user_id());
grant select on app_public.users to app_anonymous;
grant update (name, avatar) on app_public.users to :DATABASE_VISITOR, app_user;
-- app_public perms for app_admin
create policy access_all on app_public.users to app_admin using (true);
grant update (email, name, avatar, is_active, user_role) on app_public.users to app_admin;
grant select on app_public.users to app_admin;
grant insert (email, name, avatar, user_role, is_active, created_by) on app_public.users to app_admin;
grant update (email, name, avatar, user_role, is_active, created_by) on app_public.users to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.users
for each row
execute procedure app_private.tg__timestamps();
/* ------------------------------------------------------------------ */
create function app_public.current_user() returns app_public.users as
$$
select users.*
from app_public.users
where id = app_public.current_user_id();
$$ language sql stable;
comment on function app_public.current_user() is
E'The currently logged in user (or null if not logged in).';
/* ------------------------------------------------------------------ */
create function app_public.logout() returns void as
$$
begin
-- Delete the session
delete from app_private.sessions where id = app_public.current_session_id();
-- Clear the identifier from the transaction
perform set_config('jwt.claims.session_id', '', true);
end;
$$ language plpgsql security definer
volatile
set search_path to pg_catalog, public, pg_temp;
/* ------------------------------------------------------------------ */
create table app_public.accounts
(
id uuid not null default uuid_generate_v1mc() primary key,
compound_id text not null,
user_id uuid not null,
provider_type text not null,
provider_id text not null,
provider_account_id text not null,
refresh_token text,
access_token text,
access_token_expires timestamptz,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
alter table app_public.accounts
enable row level security;
alter table app_public.accounts
add constraint accounts_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
create unique index accounts_compound_id on app_public.accounts(compound_id);
create index accounts_provider_account_id on app_public.accounts(provider_account_id);
create index accounts_provider_id on app_public.accounts(provider_id);
create index accounts_user_id on app_public.accounts (user_id);
create policy access_self on app_public.accounts to app_anonymous using (user_id = app_public.current_user_id());
grant select on app_public.accounts to app_anonymous;
grant update (compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_user;
create policy access_all on app_public.accounts to app_admin using (true);
grant update (compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_admin;
grant select on app_public.accounts to app_admin;
grant insert (user_id, compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_admin;
grant update (compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.accounts
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Create first user function
create or replace function app_public.create_first_user (user_email text, user_name text)
returns setof app_public.users
as
$$
declare
user_count int;
begin
user_count := (select count(id) from app_public.users);
if (user_count != 0) then
raise exception 'Admin user already created';
end if;
return query insert into app_public.users (email, email_verified, name, user_role, is_active, created_by)
values (user_email, now(), user_name, 'admin', true, 'first user hook') returning *;
end ;
$$ LANGUAGE plpgsql VOLATILE
SECURITY DEFINER;
comment on function app_public.create_first_user(user_email text, user_name text) is
E'Creates the first user with an admin role. Only possible when there are no other users in the database.';
grant execute on function app_public.create_first_user(user_email text, user_name text) to app_anonymous;
create function app_private.tg__first_user() returns trigger as
$$
declare
user_count int;
begin
user_count := (select count(id) from app_public.users);
if (user_count = 0) then
NEW.user_role = 'admin';
end if;
return NEW;
end;
$$ language plpgsql volatile
set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__first_user() is
E'This trigger is called to ensure the first user created is an admin';
create trigger _101_first_user
before insert
on app_public.users
for each row
execute procedure app_private.tg__first_user();
-- endregion
-- region Settings
create table app_public.settings
(
id uuid not null default uuid_generate_v1mc() primary key,
name text not null,
value jsonb,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index setting_name on app_public.settings(name);
alter table app_public.settings
enable row level security;
create policy access_all on app_public.settings to app_admin using (true);
grant update (name, value) on app_public.settings to app_admin;
grant select on app_public.settings to app_admin;
grant insert (name, value) on app_public.settings to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.settings
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Provider
create table app_public.voice_providers
(
id uuid not null default uuid_generate_v1mc() primary key,
kind text not null,
name text not null,
credentials jsonb not null,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index voice_providers_number on app_public.voice_providers(name);
alter table app_public.voice_providers
enable row level security;
create policy access_all on app_public.voice_providers to app_admin using (true);
grant update (name, credentials) on app_public.voice_providers to app_admin;
grant select on app_public.voice_providers to app_admin;
grant insert (kind, name, credentials) on app_public.voice_providers to app_admin;
grant delete on app_public.voice_providers to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.voice_providers
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Voice Line
create table app_public.voice_lines
(
id uuid not null default uuid_generate_v1mc() primary key,
provider_id uuid not null,
provider_line_sid text not null,
number text not null,
language text not null,
voice text not null,
prompt_text text,
prompt_audio jsonb,
audio_prompt_enabled boolean not null default false,
audio_converted_at timestamptz,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
alter table app_public.voice_lines
add constraint voice_lines_provider_id_fkey foreign key ("provider_id") references app_public.voice_providers on delete cascade;
create index on app_public.voice_lines (provider_id);
create index on app_public.voice_lines (provider_line_sid);
create unique index voice_lines_number on app_public.voice_lines(number);
alter table app_public.voice_lines
enable row level security;
create policy access_all on app_public.voice_lines to app_admin using (true);
grant update (prompt_text, prompt_audio, audio_prompt_enabled, language, voice) on app_public.voice_lines to app_admin;
grant select on app_public.voice_lines to app_admin;
grant insert (provider_id, provider_line_sid, number, prompt_text, prompt_audio, audio_prompt_enabled, language, voice) on app_public.voice_lines to app_admin;
grant delete on app_public.voice_lines to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.voice_lines
for each row
execute procedure app_private.tg__timestamps();
create function app_private.tg__voice_line_provider_update() returns trigger as $$
begin
if (TG_OP = 'DELETE') then
perform graphile_worker.add_job('voice-line-delete', json_build_object('voiceLineId', OLD.id, 'providerId', OLD.provider_id, 'providerLineSid', OLD.provider_line_sid));
else
perform graphile_worker.add_job('voice-line-provider-update', json_build_object('voiceLineId', NEW.id));
end if;
return null;
end;
$$ language plpgsql volatile security definer set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__voice_line_provider_update() is
E'This trigger is called to ensure a voice line is connected to twilio properly';
create trigger _101_voice_line_provider_update
after insert or update of provider_line_sid or delete
on app_public.voice_lines
for each row
execute procedure app_private.tg__voice_line_provider_update();
create function app_private.tg__voice_line_prompt_audio_update() returns trigger as $$
begin
perform graphile_worker.add_job('voice-line-audio-update', json_build_object('voiceLineId', NEW.id));
return null;
end;
$$ language plpgsql volatile security definer set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__voice_line_prompt_audio_update() is
E'This trigger is called to ensure a voice line is connected to twilio properly';
create trigger _101_voice_line_prompt_audio_update
after insert or update of prompt_audio
on app_public.voice_lines
for each row
execute procedure app_private.tg__voice_line_prompt_audio_update();
-- endregion
-- region Webhooks
create table app_public.webhooks
(
id uuid not null default uuid_generate_v1mc() primary key,
backend_type text not null,
backend_id uuid not null,
name text not null,
endpoint_url text not null,
http_method text not null default 'post',
headers jsonb,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
constraint webhook_http_method_validity check (http_method in ('post', 'put')),
constraint webhook_endpoint_url_validity check (endpoint_url ~ '^https?://[^/]+')
);
create index on app_public.webhooks (backend_type, backend_id);
alter table app_public.webhooks
enable row level security;
create policy access_all on app_public.webhooks to app_admin using (true);
grant update (name, endpoint_url, http_method, headers) on app_public.webhooks to app_admin;
grant select on app_public.webhooks to app_admin;
grant insert (backend_type, backend_id, name, endpoint_url, http_method, headers) on app_public.webhooks to app_admin;
grant delete on app_public.webhooks to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.webhooks
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappBots
set transform_null_equals to true;
create table app_public.whatsapp_bots
(
id uuid not null default uuid_generate_v1mc() primary key,
phone_number text not null,
token uuid not null default uuid_generate_v1mc(),
user_id uuid not null,
description text,
auth_info text,
qr_code text,
is_verified boolean not null default false,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index whatsapp_bot_token on app_public.whatsapp_bots(token);
alter table app_public.whatsapp_bots
add constraint whatsapp_bots_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
alter table app_public.whatsapp_bots
enable row level security;
create policy access_all on app_public.whatsapp_bots to app_admin using (true);
grant update (phone_number, token, user_id, description, auth_info, qr_code, is_verified) on app_public.whatsapp_bots to app_admin;
grant select on app_public.whatsapp_bots to app_admin;
grant insert (phone_number, token, user_id, description, auth_info, qr_code, is_verified) on app_public.whatsapp_bots to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.whatsapp_bots
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappMessages
create table app_public.whatsapp_messages
(
id uuid not null default uuid_generate_v1mc() primary key,
whatsapp_bot_id uuid not null,
wa_message_id text,
wa_message text,
wa_timestamp timestamptz,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index whatsapp_message_whatsapp_bot_id on app_public.whatsapp_messages(whatsapp_bot_id);
alter table app_public.whatsapp_messages
add constraint whatsapp_messages_whatsapp_bot_id_fkey foreign key ("whatsapp_bot_id") references app_public.whatsapp_bots on delete cascade;
alter table app_public.whatsapp_messages
enable row level security;
create policy access_all on app_public.whatsapp_messages to app_admin using (true);
grant update (whatsapp_bot_id, wa_message_id, wa_message, wa_timestamp) on app_public.whatsapp_messages to app_admin;
grant select on app_public.whatsapp_messages to app_admin;
grant insert (whatsapp_bot_id, wa_message_id, wa_message, wa_timestamp) on app_public.whatsapp_messages to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.whatsapp_messages
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappAttachments
create table app_public.whatsapp_attachments
(
id uuid not null default uuid_generate_v1mc() primary key,
whatsapp_bot_id uuid not null,
whatsapp_message_id uuid,
attachment bytea,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index whatsapp_attachment_whatsapp_bot_id on app_public.whatsapp_attachments(whatsapp_bot_id);
create unique index whatsapp_attachment_whatsapp_message_id on app_public.whatsapp_attachments(whatsapp_message_id);
alter table app_public.whatsapp_attachments
add constraint whatsapp_attachments_whatsapp_bot_id_fkey foreign key ("whatsapp_bot_id") references app_public.whatsapp_bots on delete cascade;
alter table app_public.whatsapp_attachments
add constraint whatsapp_attachments_whatsapp_message_id_fkey foreign key ("whatsapp_message_id") references app_public.whatsapp_messages on delete cascade;
alter table app_public.whatsapp_attachments
enable row level security;
create policy access_all on app_public.whatsapp_attachments to app_admin using (true);
grant update (whatsapp_bot_id, whatsapp_message_id, attachment) on app_public.whatsapp_attachments to app_admin;
grant select on app_public.whatsapp_attachments to app_admin;
grant insert (whatsapp_bot_id, whatsapp_message_id, attachment) on app_public.whatsapp_attachments to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.whatsapp_attachments
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region SignalBots
set transform_null_equals to true;
create table app_public.signal_bots
(
id uuid not null default uuid_generate_v1mc() primary key,
phone_number text not null,
token uuid not null default uuid_generate_v1mc(),
user_id uuid not null,
description text,
auth_info text,
is_verified boolean not null default false,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index signal_bot_token on app_public.signal_bots(token);
alter table app_public.signal_bots
add constraint signal_bots_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
alter table app_public.signal_bots
enable row level security;
create policy access_all on app_public.signal_bots to app_admin using (true);
grant update (phone_number, token, user_id, description, auth_info, is_verified) on app_public.signal_bots to app_admin;
grant select on app_public.signal_bots to app_admin;
grant insert (phone_number, token, user_id, description, auth_info, is_verified) on app_public.signal_bots to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.signal_bots
for each row
execute procedure app_private.tg__timestamps();
-- endregion

View file

@ -0,0 +1,10 @@
--! Previous: sha1:b13a5217288f5d349d8d9e3afbd7bb30c0dbad21
--! Hash: sha1:8659f815ff013a793f2e01113a9a61a98c7bd8d5
-- Enter migration here
drop table if exists app_public.whatsapp_attachments cascade;
drop table if exists app_public.whatsapp_messages cascade;
grant delete on app_public.whatsapp_bots to app_admin;
grant delete on app_public.signal_bots to app_admin;

View file

@ -0,0 +1 @@
-- Enter migration here

View file

@ -0,0 +1,39 @@
{
"name": "db",
"private": true,
"version": "0.2.0",
"main": "build/main/db/src/index.js",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"graphile-migrate": "^1.4.1"
},
"devDependencies": {
"common": "0.2.5",
"@babel/core": "7.20.12",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"@types/jest": "^29.2.5",
"eslint": "^8.32.0",
"jest": "^29.3.1",
"jest-junit": "^15.0.0",
"pino-pretty": "^9.1.1",
"prettier": "^2.8.3",
"ts-node": "^10.9.1",
"typedoc": "^0.23.24",
"typescript": "4.9.4"
},
"scripts": {
"build": "tsc -p tsconfig.json",
"build-test": "tsc -p tsconfig.json",
"doc:html": "typedoc src/ --exclude '**/*.test.ts' --exclude '**/*.spec.ts' --name $npm_package_name --readme README.md --target es2019 --mode file --out build/docs",
"doc": "yarn run doc:html",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"worker": "NODE_ENV=development yarn cli worker",
"lint:lint": "eslint src --ext .ts",
"lint:prettier": "prettier \"src/**/*.ts\" --list-different",
"lint": "npm run lint:lint && npm run lint:prettier",
"watch:build": "tsc -p tsconfig.json -w"
}
}

View file

@ -0,0 +1,9 @@
export * from "./settings";
export * from "./signal/bots";
export * from "./whatsapp/bots";
export * from "./whatsapp/messages";
export * from "./whatsapp/attachments";
export * from "./settings";
export * from "./voice/voice-line";
export * from "./voice/voice-provider";
export * from "./webhooks";

View file

@ -0,0 +1,104 @@
/* eslint-disable @typescript-eslint/explicit-module-boundary-types,@typescript-eslint/no-unused-vars,@typescript-eslint/no-explicit-any,prefer-destructuring */
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type SettingId = Flavor<UUID, "Setting Id">;
export interface UnsavedSetting<T> {
name: string;
value: T;
}
export interface SavedSetting<T> extends UnsavedSetting<T> {
id: SettingId;
createdAt: Date;
updatedAt: Date;
}
export const SettingRecord = recordInfo<UnsavedSetting<any>, SavedSetting<any>>(
"app_public",
"settings"
);
export class SettingRecordRepository extends RepositoryBase(SettingRecord) {
async findByName<T>(name: string): Promise<SavedSetting<T> | null> {
return this.db.oneOrNone("SELECT * FROM $1 $2:raw LIMIT 1", [
this.schemaTable,
this.where({ name }),
]);
}
async upsert<T>(name: string, value: T): Promise<SavedSetting<T>> {
return this.db.one(
`INSERT INTO $1 ($2:name) VALUES ($2:csv)
ON CONFLICT (name)
DO UPDATE SET value = EXCLUDED.value RETURNING *`,
[this.schemaTable, this.columnize({ name, value })]
);
}
}
// these helpers let us create type safe setting constants
export interface SettingType<T = any> {
_type: T;
}
export interface SettingInfo<T = any> extends SettingType<T> {
name: string;
}
export function castToSettingInfo(
runtimeData: Omit<SettingInfo, "_type">
): SettingInfo {
return runtimeData as SettingInfo;
}
export function settingInfo<T>(name: string): SettingInfo<T>;
// don't use this signature, use the explicit typed signature
export function settingInfo(name: string) {
return castToSettingInfo({
name,
});
}
export interface ISettingsService {
name: string;
lookup<T>(settingInfo: SettingInfo<T>): Promise<T>;
save<T>(settingInfo: SettingInfo<T>, value: T): Promise<T>;
}
export const SettingsService = (
repo: SettingRecordRepository
): ISettingsService => ({
name: "settingService",
lookup: async <T>(settingInfo: SettingInfo<T>): Promise<T> => {
const s = await repo.findByName<T>(settingInfo.name);
return s.value;
},
save: async <T>(settingInfo: SettingInfo<T>, value: T): Promise<T> => {
const s = await repo.upsert(settingInfo.name, value);
return s.value;
},
});
const _test = async () => {
// here is an example of how to use this module
// it also serves as a compile-time test case
const repo = new SettingRecordRepository({} as any);
// create your own custom setting types!
// the value is serialized as json in the database
type Custom = { foo: string; bar: string };
type CustomUnsavedSetting = UnsavedSetting<Custom>;
type CustomSetting = SavedSetting<Custom>;
const s3: CustomSetting = await repo.findByName("test");
const customValue = { foo: "monkeys", bar: "eggplants" };
let customSetting = { name: "custom", value: customValue };
customSetting = await repo.insert(customSetting);
const value: Custom = customSetting.value;
const MySetting = settingInfo<string>("my-setting");
};

View file

@ -0,0 +1,35 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type SignalBotId = Flavor<UUID, "Signal Bot Id">;
export interface UnsavedSignalBot {
phoneNumber: string;
userId: string;
description: string;
}
export interface SavedSignalBot extends UnsavedSignalBot {
id: SignalBotId;
createdAt: Date;
updatedAt: Date;
token: string;
authInfo: string;
isVerified: boolean;
}
export const SignalBotRecord = recordInfo<UnsavedSignalBot, SavedSignalBot>(
"app_public",
"signal_bots"
);
export class SignalBotRecordRepository extends RepositoryBase(SignalBotRecord) {
async updateAuthInfo(
bot: SavedSignalBot,
authInfo: string | undefined
): Promise<SavedSignalBot> {
return this.db.one(
"UPDATE $1 SET (auth_info, is_verified) = ROW($2, true) WHERE id = $3 RETURNING *",
[this.schemaTable, authInfo, bot.id]
);
}
}

View file

@ -0,0 +1,62 @@
import {
RepositoryBase,
recordInfo,
UUID,
Flavor,
} from "common";
import type { } from "pg-promise";
export type VoiceLineId = Flavor<UUID, "VoiceLine Id">;
export type VoiceLineAudio = {
"audio/webm": string;
"audio/mpeg"?: string;
checksum?: string;
};
export interface UnsavedVoiceLine {
providerId: string;
providerLineSid: string;
number: string;
language: string;
voice: string;
promptText?: string;
promptAudio?: VoiceLineAudio;
audioPromptEnabled: boolean;
audioConvertedAt?: Date;
}
export interface SavedVoiceLine extends UnsavedVoiceLine {
id: VoiceLineId;
createdAt: Date;
updatedAt: Date;
}
export const VoiceLineRecord = recordInfo<UnsavedVoiceLine, SavedVoiceLine>(
"app_public",
"voice_lines"
);
export class VoiceLineRecordRepository extends RepositoryBase(VoiceLineRecord) {
/**
* Fetch all voice lines given the numbers
* @param numbers
*/
async findAllByNumbers(numbers: string[]): Promise<SavedVoiceLine[]> {
return this.db.any(
"SELECT id,provider_id,provider_line_sid,number FROM $1 WHERE number in ($2:csv)",
[this.schemaTable, numbers]
);
}
/**
* Fetch all voice lines given a list of provider line ids
* @param ids
*/
async findAllByProviderLineSids(ids: string[]): Promise<SavedVoiceLine[]> {
return this.db.any(
"SELECT id,provider_id,provider_line_sid,number FROM $1 WHERE provider_line_sid in ($2:csv)",
[this.schemaTable, ids]
);
}
}

View file

@ -0,0 +1,52 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
/*
* VoiceProvider
*
* A provider is a company that provides incoming voice call services
*/
export type VoiceProviderId = Flavor<UUID, "VoiceProvider Id">;
export enum VoiceProviderKinds {
TWILIO = "TWILIO",
}
export type TwilioCredentials = {
accountSid: string;
apiKeySid: string;
apiKeySecret: string;
};
// expand this type later when we support more providers
export type VoiceProviderCredentials = TwilioCredentials;
export interface UnsavedVoiceProvider {
kind: VoiceProviderKinds;
name: string;
credentials: VoiceProviderCredentials;
}
export interface SavedVoiceProvider extends UnsavedVoiceProvider {
id: VoiceProviderId;
createdAt: Date;
updatedAt: Date;
}
export const VoiceProviderRecord = recordInfo<
UnsavedVoiceProvider,
SavedVoiceProvider
>("app_public", "voice_providers");
export class VoiceProviderRecordRepository extends RepositoryBase(
VoiceProviderRecord
) {
async findByTwilioAccountSid(
accountSid: string
): Promise<SavedVoiceProvider | null> {
return this.db.oneOrNone(
"select * from $1 where credentials->>'accountSid' = $2",
[this.schemaTable, accountSid]
);
}
}
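
A sketch of how the Twilio lookup might anchor inbound webhook handling: resolve the provider row by the AccountSid Twilio sends, then hand back its stored credentials. Illustrative only; the actual request-validation wiring is not shown in this changeset:

import {
  VoiceProviderRecordRepository,
  TwilioCredentials,
} from "./voiceProvider"; // assumed path

export async function credentialsForAccount(
  repo: VoiceProviderRecordRepository,
  accountSid: string
): Promise<TwilioCredentials | null> {
  const provider = await repo.findByTwilioAccountSid(accountSid);
  // credentials is typed as VoiceProviderCredentials, currently Twilio-only
  return provider ? provider.credentials : null;
}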

View file

@ -0,0 +1,50 @@
import {
RepositoryBase,
recordInfo,
UUID,
Flavor,
} from "common";
/*
* Webhook
*
* A webhook allows external services to be notified when a recorded call is available
*/
export type WebhookId = Flavor<UUID, "Webhook Id">;
export interface HttpHeaders {
header: string;
value: string;
}
export interface UnsavedWebhook {
name: string;
voiceLineId: string;
endpointUrl: string;
httpMethod: "post" | "put";
headers?: HttpHeaders[];
}
export interface SavedWebhook extends UnsavedWebhook {
id: WebhookId;
createdAt: Date;
updatedAt: Date;
}
export const WebhookRecord = recordInfo<UnsavedWebhook, SavedWebhook>(
"app_public",
"webhooks"
);
export class WebhookRecordRepository extends RepositoryBase(WebhookRecord) {
async findAllByBackendId(
backendType: string,
backendId: string
): Promise<SavedWebhook[]> {
return this.db.any(
"select * from $1 where backend_type = $2 and backend_id = $3",
[this.schemaTable, backendType, backendId]
);
}
}
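
The headers array maps directly onto HTTP request headers; a sketch of how a delivery might consume a SavedWebhook (the fetch-based dispatch is illustrative, assuming Node 18+, not the project's actual dispatcher):

import type { SavedWebhook } from "./webhook"; // assumed path

export async function deliver(
  hook: SavedWebhook,
  payload: unknown
): Promise<number> {
  const headers = new Headers({ "content-type": "application/json" });
  // apply any custom headers stored with the webhook
  for (const h of hook.headers ?? []) headers.set(h.header, h.value);
  const res = await fetch(hook.endpointUrl, {
    method: hook.httpMethod.toUpperCase(), // "post" | "put"
    headers,
    body: JSON.stringify(payload),
  });
  return res.status;
}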

View file

@ -0,0 +1,24 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type WhatsappAttachmentId = Flavor<UUID, "Whatsapp Attachment Id">;
export interface UnsavedWhatsappAttachment {
whatsappBotId: string;
whatsappMessageId: string;
attachment: Buffer;
}
export interface SavedWhatsappAttachment extends UnsavedWhatsappAttachment {
id: WhatsappAttachmentId;
createdAt: Date;
updatedAt: Date;
}
export const WhatsappAttachmentRecord = recordInfo<
UnsavedWhatsappAttachment,
SavedWhatsappAttachment
>("app_public", "whatsapp_attachments");
export class WhatsappAttachmentRecordRepository extends RepositoryBase(
WhatsappAttachmentRecord
) { }
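
Since this repository adds no custom queries, persistence goes through the base helpers; a sketch assuming RepositoryBase exposes the insert method used elsewhere in this changeset:

// Hypothetical usage: persist a downloaded attachment blob.
import {
  WhatsappAttachmentRecordRepository,
  SavedWhatsappAttachment,
} from "./whatsappAttachment"; // assumed path

export async function saveAttachment(
  repo: WhatsappAttachmentRecordRepository,
  whatsappBotId: string,
  whatsappMessageId: string,
  bytes: Buffer
): Promise<SavedWhatsappAttachment> {
  return repo.insert({ whatsappBotId, whatsappMessageId, attachment: bytes });
}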

View file

@ -0,0 +1,48 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type WhatsappBotId = Flavor<UUID, "Whatsapp Bot Id">;
export interface UnsavedWhatsappBot {
phoneNumber: string;
userId: string;
description: string;
}
export interface SavedWhatsappBot extends UnsavedWhatsappBot {
id: WhatsappBotId;
createdAt: Date;
updatedAt: Date;
token: string;
authInfo: string;
qrCode: string;
isVerified: boolean;
}
export const WhatsappBotRecord = recordInfo<
UnsavedWhatsappBot,
SavedWhatsappBot
>("app_public", "whatsapp_bots");
export class WhatsappBotRecordRepository extends RepositoryBase(
WhatsappBotRecord
) {
async updateQR(
bot: SavedWhatsappBot,
qrCode: string | undefined
): Promise<SavedWhatsappBot> {
return this.db.one(
"UPDATE $1 SET (qr_code) = ROW($2) WHERE id = $3 RETURNING *",
[this.schemaTable, qrCode, bot.id]
);
}
async updateAuthInfo(
bot: SavedWhatsappBot,
authInfo: string | undefined
): Promise<SavedWhatsappBot> {
return this.db.one(
"UPDATE $1 SET (auth_info, is_verified) = ROW($2, true) WHERE id = $3 RETURNING *",
[this.schemaTable, authInfo, bot.id]
);
}
}
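
A sketch of the pairing flow these helpers support: persist the QR payload while waiting for the user to scan, then store auth info (which also marks the bot verified). The orchestration is illustrative, not the project's actual worker logic:

import {
  WhatsappBotRecordRepository,
  SavedWhatsappBot,
} from "./whatsappBot"; // assumed path

export async function completePairing(
  repo: WhatsappBotRecordRepository,
  bot: SavedWhatsappBot,
  qrCode: string,
  waitForScan: () => Promise<string> // resolves with serialized auth info
): Promise<SavedWhatsappBot> {
  // show the QR to the user via the stored record
  const pending = await repo.updateQR(bot, qrCode);
  // once scanned, persist auth info; updateAuthInfo sets is_verified = true
  const authInfo = await waitForScan();
  return repo.updateAuthInfo(pending, authInfo);
}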

View file

@ -0,0 +1,26 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type WhatsappMessageId = Flavor<UUID, "Whatsapp Message Id">;
export interface UnsavedWhatsappMessage {
whatsappBotId: string;
waMessageId: string;
waTimestamp: Date;
waMessage: string;
attachments?: string[];
}
export interface SavedWhatsappMessage extends UnsavedWhatsappMessage {
id: WhatsappMessageId;
createdAt: Date;
updatedAt: Date;
}
export const WhatsappMessageRecord = recordInfo<
UnsavedWhatsappMessage,
SavedWhatsappMessage
>("app_public", "whatsapp_messages");
export class WhatsappMessageRecordRepository extends RepositoryBase(
WhatsappMessageRecord
) { }
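
The record mirrors the upstream WhatsApp payload (the wa-prefixed fields) plus our own ids; a hedged sketch of mapping an incoming event into an UnsavedWhatsappMessage (the event shape here is hypothetical):

import type { UnsavedWhatsappMessage } from "./whatsappMessage"; // assumed path

// Hypothetical upstream event shape, for illustration only.
interface IncomingWaEvent {
  id: string;
  timestamp: number; // unix seconds
  body: string;
}

export function toRecord(
  whatsappBotId: string,
  event: IncomingWaEvent
): UnsavedWhatsappMessage {
  return {
    whatsappBotId,
    waMessageId: event.id,
    waTimestamp: new Date(event.timestamp * 1000),
    waMessage: event.body,
  };
}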

View file

@ -0,0 +1,28 @@
#!/bin/bash
set -eu
psql -Xv ON_ERROR_STOP=1 "${GM_DBURL}" <<EOF
INSERT INTO app_public.users(email, name, user_role, is_active, created_by)
VALUES('abel@guardianproject.info', 'Abel', 'admin'::app_public.role_type, true, 'afterCurrent Hook')
on conflict (email) do nothing;
INSERT INTO app_public.users(email, name, user_role, is_active, created_by)
VALUES('darren@redaranj.com', 'Darren', 'admin'::app_public.role_type, true, 'afterCurrent Hook')
on conflict (email) do nothing;
INSERT INTO app_public.users(email, name, user_role, is_active, created_by)
VALUES('jking@chambana.net', 'Josh', 'admin'::app_public.role_type, true, 'afterCurrent Hook')
on conflict (email) do nothing;
INSERT INTO app_public.settings(name, value)
VALUES('app-setting', to_jsonb('this is a setting value stored as json text'::text))
on conflict (name) do nothing;
EOF
if [[ -f "${PWD}/scripts/afterCurrent-private.sh" ]]; then
# shellcheck source=/dev/null
source "${PWD}/scripts/afterCurrent-private.sh"
fi

View file

@ -0,0 +1,12 @@
REVOKE ALL ON DATABASE :DATABASE_NAME FROM PUBLIC;
GRANT CONNECT ON DATABASE :DATABASE_NAME TO :DATABASE_OWNER;
GRANT CONNECT ON DATABASE :DATABASE_NAME TO :DATABASE_AUTHENTICATOR;
GRANT ALL ON DATABASE :DATABASE_NAME TO :DATABASE_OWNER;
grant app_anonymous to :DATABASE_VISITOR;
grant app_user to :DATABASE_VISITOR;
grant app_admin to :DATABASE_VISITOR;
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS tablefunc WITH SCHEMA public;

View file

@ -0,0 +1,69 @@
#!/bin/bash
set -eu
DATABASE_HOST=${POSTGRES_HOST:-}
DATABASE_PORT=${POSTGRES_PORT:-5432}
DATABASE_SUPERUSER=${POSTGRES_USER:-postgres}
DATABASE_SUPERUSER_PASSWORD=${POSTGRES_PASSWORD:-metamigo}
export PGPASSWORD=$DATABASE_SUPERUSER_PASSWORD
# this script is run under two circumstances: with a local postgres and a remote postgres
# local postgres: we should use the unix domain socket to connect
# remote postgres: we should pass the --host param
HOST_PARAM="--host="
if [[ -n "${DATABASE_HOST}" ]]; then
HOST_PARAM="--host=${DATABASE_HOST}"
fi
# wait for postgres process to settle
set +e
echo "pg_isready $HOST_PARAM --username $POSTGRES_USER --dbname template1"
pg_isready "$HOST_PARAM" --username "$POSTGRES_USER" --dbname template1
while ! pg_isready "$HOST_PARAM" --username "$POSTGRES_USER" --dbname template1; do
echo "$(date) - waiting for database to start"
sleep 10
done
set -e
echo
echo
echo "Creating the database and the roles"
# We're using 'template1' because we know it should exist. We should not actually change this database.
psql -Xv ON_ERROR_STOP=1 "$HOST_PARAM" --username "$DATABASE_SUPERUSER" --dbname template1 <<EOF
CREATE ROLE ${DATABASE_OWNER} WITH LOGIN PASSWORD '${DATABASE_OWNER_PASSWORD}';
GRANT ${DATABASE_OWNER} TO ${DATABASE_SUPERUSER};
CREATE ROLE ${DATABASE_AUTHENTICATOR} WITH LOGIN PASSWORD '${DATABASE_AUTHENTICATOR_PASSWORD}' NOINHERIT;
CREATE ROLE ${DATABASE_VISITOR};
GRANT ${DATABASE_VISITOR} TO ${DATABASE_AUTHENTICATOR};
-- Create database
CREATE DATABASE ${DATABASE_NAME} OWNER ${DATABASE_OWNER};
-- Database permissions
REVOKE ALL ON DATABASE ${DATABASE_NAME} FROM PUBLIC;
GRANT ALL ON DATABASE ${DATABASE_NAME} TO ${DATABASE_OWNER};
GRANT CONNECT ON DATABASE ${DATABASE_NAME} TO ${DATABASE_AUTHENTICATOR};
EOF
echo
echo
echo "Installing extensions into the database"
psql -Xv ON_ERROR_STOP=1 "$HOST_PARAM" --username "$DATABASE_SUPERUSER" --dbname "$DATABASE_NAME" <<EOF
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS tablefunc WITH SCHEMA public;
EOF
echo
echo
echo "Creating roles in the database"
psql -Xv ON_ERROR_STOP=1 "$HOST_PARAM" --username "$DATABASE_SUPERUSER" --dbname "$DATABASE_NAME" <<EOF
CREATE ROLE app_anonymous;
CREATE ROLE app_user WITH IN ROLE app_anonymous;
CREATE ROLE app_admin WITH IN ROLE app_user;
GRANT app_anonymous TO ${DATABASE_AUTHENTICATOR};
GRANT app_admin TO ${DATABASE_AUTHENTICATOR};
EOF

View file

@ -0,0 +1,17 @@
#!/usr/bin/env bash
if [ "$GM_DBURL" = "" ]; then
echo "This script should only be ran from inside graphile-migrate";
exit 1;
fi
export COMPOSE_PROJECT_NAME
# When run inside docker-compose, we need to be able to run a different pg_dump binary
${PG_DUMP:-pg_dump} \
--no-sync \
--schema-only \
--no-owner \
--exclude-schema=graphile_migrate \
--exclude-schema=graphile_worker \
--file=../../data/schema.sql \
"$GM_DBURL"

View file

@ -0,0 +1,31 @@
const { spawn } = require("child_process");
const findWorkspaceRoot = require("find-yarn-workspace-root");
if (process.env.CI) {
process.exit(0);
}
const connectionString = process.env.GM_DBURL;
if (!connectionString) {
console.error(
"This script should only be called from a graphile-migrate action."
);
process.exit(1);
}
spawn(
process.env.PG_DUMP || "pg_dump",
[
"--no-sync",
"--schema-only",
"--no-owner",
"--exclude-schema=graphile_migrate",
"--exclude-schema=graphile_worker",
`--file=${findWorkspaceRoot()}/data/schema.sql`,
connectionString,
],
{
stdio: "inherit",
shell: true,
}
);

View file

@ -0,0 +1,8 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "build/main"
},
"include": ["**/*.ts", "**/.*.ts"],
"exclude": ["node_modules", "**/*.spec.ts", "**/*.test.ts"]
}