Add all repos

Darren Clarke 2023-02-13 12:41:30 +00:00
parent faa12c60bc
commit 8a91c9b89b
369 changed files with 29047 additions and 28 deletions

metamigo-db/.eslintrc.js Normal file

@@ -0,0 +1 @@
require("../.eslintrc.js");

metamigo-db/.gmrc Normal file

@@ -0,0 +1,117 @@
/*
* Graphile Migrate configuration.
*
* MUST NOT CONTAIN SECRETS/PASSWORDS
* This file is in JSON5 format.
*/
{
/*
* Database connection strings are sourced from the DATABASE_URL,
* SHADOW_DATABASE_URL and ROOT_DATABASE_URL environmental variables.
*/
/*
* pgSettings: key-value settings to be automatically loaded into PostgreSQL
* before running migrations, using an equivalent of `SET LOCAL <key> TO
* <value>`
*/
"pgSettings": {
"search_path": "public",
},
/*
* placeholders: substituted in SQL files when compiled/executed. Placeholder
* keys should be prefixed with a colon and in all caps, like
* `:COLON_PREFIXED_ALL_CAPS`. Placeholder values should be strings. They
* will be replaced verbatim with NO ESCAPING AT ALL (this differs from how
* psql handles placeholders) so should only be used with "safe" values. This
* is useful for committing migrations where certain parameters can change
* between environments (development, staging, production) but you wish to
* use the same signed migration files for all.
*
* The special value "!ENV" can be used to indicate an environmental variable
* of the same name should be used.
*
* Graphile Migrate automatically sets the `:DATABASE_NAME` and
* `:DATABASE_OWNER` placeholders, and you should not attempt to override
* these.
*/
"placeholders": {
":DATABASE_VISITOR": "!ENV",
":DATABASE_AUTHENTICATOR": "!ENV",
},
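/*
 * Example (illustrative): with the mapping above, a committed migration can
 * reference a placeholder directly in its SQL, e.g.
 * `grant usage on schema app_public to :DATABASE_VISITOR;`,
 * and graphile-migrate substitutes the environment's value before executing.
 */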
/*
* Actions allow you to run scripts or commands at certain points in the
* migration lifecycle. SQL files are run against the database directly.
* "command" actions are run with the following environmental variables set:
*
* - GM_DBURL: the PostgreSQL URL of the database being migrated
* - GM_DBNAME: the name of the database from GM_DBURL
* - GM_DBUSER: the user from GM_DBURL
* - GM_SHADOW: set to 1 if the shadow database is being migrated, left unset
* otherwise
*
* If "shadow" is unspecified, the actions will run on events to both shadow
* and normal databases. If "shadow" is true the action will only run on
* actions to the shadow DB, and if false only on actions to the main DB.
*/
/*
* afterReset: actions executed after a `graphile-migrate reset` command.
*/
"afterReset": [
"!../scripts/afterReset.sql",
],
/*
* afterAllMigrations: actions executed once all migrations are complete.
*/
"afterAllMigrations": [
{
"_": "command",
"shadow": true,
"command": "node scripts/dump-db.js"
},
],
/*
* afterCurrent: actions executed once the current migration has been
* evaluated (i.e. in watch mode).
*/
"afterCurrent": [
{
"_": "command",
"command": "./scripts/afterCurrent.sh",
}
],
/*
* blankMigrationContent: content to be written to the current migration
* after commit. NOTE: this should only contain comments.
*/
// "blankMigrationContent": "-- Write your migration here\n",
/****************************************************************************\
*** ***
*** You probably don't want to edit anything below here. ***
*** ***
\****************************************************************************/
/*
* manageGraphileMigrateSchema: if you set this false, you must be sure to
* keep the graphile_migrate schema up to date yourself. We recommend you
* leave it at its default.
*/
// "manageGraphileMigrateSchema": true,
/*
* migrationsFolder: path to the folder in which to store your migrations.
*/
// migrationsFolder: "./migrations",
"//generatedWith": "1.0.2"
}

metamigo-db/Dockerfile Normal file

@@ -0,0 +1,2 @@
FROM postgres:13
COPY scripts/bootstrap.sh /docker-entrypoint-initdb.d/bootstrap.sh

metamigo-db/helpers.ts Normal file

@@ -0,0 +1,67 @@
import process from "process";
import { existsSync } from "fs";
import { exec } from "child_process";
import type { IAppConfig } from "config";
/**
* We use graphile-migrate for managing database migrations.
*
* However, we also use convict as the sole source of truth for our app's configuration, and we do not
* want to maintain separate env or config files for graphile-migrate on top of the ones for convict.
*
* So we wrap the graphile-migrate CLI tool here: we parse our convict config, set the necessary env
* vars, and then shell out to graphile-migrate.
*
* Commander eats all args starting with --, so you must use the -- escape to indicate that option parsing has finished.
*
* Example:
* ./cli db -- --help // will show graphile migrate help
* ./cli db -- watch // will watch the current sql for changes
* ./cli db -- watch --once // will apply the current sql once
*/
export const migrateWrapper = async (
commands: string[],
config: IAppConfig,
silent = false
): Promise<void> => {
const env = {
DATABASE_URL: config.db.connection,
SHADOW_DATABASE_URL: config.dev.shadowConnection,
ROOT_DATABASE_URL: config.dev.rootConnection,
DATABASE_NAME: config.db.name,
DATABASE_OWNER: config.db.owner,
DATABASE_AUTHENTICATOR: config.postgraphile.auth,
DATABASE_VISITOR: config.postgraphile.visitor,
};
const cmd = `npx --no-install graphile-migrate ${commands.join(" ")}`;
const dbDir = `../../db`;
const gmrc = `${dbDir}/.gmrc`;
if (!existsSync(gmrc)) {
throw new Error(`graphile migrate config not found at ${gmrc}`);
}
if (!silent) console.log("executing:", cmd);
return new Promise((resolve, reject) => {
const proc = exec(cmd, {
env: { ...process.env, ...env },
cwd: dbDir,
});
proc.stdout?.on("data", (data) => {
if (!silent) console.log("MIGRATE:", data);
});
proc.stderr?.on("data", (data) => {
console.error("MIGRATE", data);
});
proc.on("close", (code) => {
if (code !== 0) {
reject(new Error(`graphile-migrate exited with code ${code}`));
return;
}
resolve();
});
});
};
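// Usage sketch (assumes a convict-parsed `config` object as described above):
//   await migrateWrapper(["watch", "--once"], config); // apply current.sql once
//   await migrateWrapper(["migrate"], config, true);   // run committed migrations, silently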

metamigo-db/index.ts Normal file

@@ -0,0 +1,89 @@
import { IAppConfig } from "config";
import camelcaseKeys from "camelcase-keys";
import PgSimplifyInflectorPlugin from "@graphile-contrib/pg-simplify-inflector";
// import PgManyToManyPlugin from "@graphile-contrib/pg-many-to-many";
import * as ConnectionFilterPlugin from "postgraphile-plugin-connection-filter";
import type { PostGraphileCoreOptions } from "postgraphile-core";
import {
UserRecordRepository,
AccountRecordRepository,
SessionRecordRepository,
} from "common";
import {
SettingRecordRepository,
VoiceProviderRecordRepository,
VoiceLineRecordRepository,
WebhookRecordRepository,
WhatsappBotRecordRepository,
WhatsappMessageRecordRepository,
WhatsappAttachmentRecordRepository,
SignalBotRecordRepository,
} from "./records";
import type { IInitOptions, IDatabase } from "pg-promise";
export interface IRepositories {
users: UserRecordRepository;
sessions: SessionRecordRepository;
accounts: AccountRecordRepository;
settings: SettingRecordRepository;
voiceLines: VoiceLineRecordRepository;
voiceProviders: VoiceProviderRecordRepository;
webhooks: WebhookRecordRepository;
whatsappBots: WhatsappBotRecordRepository;
whatsappMessages: WhatsappMessageRecordRepository;
whatsappAttachments: WhatsappAttachmentRecordRepository;
signalBots: SignalBotRecordRepository;
}
export type AppDatabase = IDatabase<IRepositories> & IRepositories;
export const dbInitOptions = (
_config: IAppConfig
): IInitOptions<IRepositories> => {
return {
noWarnings: true,
receive(data, result) {
if (result) result.rows = camelcaseKeys(data);
},
// Extending the database protocol with our custom repositories;
// API: http://vitaly-t.github.io/pg-promise/global.html#event:extend
extend(obj: any, _dc) { // obj is effectively an AppDatabase
// Database Context (_dc) is mainly needed for extending multiple databases with different access API.
// NOTE:
// This event occurs for every task and transaction being executed (which could be every request!)
// so it should be as fast as possible. Do not use 'require()' or do any other heavy lifting.
obj.users = new UserRecordRepository(obj);
obj.sessions = new SessionRecordRepository(obj);
obj.accounts = new AccountRecordRepository(obj);
obj.settings = new SettingRecordRepository(obj);
obj.voiceLines = new VoiceLineRecordRepository(obj);
obj.voiceProviders = new VoiceProviderRecordRepository(obj);
obj.webhooks = new WebhookRecordRepository(obj);
obj.whatsappBots = new WhatsappBotRecordRepository(obj);
obj.whatsappMessages = new WhatsappMessageRecordRepository(obj);
obj.whatsappAttachments = new WhatsappAttachmentRecordRepository(obj);
obj.signalBots = new SignalBotRecordRepository(obj);
},
};
};
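// Wiring sketch (hypothetical; the actual initialization lives in the app entrypoint):
//   import pgPromise from "pg-promise";
//   const pgp = pgPromise(dbInitOptions(config));
//   const db = pgp(config.db.connection) as AppDatabase;
//   const setting = await db.settings.findByName("app-setting");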
export const getPostGraphileOptions = (): PostGraphileCoreOptions => {
return {
ignoreRBAC: false,
dynamicJson: true,
ignoreIndexes: false,
appendPlugins: [
PgSimplifyInflectorPlugin,
// PgManyToManyPlugin,
ConnectionFilterPlugin as any,
],
};
};
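// Schema-build sketch (assumes a pg Pool; server setup lives elsewhere):
//   import { createPostGraphileSchema } from "postgraphile-core";
//   const schema = await createPostGraphileSchema(pool, ["app_public"], getPostGraphileOptions());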
export * from "./helpers";
export * from "./records";


@@ -0,0 +1,650 @@
--! Previous: -
--! Hash: sha1:b13a5217288f5d349d8d9e3afbd7bb30c0dbad21
-- region Bootstrap
drop schema if exists app_public cascade;
alter default privileges revoke all on sequences from public;
alter default privileges revoke all on functions from public;
-- By default the public schema is owned by `postgres`; we need superuser privileges to change this :(
-- alter schema public owner to waterbear;
revoke all on schema public from public;
grant all on schema public to :DATABASE_OWNER;
create schema app_public;
grant usage on schema
public,
app_public
to
:DATABASE_VISITOR,
app_admin,
app_anonymous,
app_user;
/**********/
drop schema if exists app_hidden cascade;
create schema app_hidden;
grant usage on schema app_hidden to :DATABASE_VISITOR;
alter default privileges in schema app_hidden grant usage, select on sequences to :DATABASE_VISITOR;
/**********/
alter default privileges in schema public, app_public, app_hidden grant usage, select on sequences to :DATABASE_VISITOR;
alter default privileges in schema public, app_public, app_hidden
grant execute on functions to
:DATABASE_VISITOR,
app_admin,
app_user;
/**********/
drop schema if exists app_private cascade;
create schema app_private;
-- endregion
-- region UtilFunctions
create function app_private.tg__add_job() returns trigger as
$$
begin
perform graphile_worker.add_job(tg_argv[0], json_build_object('id', NEW.id),
coalesce(tg_argv[1], public.gen_random_uuid()::text));
return NEW;
end;
$$ language plpgsql volatile
security definer
set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__add_job() is
E'Useful shortcut to create a job on insert/update. Pass the task name as the first trigger argument, and optionally the queue name as the second argument. The record id will automatically be available on the JSON payload.';
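-- Example (sketch; `app_public.things` is a hypothetical table):
--   create trigger _500_add_job after insert on app_public.things
--     for each row execute procedure app_private.tg__add_job('things__insert');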
/* ------------------------------------------------------------------ */
create function app_private.tg__timestamps() returns trigger as
$$
begin
NEW.created_at = (case when TG_OP = 'INSERT' then NOW() else OLD.created_at end);
NEW.updated_at = (case
when TG_OP = 'UPDATE' and OLD.updated_at >= NOW()
then OLD.updated_at + interval '1 millisecond'
else NOW() end);
return NEW;
end;
$$ language plpgsql volatile
set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__timestamps() is
E'This trigger should be called on all tables with created_at, updated_at - it ensures that they cannot be manipulated and that updated_at will always be larger than the previous updated_at.';
-- endregion
-- region Users, Sessions, and Accounts
/* ------------------------------------------------------------------ */
create table app_private.sessions
(
id uuid not null default gen_random_uuid() primary key,
user_id uuid not null,
expires timestamptz not null,
session_token text not null,
access_token text not null,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
last_active_at timestamptz not null default now()
);
create unique index session_token on app_private.sessions(session_token);
create unique index access_token on app_private.sessions(access_token);
alter table app_private.sessions
enable row level security;
/* ------------------------------------------------------------------ */
create function app_public.current_session_id() returns uuid as
$$
-- note: jwt.claims.session_id doesn't mean you have to use JWT; it is simply where this function always looks for the session id.
select nullif(pg_catalog.current_setting('jwt.claims.session_id', true), '')::uuid;
$$ language sql stable;
comment on function app_public.current_session_id() is
E'Handy method to get the current session ID.';
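-- Example (sketch): the API server is expected to set this per transaction, e.g.
--   select set_config('jwt.claims.session_id', '<session uuid>', true);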
/*
* A less secure but more performant version of this function would be just:
*
* select nullif(pg_catalog.current_setting('jwt.claims.user_id', true), '')::int;
*
* The increased security of this implementation is because even if someone gets
* the ability to run SQL within this transaction they cannot impersonate
* another user without knowing their session_id (which should be closely
* guarded).
*/
create function app_public.current_user_id() returns uuid as
$$
select user_id
from app_private.sessions
where id = app_public.current_session_id();
$$ language sql stable
security definer
set search_path to pg_catalog, public, pg_temp;
comment on function app_public.current_user_id() is
E'Handy method to get the current user ID for use in RLS policies, etc; in GraphQL, use `currentUser{id}` instead.';
-- We've put this in public, but omitted it, because it's often useful for debugging auth issues.
/* ------------------------------------------------------------------ */
-- These are the user roles for our application
create type app_public.role_type as
enum ('none', 'admin', 'user');
/* ------------------------------------------------------------------ */
create table app_public.users
(
id uuid not null default uuid_generate_v1mc() primary key,
email citext not null,
email_verified timestamptz,
name text not null,
avatar text,
user_role app_public.role_type not null default 'none',
is_active boolean not null default false,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
created_by text not null,
constraint users_email_validity check (email ~* '^[A-Za-z0-9._%-]+@[A-Za-z0-9.-]+[.][A-Za-z]+$'),
constraint users_avatar_validity check (avatar ~ '^https?://[^/]+'),
constraint users_email_unique unique (email)
);
comment on table app_public.users is
E'A user who can log in to the application.';
comment on column app_public.users.id is
E'Unique identifier for the user.';
comment on column app_public.users.email is
E'The email address of the user.';
comment on column app_public.users.email_verified is
E'The time at which the email address was verified.';
comment on column app_public.users.name is
E'Public-facing name (or pseudonym) of the user.';
comment on column app_public.users.avatar is
E'Optional avatar URL.';
comment on column app_public.users.user_role is
E'The role that defines the user''s privileges.';
comment on column app_public.users.is_active is
E'If false, the user is not allowed to log in or access the application.';
alter table app_public.users
enable row level security;
alter table app_private.sessions
add constraint sessions_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
create index on app_private.sessions (user_id);
-- app_public perms default
create policy access_self on app_public.users to app_anonymous using (id = app_public.current_user_id());
--create policy update_self on app_public.users for update using (id = app_public.current_user_id());
grant select on app_public.users to app_anonymous;
grant update (name, avatar) on app_public.users to :DATABASE_VISITOR, app_user;
-- app_public perms for app_admin
create policy access_all on app_public.users to app_admin using (true);
grant update (email, name, avatar, is_active, user_role) on app_public.users to app_admin;
grant select on app_public.users to app_admin;
grant insert (email, name, avatar, user_role, is_active, created_by) on app_public.users to app_admin;
grant update (email, name, avatar, user_role, is_active, created_by) on app_public.users to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.users
for each row
execute procedure app_private.tg__timestamps();
/* ------------------------------------------------------------------ */
create function app_public.current_user() returns app_public.users as
$$
select users.*
from app_public.users
where id = app_public.current_user_id();
$$ language sql stable;
comment on function app_public.current_user() is
E'The currently logged in user (or null if not logged in).';
/* ------------------------------------------------------------------ */
create function app_public.logout() returns void as
$$
begin
-- Delete the session
delete from app_private.sessions where id = app_public.current_session_id();
-- Clear the identifier from the transaction
perform set_config('jwt.claims.session_id', '', true);
end;
$$ language plpgsql security definer
volatile
set search_path to pg_catalog, public, pg_temp;
/* ------------------------------------------------------------------ */
create table app_public.accounts
(
id uuid not null default uuid_generate_v1mc() primary key,
compound_id text not null,
user_id uuid not null,
provider_type text not null,
provider_id text not null,
provider_account_id text not null,
refresh_token text,
access_token text,
access_token_expires timestamptz,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
alter table app_public.accounts
enable row level security;
alter table app_public.accounts
add constraint accounts_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
create unique index accounts_compound_id on app_public.accounts(compound_id);
create index accounts_provider_account_id on app_public.accounts(provider_account_id);
create index accounts_provider_id on app_public.accounts(provider_id);
create index accounts_user_id on app_public.accounts (user_id);
create policy access_self on app_public.accounts to app_anonymous using (user_id = app_public.current_user_id());
grant select on app_public.accounts to app_anonymous;
grant update (compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_user;
create policy access_all on app_public.accounts to app_admin using (true);
grant update (compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_admin;
grant select on app_public.accounts to app_admin;
grant insert (user_id, compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_admin;
grant update (compound_id, provider_type, provider_id, provider_account_id, refresh_token, access_token, access_token_expires) on app_public.accounts to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.accounts
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Create first user function
create or replace function app_public.create_first_user (user_email text, user_name text)
returns setof app_public.users
as
$$
declare
user_count int;
begin
user_count := (select count(id) from app_public.users);
if (user_count != 0) then
raise exception 'Admin user already created';
end if;
return query insert into app_public.users (email, email_verified, name, user_role, is_active, created_by)
values (user_email, now(), user_name, 'admin', true, 'first user hook') returning *;
end;
$$ language plpgsql volatile
security definer;
comment on function app_public.create_first_user(user_email text, user_name text) is
E'Creates the first user with an admin role. Only possible when there are no other users in the database.';
grant execute on function app_public.create_first_user(user_email text, user_name text) to app_anonymous;
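-- Example (illustrative values):
--   select * from app_public.create_first_user('admin@example.org', 'Admin');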
create function app_private.tg__first_user() returns trigger as
$$
declare
user_count int;
begin
user_count := (select count(id) from app_public.users);
if (user_count = 0) then
NEW.user_role = 'admin';
end if;
return NEW;
end;
$$ language plpgsql volatile
set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__first_user() is
E'This trigger is called to ensure the first user created is an admin.';
create trigger _101_first_user
before insert
on app_public.users
for each row
execute procedure app_private.tg__first_user();
-- endregion
-- region Settings
create table app_public.settings
(
id uuid not null default uuid_generate_v1mc() primary key,
name text not null,
value jsonb,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index setting_name on app_public.settings(name);
alter table app_public.settings
enable row level security;
create policy access_all on app_public.settings to app_admin using (true);
grant update (name, value) on app_public.settings to app_admin;
grant select on app_public.settings to app_admin;
grant insert (name, value) on app_public.settings to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.settings
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Provider
create table app_public.voice_providers
(
id uuid not null default uuid_generate_v1mc() primary key,
kind text not null,
name text not null,
credentials jsonb not null,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index voice_providers_name on app_public.voice_providers(name);
alter table app_public.voice_providers
enable row level security;
create policy access_all on app_public.voice_providers to app_admin using (true);
grant update (name, credentials) on app_public.voice_providers to app_admin;
grant select on app_public.voice_providers to app_admin;
grant insert (kind, name, credentials) on app_public.voice_providers to app_admin;
grant delete on app_public.voice_providers to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.voice_providers
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region Voice Line
create table app_public.voice_lines
(
id uuid not null default uuid_generate_v1mc() primary key,
provider_id uuid not null,
provider_line_sid text not null,
number text not null,
language text not null,
voice text not null,
prompt_text text,
prompt_audio jsonb,
audio_prompt_enabled boolean not null default false,
audio_converted_at timestamptz,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
alter table app_public.voice_lines
add constraint voice_lines_provider_id_fkey foreign key ("provider_id") references app_public.voice_providers on delete cascade;
create index on app_public.voice_lines (provider_id);
create index on app_public.voice_lines (provider_line_sid);
create unique index voice_lines_number on app_public.voice_lines(number);
alter table app_public.voice_lines
enable row level security;
create policy access_all on app_public.voice_lines to app_admin using (true);
grant update (prompt_text, prompt_audio, audio_prompt_enabled, language, voice) on app_public.voice_lines to app_admin;
grant select on app_public.voice_lines to app_admin;
grant insert (provider_id, provider_line_sid, number, prompt_text, prompt_audio, audio_prompt_enabled, language, voice) on app_public.voice_lines to app_admin;
grant delete on app_public.voice_lines to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.voice_lines
for each row
execute procedure app_private.tg__timestamps();
create function app_private.tg__voice_line_provider_update() returns trigger as $$
begin
if (TG_OP = 'DELETE') then
perform graphile_worker.add_job('voice-line-delete', json_build_object('voiceLineId', OLD.id, 'providerId', OLD.provider_id, 'providerLineSid', OLD.provider_line_sid));
else
perform graphile_worker.add_job('voice-line-provider-update', json_build_object('voiceLineId', NEW.id));
end if;
return null;
end;
$$ language plpgsql volatile security definer set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__voice_line_provider_update() is
E'This trigger is called to ensure a voice line is connected to Twilio properly.';
create trigger _101_voice_line_provider_update
after insert or update of provider_line_sid or delete
on app_public.voice_lines
for each row
execute procedure app_private.tg__voice_line_provider_update();
create function app_private.tg__voice_line_prompt_audio_update() returns trigger as $$
begin
perform graphile_worker.add_job('voice-line-audio-update', json_build_object('voiceLineId', NEW.id));
return null;
end;
$$ language plpgsql volatile security definer set search_path to pg_catalog, public, pg_temp;
comment on function app_private.tg__voice_line_prompt_audio_update() is
E'This trigger is called to ensure a voice line''s prompt audio is processed when it changes.';
create trigger _101_voice_line_prompt_audio_update
after insert or update of prompt_audio
on app_public.voice_lines
for each row
execute procedure app_private.tg__voice_line_prompt_audio_update();
-- endregion
-- region Webhooks
create table app_public.webhooks
(
id uuid not null default uuid_generate_v1mc() primary key,
backend_type text not null,
backend_id uuid not null,
name text not null,
endpoint_url text not null,
http_method text not null default 'post',
headers jsonb,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
constraint webhook_http_method_validity check (http_method in ('post', 'put')),
constraint webhook_endpoint_url_validity check (endpoint_url ~ '^https?://[^/]+')
);
create index on app_public.webhooks (backend_type, backend_id);
alter table app_public.webhooks
enable row level security;
create policy access_all on app_public.webhooks to app_admin using (true);
grant update (name, endpoint_url, http_method, headers) on app_public.webhooks to app_admin;
grant select on app_public.webhooks to app_admin;
grant insert (backend_type, backend_id, name, endpoint_url, http_method, headers) on app_public.webhooks to app_admin;
grant delete on app_public.webhooks to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.webhooks
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappBots
set transform_null_equals to true;
create table app_public.whatsapp_bots
(
id uuid not null default uuid_generate_v1mc() primary key,
phone_number text not null,
token uuid not null default uuid_generate_v1mc(),
user_id uuid not null,
description text,
auth_info text,
qr_code text,
is_verified boolean not null default false,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index whatsapp_bot_token on app_public.whatsapp_bots(token);
alter table app_public.whatsapp_bots
add constraint whatsapp_bots_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
alter table app_public.whatsapp_bots
enable row level security;
create policy access_all on app_public.whatsapp_bots to app_admin using (true);
grant update (phone_number, token, user_id, description, auth_info, qr_code, is_verified) on app_public.whatsapp_bots to app_admin;
grant select on app_public.whatsapp_bots to app_admin;
grant insert (phone_number, token, user_id, description, auth_info, qr_code, is_verified) on app_public.whatsapp_bots to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.whatsapp_bots
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappMessages
create table app_public.whatsapp_messages
(
id uuid not null default uuid_generate_v1mc() primary key,
whatsapp_bot_id uuid not null,
wa_message_id text,
wa_message text,
wa_timestamp timestamptz,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index whatsapp_message_whatsapp_bot_id on app_public.whatsapp_messages(whatsapp_bot_id);
alter table app_public.whatsapp_messages
add constraint whatsapp_messages_whatsapp_bot_id_fkey foreign key ("whatsapp_bot_id") references app_public.whatsapp_bots on delete cascade;
alter table app_public.whatsapp_messages
enable row level security;
create policy access_all on app_public.whatsapp_messages to app_admin using (true);
grant update (whatsapp_bot_id, wa_message_id, wa_message, wa_timestamp) on app_public.whatsapp_messages to app_admin;
grant select on app_public.whatsapp_messages to app_admin;
grant insert (whatsapp_bot_id, wa_message_id, wa_message, wa_timestamp) on app_public.whatsapp_messages to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.whatsapp_messages
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region WhatsappAttachments
create table app_public.whatsapp_attachments
(
id uuid not null default uuid_generate_v1mc() primary key,
whatsapp_bot_id uuid not null,
whatsapp_message_id uuid,
attachment bytea,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index whatsapp_attachment_whatsapp_bot_id on app_public.whatsapp_attachments(whatsapp_bot_id);
create unique index whatsapp_attachment_whatsapp_message_id on app_public.whatsapp_attachments(whatsapp_message_id);
alter table app_public.whatsapp_attachments
add constraint whatsapp_attachments_whatsapp_bot_id_fkey foreign key ("whatsapp_bot_id") references app_public.whatsapp_bots on delete cascade;
alter table app_public.whatsapp_attachments
add constraint whatsapp_attachments_whatsapp_message_id_fkey foreign key ("whatsapp_message_id") references app_public.whatsapp_messages on delete cascade;
alter table app_public.whatsapp_attachments
enable row level security;
create policy access_all on app_public.whatsapp_attachments to app_admin using (true);
grant update (whatsapp_bot_id, whatsapp_message_id, attachment) on app_public.whatsapp_attachments to app_admin;
grant select on app_public.whatsapp_attachments to app_admin;
grant insert (whatsapp_bot_id, whatsapp_message_id, attachment) on app_public.whatsapp_attachments to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.whatsapp_attachments
for each row
execute procedure app_private.tg__timestamps();
-- endregion
-- region SignalBots
set transform_null_equals to true;
create table app_public.signal_bots
(
id uuid not null default uuid_generate_v1mc() primary key,
phone_number text not null,
token uuid not null default uuid_generate_v1mc(),
user_id uuid not null,
description text,
auth_info text,
is_verified boolean not null default false,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create unique index signal_bot_token on app_public.signal_bots(token);
alter table app_public.signal_bots
add constraint signal_bots_user_id_fkey foreign key ("user_id") references app_public.users on delete cascade;
alter table app_public.signal_bots
enable row level security;
create policy access_all on app_public.signal_bots to app_admin using (true);
grant update (phone_number, token, user_id, description, auth_info, is_verified) on app_public.signal_bots to app_admin;
grant select on app_public.signal_bots to app_admin;
grant insert (phone_number, token, user_id, description, auth_info, is_verified) on app_public.signal_bots to app_admin;
create trigger _100_timestamps
before insert or update
on app_public.signal_bots
for each row
execute procedure app_private.tg__timestamps();
-- endregion


@@ -0,0 +1,10 @@
--! Previous: sha1:b13a5217288f5d349d8d9e3afbd7bb30c0dbad21
--! Hash: sha1:8659f815ff013a793f2e01113a9a61a98c7bd8d5
-- Enter migration here
drop table if exists app_public.whatsapp_attachments cascade;
drop table if exists app_public.whatsapp_messages cascade;
grant delete on app_public.whatsapp_bots to app_admin;
grant delete on app_public.signal_bots to app_admin;


@@ -0,0 +1 @@
-- Enter migration here

metamigo-db/package.json Normal file

@@ -0,0 +1,39 @@
{
"name": "db",
"private": true,
"version": "0.2.0",
"main": "build/main/db/src/index.js",
"author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"graphile-migrate": "^1.4.1"
},
"devDependencies": {
"common": "0.2.5",
"@babel/core": "7.20.12",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"@types/jest": "^29.2.5",
"eslint": "^8.32.0",
"jest": "^29.3.1",
"jest-junit": "^15.0.0",
"pino-pretty": "^9.1.1",
"prettier": "^2.8.3",
"ts-node": "^10.9.1",
"typedoc": "^0.23.24",
"typescript": "4.9.4"
},
"scripts": {
"build": "tsc -p tsconfig.json",
"build-test": "tsc -p tsconfig.json",
"doc:html": "typedoc src/ --exclude '**/*.test.ts' --exclude '**/*.spec.ts' --name $npm_package_name --readme README.md --target es2019 --mode file --out build/docs",
"doc": "yarn run doc:html",
"fix:lint": "eslint src --ext .ts --fix",
"fix:prettier": "prettier \"src/**/*.ts\" --write",
"worker": "NODE_ENV=development yarn cli worker",
"lint:lint": "eslint src --ext .ts",
"lint:prettier": "prettier \"src/**/*.ts\" --list-different",
"lint": "npm run lint:lint && npm run lint:prettier",
"watch:build": "tsc -p tsconfig.json -w"
}
}


@@ -0,0 +1,9 @@
export * from "./settings";
export * from "./signal/bots";
export * from "./whatsapp/bots";
export * from "./whatsapp/messages";
export * from "./whatsapp/attachments";
export * from "./settings";
export * from "./voice/voice-line";
export * from "./voice/voice-provider";
export * from "./webhooks";


@@ -0,0 +1,104 @@
/* eslint-disable @typescript-eslint/explicit-module-boundary-types,@typescript-eslint/no-unused-vars,@typescript-eslint/no-explicit-any,prefer-destructuring */
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type SettingId = Flavor<UUID, "Setting Id">;
export interface UnsavedSetting<T> {
name: string;
value: T;
}
export interface SavedSetting<T> extends UnsavedSetting<T> {
id: SettingId;
createdAt: Date;
updatedAt: Date;
}
export const SettingRecord = recordInfo<UnsavedSetting<any>, SavedSetting<any>>(
"app_public",
"settings"
);
export class SettingRecordRepository extends RepositoryBase(SettingRecord) {
async findByName<T>(name: string): Promise<SavedSetting<T> | null> {
return this.db.oneOrNone("SELECT * FROM $1 $2:raw LIMIT 1", [
this.schemaTable,
this.where({ name }),
]);
}
async upsert<T>(name: string, value: T): Promise<SavedSetting<T>> {
return this.db.one(
`INSERT INTO $1 ($2:name) VALUES ($2:csv)
ON CONFLICT (name)
DO UPDATE SET value = EXCLUDED.value RETURNING *`,
[this.schemaTable, this.columnize({ name, value })]
);
}
}
// These helpers let us create type-safe setting constants.
export interface SettingType<T = any> {
_type: T;
}
export interface SettingInfo<T = any> extends SettingType<T> {
name: string;
}
export function castToSettingInfo(
runtimeData: Omit<SettingInfo, "_type">
): SettingInfo {
return runtimeData as SettingInfo;
}
export function settingInfo<T>(name: string): SettingInfo<T>;
// don't use this signature, use the explicit typed signature
export function settingInfo(name: string) {
return castToSettingInfo({
name,
});
}
export interface ISettingsService {
name: string;
lookup<T>(settingInfo: SettingInfo<T>): Promise<T>;
save<T>(settingInfo: SettingInfo<T>, value: T): Promise<T>;
}
export const SettingsService = (
repo: SettingRecordRepository
): ISettingsService => ({
name: "settingService",
lookup: async <T>(settingInfo: SettingInfo<T>): Promise<T> => {
const s = await repo.findByName<T>(settingInfo.name);
if (!s) throw new Error(`setting not found: ${settingInfo.name}`);
return s.value;
},
save: async <T>(settingInfo: SettingInfo<T>, value: T): Promise<T> => {
const s = await repo.upsert(settingInfo.name, value);
return s.value;
},
});
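// Usage sketch (names are illustrative):
//   const GREETING = settingInfo<string>("greeting");
//   const settings = SettingsService(repo);
//   await settings.save(GREETING, "hello");
//   const text = await settings.lookup(GREETING); // typed as string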
const _test = async () => {
// here is an example of how to use this module
// it also serves as a compile-time test case
const repo = new SettingRecordRepository({} as any);
// create your own custom setting types!
// the value is serialized as json in the database
type Custom = { foo: string; bar: string };
type CustomUnsavedSetting = UnsavedSetting<Custom>;
type CustomSetting = SavedSetting<Custom>;
const s3: CustomSetting | null = await repo.findByName("test");
const customValue = { foo: "monkeys", bar: "eggplants" };
let customSetting = { name: "custom", value: customValue };
customSetting = await repo.insert(customSetting);
const value: Custom = customSetting.value;
const MySetting = settingInfo<string>("my-setting");
};


@@ -0,0 +1,35 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type SignalBotId = Flavor<UUID, "Signal Bot Id">;
export interface UnsavedSignalBot {
phoneNumber: string;
userId: string;
description: string;
}
export interface SavedSignalBot extends UnsavedSignalBot {
id: SignalBotId;
createdAt: Date;
updatedAt: Date;
token: string;
authInfo: string;
isVerified: boolean;
}
export const SignalBotRecord = recordInfo<UnsavedSignalBot, SavedSignalBot>(
"app_public",
"signal_bots"
);
export class SignalBotRecordRepository extends RepositoryBase(SignalBotRecord) {
async updateAuthInfo(
bot: SavedSignalBot,
authInfo: string | undefined
): Promise<SavedSignalBot> {
return this.db.one(
"UPDATE $1 SET (auth_info, is_verified) = ROW($2, true) WHERE id = $3 RETURNING *",
[this.schemaTable, authInfo, bot.id]
);
}
}


@@ -0,0 +1,62 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type VoiceLineId = Flavor<UUID, "VoiceLine Id">;
export type VoiceLineAudio = {
"audio/webm": string;
"audio/mpeg"?: string;
checksum?: string;
};
export interface UnsavedVoiceLine {
providerId: string;
providerLineSid: string;
number: string;
language: string;
voice: string;
promptText?: string;
promptAudio?: VoiceLineAudio;
audioPromptEnabled: boolean;
audioConvertedAt?: Date;
}
export interface SavedVoiceLine extends UnsavedVoiceLine {
id: VoiceLineId;
createdAt: Date;
updatedAt: Date;
}
export const VoiceLineRecord = recordInfo<UnsavedVoiceLine, SavedVoiceLine>(
"app_public",
"voice_lines"
);
export class VoiceLineRecordRepository extends RepositoryBase(VoiceLineRecord) {
/**
* Fetch all voice lines given the numbers
* @param numbers
*/
async findAllByNumbers(numbers: string[]): Promise<SavedVoiceLine[]> {
return this.db.any(
"SELECT id,provider_id,provider_line_sid,number FROM $1 WHERE number in ($2:csv)",
[this.schemaTable, numbers]
);
}
/**
* Fetch all voice lines given a list of provider line ids
* @param ids
*/
async findAllByProviderLineSids(ids: string[]): Promise<SavedVoiceLine[]> {
return this.db.any(
"SELECT id,provider_id,provider_line_sid,number FROM $1 WHERE provider_line_sid in ($2:csv)",
[this.schemaTable, ids]
);
}
}


@@ -0,0 +1,52 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
/*
* VoiceProvider
*
* A provider is a company that provides incoming voice call services
*/
export type VoiceProviderId = Flavor<UUID, "VoiceProvider Id">;
export enum VoiceProviderKinds {
TWILIO = "TWILIO",
}
export type TwilioCredentials = {
accountSid: string;
apiKeySid: string;
apiKeySecret: string;
};
// expand this type later when we support more providers
export type VoiceProviderCredentials = TwilioCredentials;
export interface UnsavedVoiceProvider {
kind: VoiceProviderKinds;
name: string;
credentials: VoiceProviderCredentials;
}
export interface SavedVoiceProvider extends UnsavedVoiceProvider {
id: VoiceProviderId;
createdAt: Date;
updatedAt: Date;
}
export const VoiceProviderRecord = recordInfo<
UnsavedVoiceProvider,
SavedVoiceProvider
>("app_public", "voice_providers");
export class VoiceProviderRecordRepository extends RepositoryBase(
VoiceProviderRecord
) {
async findByTwilioAccountSid(
accountSid: string
): Promise<SavedVoiceProvider | null> {
return this.db.oneOrNone(
"select * from $1 where credentials->>'accountSid' = $2",
[this.schemaTable, accountSid]
);
}
}


@@ -0,0 +1,50 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
/*
* Webhook
*
* A webhook allows external services to be notified when a recorded call is available
*/
export type WebhookId = Flavor<UUID, "Webhook Id">;
export interface HttpHeaders {
header: string;
value: string;
}
export interface UnsavedWebhook {
name: string;
backendType: string;
backendId: string;
endpointUrl: string;
httpMethod: "post" | "put";
headers?: HttpHeaders[];
}
export interface SavedWebhook extends UnsavedWebhook {
id: WebhookId;
createdAt: Date;
updatedAt: Date;
}
export const WebhookRecord = recordInfo<UnsavedWebhook, SavedWebhook>(
"app_public",
"webhooks"
);
export class WebhookRecordRepository extends RepositoryBase(WebhookRecord) {
async findAllByBackendId(
backendType: string,
backendId: string
): Promise<SavedWebhook[]> {
return this.db.any(
"select * from $1 where backend_type = $2 and backend_id = $3",
[this.schemaTable, backendType, backendId]
);
}
}
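// Usage sketch ("whatsapp_bot" is an assumed backend_type value):
//   const hooks = await db.webhooks.findAllByBackendId("whatsapp_bot", bot.id);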


@@ -0,0 +1,24 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type WhatsappAttachmentId = Flavor<UUID, "Whatsapp Attachment Id">;
export interface UnsavedWhatsappAttachment {
whatsappBotId: string;
whatsappMessageId: string;
attachment: Buffer;
}
export interface SavedWhatsappAttachment extends UnsavedWhatsappAttachment {
id: WhatsappAttachmentId;
createdAt: Date;
updatedAt: Date;
}
export const WhatsappAttachmentRecord = recordInfo<
UnsavedWhatsappAttachment,
SavedWhatsappAttachment
>("app_public", "whatsapp_attachments");
export class WhatsappAttachmentRecordRepository extends RepositoryBase(
WhatsappAttachmentRecord
) {}


@@ -0,0 +1,48 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type WhatsappBotId = Flavor<UUID, "Whatsapp Bot Id">;
export interface UnsavedWhatsappBot {
phoneNumber: string;
userId: string;
description: string;
}
export interface SavedWhatsappBot extends UnsavedWhatsappBot {
id: WhatsappBotId;
createdAt: Date;
updatedAt: Date;
token: string;
authInfo: string;
qrCode: string;
isVerified: boolean;
}
export const WhatsappBotRecord = recordInfo<
UnsavedWhatsappBot,
SavedWhatsappBot
>("app_public", "whatsapp_bots");
export class WhatsappBotRecordRepository extends RepositoryBase(
WhatsappBotRecord
) {
async updateQR(
bot: SavedWhatsappBot,
qrCode: string | undefined
): Promise<SavedWhatsappBot> {
return this.db.one(
"UPDATE $1 SET (qr_code) = ROW($2) WHERE id = $3 RETURNING *",
[this.schemaTable, qrCode, bot.id]
);
}
async updateAuthInfo(
bot: SavedWhatsappBot,
authInfo: string | undefined
): Promise<SavedWhatsappBot> {
return this.db.one(
"UPDATE $1 SET (auth_info, is_verified) = ROW($2, true) WHERE id = $3 RETURNING *",
[this.schemaTable, authInfo, bot.id]
);
}
}


@@ -0,0 +1,26 @@
import { RepositoryBase, recordInfo, UUID, Flavor } from "common";
export type WhatsappMessageId = Flavor<UUID, "Whatsapp Message Id">;
export interface UnsavedWhatsappMessage {
whatsappBotId: string;
waMessageId: string;
waTimestamp: Date;
waMessage: string;
attachments?: string[];
}
export interface SavedWhatsappMessage extends UnsavedWhatsappMessage {
id: WhatsappMessageId;
createdAt: Date;
updatedAt: Date;
}
export const WhatsappMessageRecord = recordInfo<
UnsavedWhatsappMessage,
SavedWhatsappMessage
>("app_public", "whatsapp_messages");
export class WhatsappMessageRecordRepository extends RepositoryBase(
WhatsappMessageRecord
) {}


@@ -0,0 +1,28 @@
#!/bin/bash
set -eu
psql -Xv ON_ERROR_STOP=1 "${GM_DBURL}" <<EOF
INSERT INTO app_public.users(email, name, user_role, is_active, created_by)
VALUES('abel@guardianproject.info', 'Abel', 'admin'::app_public.role_type, true, 'afterCurrent Hook')
on conflict (email) do nothing;
INSERT INTO app_public.users(email, name, user_role, is_active, created_by)
VALUES('darren@redaranj.com', 'Darren', 'admin'::app_public.role_type, true, 'afterCurrent Hook')
on conflict (email) do nothing;
INSERT INTO app_public.users(email, name, user_role, is_active, created_by)
VALUES('jking@chambana.net', 'Josh', 'admin'::app_public.role_type, true, 'afterCurrent Hook')
on conflict (email) do nothing;
INSERT INTO app_public.settings(name, value)
VALUES('app-setting', to_jsonb('this is a setting value stored as json text'::text))
on conflict (name) do nothing;
EOF
if [[ -f "${PWD}/scripts/afterCurrent-private.sh" ]]; then
# shellcheck source=/dev/null
source "${PWD}/scripts/afterCurrent-private.sh"
fi


@@ -0,0 +1,12 @@
REVOKE ALL ON DATABASE :DATABASE_NAME FROM PUBLIC;
GRANT CONNECT ON DATABASE :DATABASE_NAME TO :DATABASE_OWNER;
GRANT CONNECT ON DATABASE :DATABASE_NAME TO :DATABASE_AUTHENTICATOR;
GRANT ALL ON DATABASE :DATABASE_NAME TO :DATABASE_OWNER;
grant app_anonymous to :DATABASE_VISITOR;
grant app_user to :DATABASE_VISITOR;
grant app_admin to :DATABASE_VISITOR;
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS tablefunc WITH SCHEMA public;


@@ -0,0 +1,69 @@
#!/bin/bash
set -eu
DATABASE_HOST=${POSTGRES_HOST:-}
DATABASE_PORT=${POSTGRES_PORT:-5432}
DATABASE_SUPERUSER=${POSTGRES_USER:-postgres}
DATABASE_SUPERUSER_PASSWORD=${POSTGRES_PASSWORD:-metamigo}
export PGPASSWORD=$DATABASE_SUPERUSER_PASSWORD
# this script is run under two circumstances: with a local postgres and a remote postgres
# local postgres: we should use the unix domain socket to connect
# remote postgres: we should pass the --host param
HOST_PARAM="--host="
if [[ -n "${DATABASE_HOST}" ]]; then
HOST_PARAM="--host=${DATABASE_HOST}"
fi
# wait for postgres process to settle
set +e
echo "pg_isready $HOST_PARAM --username $POSTGRES_USER --dbname template1"
pg_isready "$HOST_PARAM" --username "$POSTGRES_USER" --dbname template1
while ! pg_isready "$HOST_PARAM" --username "$POSTGRES_USER" --dbname template1; do
echo "$(date) - waiting for database to start"
sleep 10
done
set -e
echo
echo
echo "Creating the database and the roles"
# We're using 'template1' because we know it should exist. We should not actually change this database.
psql -Xv ON_ERROR_STOP=1 "$HOST_PARAM" --username "$DATABASE_SUPERUSER" --dbname template1 <<EOF
CREATE ROLE ${DATABASE_OWNER} WITH LOGIN PASSWORD '${DATABASE_OWNER_PASSWORD}';
GRANT ${DATABASE_OWNER} TO ${DATABASE_SUPERUSER};
CREATE ROLE ${DATABASE_AUTHENTICATOR} WITH LOGIN PASSWORD '${DATABASE_AUTHENTICATOR_PASSWORD}' NOINHERIT;
CREATE ROLE ${DATABASE_VISITOR};
GRANT ${DATABASE_VISITOR} TO ${DATABASE_AUTHENTICATOR};
-- Create database
CREATE DATABASE ${DATABASE_NAME} OWNER ${DATABASE_OWNER};
-- Database permissions
REVOKE ALL ON DATABASE ${DATABASE_NAME} FROM PUBLIC;
GRANT ALL ON DATABASE ${DATABASE_NAME} TO ${DATABASE_OWNER};
GRANT CONNECT ON DATABASE ${DATABASE_NAME} TO ${DATABASE_AUTHENTICATOR};
EOF
echo
echo
echo "Installing extensions into the database"
psql -Xv ON_ERROR_STOP=1 "$HOST_PARAM" --username "$DATABASE_SUPERUSER" --dbname "$DATABASE_NAME" <<EOF
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS tablefunc WITH SCHEMA public;
EOF
echo
echo
echo "Creating roles in the database"
psql -Xv ON_ERROR_STOP=1 "$HOST_PARAM" --username "$DATABASE_SUPERUSER" --dbname "$DATABASE_NAME" <<EOF
CREATE ROLE app_anonymous;
CREATE ROLE app_user WITH IN ROLE app_anonymous;
CREATE ROLE app_admin WITH IN ROLE app_user;
GRANT app_anonymous TO ${DATABASE_AUTHENTICATOR};
GRANT app_admin TO ${DATABASE_AUTHENTICATOR};
EOF


@@ -0,0 +1,17 @@
#!/usr/bin/env bash
if [ "$GM_DBURL" = "" ]; then
echo "This script should only be ran from inside graphile-migrate";
exit 1;
fi
export COMPOSE_PROJECT_NAME
# When run inside docker-compose we need to be able to run a different pg_dump binary
${PG_DUMP:-pg_dump} \
--no-sync \
--schema-only \
--no-owner \
--exclude-schema=graphile_migrate \
--exclude-schema=graphile_worker \
--file=../../data/schema.sql \
"$GM_DBURL"


@@ -0,0 +1,31 @@
const { spawn } = require("child_process");
const findWorkspaceRoot = require("find-yarn-workspace-root");
if (process.env.CI) {
process.exit(0);
}
const connectionString = process.env.GM_DBURL;
if (!connectionString) {
console.error(
"This script should only be called from a graphile-migrate action."
);
process.exit(1);
}
spawn(
process.env.PG_DUMP || "pg_dump",
[
"--no-sync",
"--schema-only",
"--no-owner",
"--exclude-schema=graphile_migrate",
"--exclude-schema=graphile_worker",
`--file=${findWorkspaceRoot()}/data/schema.sql`,
connectionString,
],
{
stdio: "inherit",
shell: true,
}
);


@@ -0,0 +1,8 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "build/main"
},
"include": ["**/*.ts", "**/.*.ts"],
"exclude": ["node_modules", "**/*.spec.ts", "**/*.test.ts"]
}