- Create new @link-stack/logger package wrapping Pino for structured logging
- Replace all console.log/error/warn statements across the monorepo
- Configure environment-aware logging (pretty-print in dev, JSON in prod)
- Add automatic redaction of sensitive fields (passwords, tokens, etc.)
- Remove dead commented-out logger file from bridge-worker
- Follow Pino's standard argument order (context object first, message second)
- Support log levels via LOG_LEVEL environment variable
- Export TypeScript types for better IDE support

This provides consistent, structured logging across all applications and packages, making debugging easier and production logs more parseable. A sketch of the wrapper's shape follows below.
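For reference, here is a minimal sketch of what createLogger might look like given the requirements above. The pino and pino-pretty options shown are standard, but the exact redaction paths, default level, and transport settings are illustrative assumptions, not the package's actual source:

// Hypothetical sketch of the @link-stack/logger wrapper described above.
import pino, { Logger } from "pino";

export function createLogger(name: string): Logger {
  const isProd = process.env.NODE_ENV === "production";
  return pino({
    name,
    // Log level is driven by LOG_LEVEL, defaulting to "info".
    level: process.env.LOG_LEVEL ?? "info",
    // Automatically redact common sensitive fields (paths are illustrative).
    redact: ["password", "token", "*.password", "*.token", "req.headers.authorization"],
    // Pretty-print in development, newline-delimited JSON in production.
    transport: isProd
      ? undefined
      : { target: "pino-pretty", options: { colorize: true } },
  });
}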
96 lines · 2.5 KiB · TypeScript
import * as path from "path";
import { fileURLToPath } from "url";
import { promises as fs } from "fs";
import {
  Kysely,
  Migrator,
  MigrationResult,
  FileMigrationProvider,
  PostgresDialect,
  CamelCasePlugin,
} from "kysely";
import pkg from "pg";
import * as dotenv from "dotenv";
import { createLogger } from "@link-stack/logger";

const { Pool } = pkg;
const logger = createLogger("bridge-migrations-migrate");

// The migrator only executes migration files, so no typed schema is needed.
interface Database {}

// Usage: pass one of "up:all" | "up:one" | "down:all" | "down:one".
export const migrate = async (arg: string) => {
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = path.dirname(__filename);

  // Outside production, load DATABASE_* settings from a local env file.
  if (process.env.NODE_ENV !== "production") {
    dotenv.config({ path: path.join(__dirname, "../.env.local") });
  }

  const db = new Kysely<Database>({
    dialect: new PostgresDialect({
      pool: new Pool({
        host: process.env.DATABASE_HOST,
        database: process.env.DATABASE_NAME,
        port: parseInt(process.env.DATABASE_PORT!, 10),
        user: process.env.DATABASE_USER,
        password: process.env.DATABASE_PASSWORD,
      }),
    }),
    plugins: [new CamelCasePlugin()],
  });

  const migrator = new Migrator({
    db,
    provider: new FileMigrationProvider({
      fs,
      path,
      migrationFolder: path.join(__dirname, "migrations"),
    }),
  });

  let error: unknown = null;
  let results: MigrationResult[] = [];

  if (arg === "up:all") {
    const out = await migrator.migrateToLatest();
    results = out.results ?? [];
    error = out.error;
  } else if (arg === "up:one") {
    const out = await migrator.migrateUp();
    results = out.results ?? [];
    error = out.error;
  } else if (arg === "down:all") {
    // Roll back one step per known migration, stopping at the first failure.
    const migrations = await migrator.getMigrations();
    for (const _ of migrations) {
      const out = await migrator.migrateDown();
      results = results.concat(out.results ?? []);
      error = out.error;
      if (error) break;
    }
  } else if (arg === "down:one") {
    const out = await migrator.migrateDown();
    results = out.results ?? [];
    error = out.error;
  }

  results.forEach((it) => {
    if (it.status === "Success") {
      logger.info(
        { migration: it.migrationName, direction: it.direction },
        `Migration "${it.migrationName}" (${it.direction.toLowerCase()}) was executed successfully`,
      );
    } else if (it.status === "Error") {
      logger.error(
        { migration: it.migrationName },
        `Failed to execute migration "${it.migrationName}"`,
      );
    }
  });

  if (error) {
    // Pino argument order: context object first, message second.
    logger.error({ err: error }, "Failed to migrate");
    process.exit(1);
  }

  await db.destroy();
};

const arg = process.argv.slice(2).pop();
if (!arg) {
  logger.error("Usage: migrate <up:all|up:one|down:all|down:one>");
  process.exit(1);
}
migrate(arg).catch((err) => {
  logger.error({ err }, "Migration run crashed");
  process.exit(1);
});
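Since migrate is exported, another script in the package can also drive it programmatically. A minimal usage sketch (the "./migrate" import path is an assumption, since the listing does not show this file's name):

// Hypothetical caller; adjust the import path to this file's actual name.
import { migrate } from "./migrate";

// Apply all pending migrations, then release the connection pool.
await migrate("up:all");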