Repo cleanup and updates

This commit is contained in:
Darren Clarke 2025-11-10 14:55:22 +01:00 committed by GitHub
parent 3a1063e40e
commit 99f8d7e2eb
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
72 changed files with 11857 additions and 16439 deletions

1
.gitignore vendored
View file

@ -29,3 +29,4 @@ signald-state
project.org project.org
**/.openapi-generator/ **/.openapi-generator/
apps/bridge-worker/scripts/* apps/bridge-worker/scripts/*
ENVIRONMENT_VARIABLES_MIGRATION.md

View file

@ -12,9 +12,9 @@ build-all:
TURBO_TEAM: ${TURBO_TEAM} TURBO_TEAM: ${TURBO_TEAM}
ZAMMAD_URL: ${ZAMMAD_URL} ZAMMAD_URL: ${ZAMMAD_URL}
script: script:
- npm install npm@10 -g - corepack enable && corepack prepare pnpm@9.15.4 --activate
- npm install -g turbo - pnpm add -g turbo
- npm ci - pnpm install --frozen-lockfile
- turbo build - turbo build
.docker-build: .docker-build:
@ -191,11 +191,11 @@ zammad-docker-build:
DOCKERFILE_PATH: ./docker/zammad/Dockerfile DOCKERFILE_PATH: ./docker/zammad/Dockerfile
DOCKER_CONTEXT: ./docker/zammad DOCKER_CONTEXT: ./docker/zammad
before_script: before_script:
- apk --update add nodejs npm - apk --update add nodejs
script: script:
- npm install npm@10 -g - corepack enable && corepack prepare pnpm@9.15.4 --activate
- npm install -g turbo - pnpm add -g turbo
- npm ci - pnpm install --frozen-lockfile
- turbo build --force --filter @link-stack/zammad-addon-* - turbo build --force --filter @link-stack/zammad-addon-*
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- DOCKER_BUILDKIT=1 docker build --build-arg EMBEDDED=true --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${DOCKER_CONTEXT} - DOCKER_BUILDKIT=1 docker build --build-arg EMBEDDED=true --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${DOCKER_CONTEXT}
@ -213,11 +213,11 @@ zammad-standalone-docker-build:
DOCKERFILE_PATH: ./docker/zammad/Dockerfile DOCKERFILE_PATH: ./docker/zammad/Dockerfile
DOCKER_CONTEXT: ./docker/zammad DOCKER_CONTEXT: ./docker/zammad
before_script: before_script:
- apk --update add nodejs npm - apk --update add nodejs
script: script:
- npm install npm@10 -g - corepack enable && corepack prepare pnpm@9.15.4 --activate
- npm install -g turbo - pnpm add -g turbo
- npm ci - pnpm install --frozen-lockfile
- turbo build --force --filter @link-stack/zammad-addon-* - turbo build --force --filter @link-stack/zammad-addon-*
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- DOCKER_BUILDKIT=1 docker build --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${DOCKER_CONTEXT} - DOCKER_BUILDKIT=1 docker build --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${DOCKER_CONTEXT}

View file

@ -2,22 +2,28 @@ FROM node:22-bookworm-slim AS base
FROM base AS builder FROM base AS builder
ARG APP_DIR=/opt/bridge-frontend ARG APP_DIR=/opt/bridge-frontend
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN mkdir -p ${APP_DIR}/ RUN mkdir -p ${APP_DIR}/
RUN npm i -g turbo RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN pnpm add -g turbo
WORKDIR ${APP_DIR} WORKDIR ${APP_DIR}
COPY . . COPY . .
RUN turbo prune --scope=@link-stack/bridge-frontend --scope=@link-stack/bridge-migrations --docker RUN turbo prune --scope=@link-stack/bridge-frontend --scope=@link-stack/bridge-migrations --docker
FROM base AS installer FROM base AS installer
ARG APP_DIR=/opt/bridge-frontend ARG APP_DIR=/opt/bridge-frontend
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR ${APP_DIR} WORKDIR ${APP_DIR}
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
COPY --from=builder ${APP_DIR}/.gitignore .gitignore COPY --from=builder ${APP_DIR}/.gitignore .gitignore
COPY --from=builder ${APP_DIR}/out/json/ . COPY --from=builder ${APP_DIR}/out/json/ .
COPY --from=builder ${APP_DIR}/out/package-lock.json ./package-lock.json COPY --from=builder ${APP_DIR}/out/pnpm-lock.yaml ./pnpm-lock.yaml
RUN npm ci RUN pnpm install --frozen-lockfile
COPY --from=builder ${APP_DIR}/out/full/ . COPY --from=builder ${APP_DIR}/out/full/ .
RUN npm i -g turbo RUN pnpm add -g turbo
RUN turbo run build --filter=@link-stack/bridge-frontend --filter=@link-stack/bridge-migrations RUN turbo run build --filter=@link-stack/bridge-frontend --filter=@link-stack/bridge-migrations
FROM base AS runner FROM base AS runner
@ -29,6 +35,9 @@ LABEL maintainer="Darren Clarke <darren@redaranj.com>"
LABEL org.label-schema.build-date=$BUILD_DATE LABEL org.label-schema.build-date=$BUILD_DATE
LABEL org.label-schema.version=$VERSION LABEL org.label-schema.version=$VERSION
ENV APP_DIR ${APP_DIR} ENV APP_DIR ${APP_DIR}
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \ RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
apt-get install -y --no-install-recommends \ apt-get install -y --no-install-recommends \
dumb-init dumb-init

View file

@ -1,10 +1,6 @@
import GoogleProvider from "next-auth/providers/google"; import GoogleProvider from "next-auth/providers/google";
import { KyselyAdapter } from "@auth/kysely-adapter";
import { db } from "@link-stack/bridge-common";
export const authOptions = { export const authOptions = {
// @ts-ignore
adapter: KyselyAdapter(db),
providers: [ providers: [
GoogleProvider({ GoogleProvider({
clientId: process.env.GOOGLE_CLIENT_ID!, clientId: process.env.GOOGLE_CLIENT_ID!,

View file

@ -1,6 +1,9 @@
import NextAuth from "next-auth"; import NextAuth from "next-auth";
import { authOptions } from "@/app/_lib/authentication"; import { authOptions } from "@/app/_lib/authentication";
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
const handler = NextAuth(authOptions); const handler = NextAuth(authOptions);
export { handler as GET, handler as POST }; export { handler as GET, handler as POST };

View file

@ -2,6 +2,6 @@
set -e set -e
echo "running migrations" echo "running migrations"
(cd ../bridge-migrations/ && npm run migrate:up:all) (cd ../bridge-migrations/ && pnpm run migrate:up:all)
echo "starting bridge-frontend" echo "starting bridge-frontend"
exec dumb-init npm run start exec dumb-init pnpm run start

View file

@ -1,23 +1,81 @@
import { withAuth } from "next-auth/middleware"; import { withAuth } from "next-auth/middleware";
import { NextResponse } from "next/server";
export default withAuth({ export default withAuth(
pages: { function middleware(req) {
signIn: `/login`, const isDev = process.env.NODE_ENV === "development";
const nonce = Buffer.from(crypto.randomUUID()).toString("base64");
// Allow digiresilience.org for embedding documentation
const frameSrcDirective = `frame-src 'self' https://digiresilience.org;`;
const cspHeader = `
default-src 'self';
${frameSrcDirective}
connect-src 'self';
script-src 'self' 'nonce-${nonce}' 'strict-dynamic' ${isDev ? "'unsafe-eval'" : ""};
style-src 'self' 'unsafe-inline';
img-src 'self' blob: data:;
font-src 'self';
object-src 'none';
base-uri 'self';
form-action 'self';
frame-ancestors 'self';
upgrade-insecure-requests;
`;
const contentSecurityPolicyHeaderValue = cspHeader
.replace(/\s{2,}/g, " ")
.trim();
const requestHeaders = new Headers(req.headers);
requestHeaders.set("x-nonce", nonce);
requestHeaders.set(
"Content-Security-Policy",
contentSecurityPolicyHeaderValue,
);
const response = NextResponse.next({
request: {
headers: requestHeaders,
},
});
response.headers.set(
"Content-Security-Policy",
contentSecurityPolicyHeaderValue,
);
// Additional security headers
response.headers.set("X-Frame-Options", "SAMEORIGIN");
response.headers.set("X-Content-Type-Options", "nosniff");
response.headers.set("Referrer-Policy", "strict-origin-when-cross-origin");
response.headers.set("X-XSS-Protection", "1; mode=block");
response.headers.set(
"Permissions-Policy",
"camera=(), microphone=(), geolocation=()"
);
return response;
}, },
callbacks: { {
authorized: ({ token }) => { pages: {
if (process.env.SETUP_MODE === "true") { signIn: `/login`,
return true;
}
if (token?.email) {
return true;
}
return false;
}, },
}, callbacks: {
}); authorized: ({ token }) => {
if (process.env.SETUP_MODE === "true") {
return true;
}
if (token?.email) {
return true;
}
return false;
},
},
}
);
export const config = { export const config = {
matcher: ["/((?!ws|wss|api|_next/static|_next/image|favicon.ico).*)"], matcher: ["/((?!ws|wss|api|_next/static|_next/image|favicon.ico).*)"],

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/bridge-frontend", "name": "@link-stack/bridge-frontend",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "next dev", "dev": "next dev",
@ -18,25 +18,23 @@
"@mui/material": "^6", "@mui/material": "^6",
"@mui/material-nextjs": "^6", "@mui/material-nextjs": "^6",
"@mui/x-license": "^7", "@mui/x-license": "^7",
"@link-stack/bridge-common": "*", "@link-stack/bridge-common": "workspace:*",
"@link-stack/bridge-ui": "*", "@link-stack/bridge-ui": "workspace:*",
"next": "15.5.4", "next": "15.5.4",
"next-auth": "^4.24.11", "next-auth": "^4.24.11",
"react": "19.2.0", "react": "19.2.0",
"react-dom": "19.2.0", "react-dom": "19.2.0",
"sharp": "^0.34.4", "sharp": "^0.34.4",
"tsx": "^4.20.6", "tsx": "^4.20.6",
"@link-stack/ui": "*" "@link-stack/ui": "workspace:*"
}, },
"devDependencies": { "devDependencies": {
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"@types/node": "^24", "@types/node": "^24",
"@types/pg": "^8.15.5", "@types/pg": "^8.15.5",
"@types/react": "^19", "@types/react": "^19",
"@types/react-dom": "^19", "@types/react-dom": "^19",
"@link-stack/eslint-config": "*",
"@link-stack/typescript-config": "*",
"typescript": "^5" "typescript": "^5"
} }
} }

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/bridge-migrations", "name": "@link-stack/bridge-migrations",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"type": "module", "type": "module",
"scripts": { "scripts": {
"migrate:up:all": "tsx migrate.ts up:all", "migrate:up:all": "tsx migrate.ts up:all",
@ -9,7 +9,7 @@
"migrate:down:one": "tsx migrate.ts down:one" "migrate:down:one": "tsx migrate.ts down:one"
}, },
"dependencies": { "dependencies": {
"@link-stack/logger": "*", "@link-stack/logger": "workspace:*",
"dotenv": "^17.2.3", "dotenv": "^17.2.3",
"kysely": "0.27.5", "kysely": "0.27.5",
"pg": "^8.16.3", "pg": "^8.16.3",
@ -18,8 +18,8 @@
"devDependencies": { "devDependencies": {
"@types/node": "^24", "@types/node": "^24",
"@types/pg": "^8.15.5", "@types/pg": "^8.15.5",
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"typescript": "^5" "typescript": "^5"
} }
} }

View file

@ -2,20 +2,26 @@ FROM node:22-bookworm-slim AS base
FROM base AS builder FROM base AS builder
ARG APP_DIR=/opt/bridge-whatsapp ARG APP_DIR=/opt/bridge-whatsapp
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN mkdir -p ${APP_DIR}/ RUN mkdir -p ${APP_DIR}/
RUN npm i -g turbo RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN pnpm add -g turbo
WORKDIR ${APP_DIR} WORKDIR ${APP_DIR}
COPY . . COPY . .
RUN turbo prune --scope=@link-stack/bridge-whatsapp --docker RUN turbo prune --scope=@link-stack/bridge-whatsapp --docker
FROM base AS installer FROM base AS installer
ARG APP_DIR=/opt/bridge-whatsapp ARG APP_DIR=/opt/bridge-whatsapp
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR ${APP_DIR} WORKDIR ${APP_DIR}
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
COPY --from=builder ${APP_DIR}/out/json/ . COPY --from=builder ${APP_DIR}/out/json/ .
COPY --from=builder ${APP_DIR}/out/full/ . COPY --from=builder ${APP_DIR}/out/full/ .
COPY --from=builder ${APP_DIR}/out/package-lock.json ./package-lock.json COPY --from=builder ${APP_DIR}/out/pnpm-lock.yaml ./pnpm-lock.yaml
RUN npm ci RUN pnpm install --frozen-lockfile
RUN npm i -g turbo RUN pnpm add -g turbo
RUN turbo run build --filter=@link-stack/bridge-whatsapp RUN turbo run build --filter=@link-stack/bridge-whatsapp
FROM base as runner FROM base as runner

View file

@ -2,4 +2,4 @@
set -e set -e
echo "starting bridge-whatsapp" echo "starting bridge-whatsapp"
exec dumb-init npm run start exec dumb-init pnpm run start

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/bridge-whatsapp", "name": "@link-stack/bridge-whatsapp",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"main": "build/main/index.js", "main": "build/main/index.js",
"author": "Darren Clarke <darren@redaranj.com>", "author": "Darren Clarke <darren@redaranj.com>",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
@ -9,15 +9,17 @@
"@hapi/hapi": "^21.4.3", "@hapi/hapi": "^21.4.3",
"@hapipal/schmervice": "^3.0.0", "@hapipal/schmervice": "^3.0.0",
"@hapipal/toys": "^4.0.0", "@hapipal/toys": "^4.0.0",
"@link-stack/logger": "*", "@link-stack/bridge-common": "workspace:*",
"@link-stack/logger": "workspace:*",
"@whiskeysockets/baileys": "^6.7.20", "@whiskeysockets/baileys": "^6.7.20",
"hapi-pino": "^13.0.0", "hapi-pino": "^13.0.0",
"link-preview-js": "^3.1.0" "link-preview-js": "^3.1.0"
}, },
"devDependencies": { "devDependencies": {
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@link-stack/jest-config": "*", "@link-stack/jest-config": "workspace:*",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"@types/long": "^5",
"@types/node": "*", "@types/node": "*",
"dotenv-cli": "^10.0.0", "dotenv-cli": "^10.0.0",
"tsx": "^4.20.6", "tsx": "^4.20.6",

View file

@ -12,6 +12,11 @@ import makeWASocket, {
} from "@whiskeysockets/baileys"; } from "@whiskeysockets/baileys";
import fs from "fs"; import fs from "fs";
import { createLogger } from "@link-stack/logger"; import { createLogger } from "@link-stack/logger";
import {
getMaxAttachmentSize,
getMaxTotalAttachmentSize,
MAX_ATTACHMENTS,
} from "@link-stack/bridge-common";
const logger = createLogger("bridge-whatsapp-service"); const logger = createLogger("bridge-whatsapp-service");
@ -36,7 +41,24 @@ export default class WhatsappService extends Service {
} }
getBotDirectory(id: string): string { getBotDirectory(id: string): string {
return `${this.getBaseDirectory()}/${id}`; // Validate that ID contains only safe characters (alphanumeric, dash, underscore)
if (!/^[a-zA-Z0-9_-]+$/.test(id)) {
throw new Error(`Invalid bot ID format: ${id}`);
}
// Prevent path traversal by checking for suspicious patterns
if (id.includes('..') || id.includes('/') || id.includes('\\')) {
throw new Error(`Path traversal detected in bot ID: ${id}`);
}
const botPath = `${this.getBaseDirectory()}/${id}`;
// Ensure the resolved path is still within the base directory
if (!botPath.startsWith(this.getBaseDirectory())) {
throw new Error(`Invalid bot path: ${botPath}`);
}
return botPath;
} }
getAuthDirectory(id: string): string { getAuthDirectory(id: string): string {
@ -340,9 +362,39 @@ export default class WhatsappService extends Service {
await connection.sendMessage(recipient, { text: message }); await connection.sendMessage(recipient, { text: message });
} }
// Send attachments if provided // Send attachments if provided with size validation
if (attachments && attachments.length > 0) { if (attachments && attachments.length > 0) {
const MAX_ATTACHMENT_SIZE = getMaxAttachmentSize();
const MAX_TOTAL_SIZE = getMaxTotalAttachmentSize();
if (attachments.length > MAX_ATTACHMENTS) {
throw new Error(`Too many attachments: ${attachments.length} (max ${MAX_ATTACHMENTS})`);
}
let totalSize = 0;
for (const attachment of attachments) { for (const attachment of attachments) {
// Calculate size before converting to buffer
const estimatedSize = (attachment.data.length * 3) / 4;
if (estimatedSize > MAX_ATTACHMENT_SIZE) {
logger.warn({
filename: attachment.filename,
size: estimatedSize,
maxSize: MAX_ATTACHMENT_SIZE
}, 'Attachment exceeds size limit, skipping');
continue;
}
totalSize += estimatedSize;
if (totalSize > MAX_TOTAL_SIZE) {
logger.warn({
totalSize,
maxTotalSize: MAX_TOTAL_SIZE
}, 'Total attachment size exceeds limit, skipping remaining');
break;
}
const buffer = Buffer.from(attachment.data, "base64"); const buffer = Buffer.from(attachment.data, "base64");
if (attachment.mime_type.startsWith("image/")) { if (attachment.mime_type.startsWith("image/")) {

View file

@ -8,7 +8,7 @@
"outDir": "build/main", "outDir": "build/main",
"rootDir": "src", "rootDir": "src",
"skipLibCheck": true, "skipLibCheck": true,
"types": ["node", "long"], "types": ["node"],
"lib": ["es2020", "DOM"], "lib": ["es2020", "DOM"],
"composite": true "composite": true
}, },

View file

@ -2,26 +2,35 @@ FROM node:22-bookworm-slim AS base
FROM base AS builder FROM base AS builder
ARG APP_DIR=/opt/bridge-worker ARG APP_DIR=/opt/bridge-worker
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN mkdir -p ${APP_DIR}/ RUN mkdir -p ${APP_DIR}/
RUN npm i -g turbo RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN pnpm add -g turbo
WORKDIR ${APP_DIR} WORKDIR ${APP_DIR}
COPY . . COPY . .
RUN turbo prune --scope=@link-stack/bridge-worker --docker RUN turbo prune --scope=@link-stack/bridge-worker --docker
FROM base AS installer FROM base AS installer
ARG APP_DIR=/opt/bridge-worker ARG APP_DIR=/opt/bridge-worker
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR ${APP_DIR} WORKDIR ${APP_DIR}
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
COPY --from=builder ${APP_DIR}/out/json/ . COPY --from=builder ${APP_DIR}/out/json/ .
COPY --from=builder ${APP_DIR}/out/full/ . COPY --from=builder ${APP_DIR}/out/full/ .
COPY --from=builder ${APP_DIR}/out/package-lock.json ./package-lock.json COPY --from=builder ${APP_DIR}/out/pnpm-lock.yaml ./pnpm-lock.yaml
RUN npm ci RUN pnpm install --frozen-lockfile
RUN npm i -g turbo RUN pnpm add -g turbo
RUN turbo run build --filter=@link-stack/bridge-worker RUN turbo run build --filter=@link-stack/bridge-worker
FROM base as runner FROM base as runner
ARG BUILD_DATE ARG BUILD_DATE
ARG VERSION ARG VERSION
ARG APP_DIR=/opt/bridge-worker ARG APP_DIR=/opt/bridge-worker
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN mkdir -p ${APP_DIR}/ RUN mkdir -p ${APP_DIR}/
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \ RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
apt-get install -y --no-install-recommends \ apt-get install -y --no-install-recommends \

View file

@ -88,9 +88,6 @@ Required environment variables:
### Common Tasks ### Common Tasks
- `notify-webhooks` - Send webhook notifications - `notify-webhooks` - Send webhook notifications
### Leafcutter Tasks
- `import-leafcutter` - Import data to Leafcutter
- `import-label-studio` - Import Label Studio annotations - `import-label-studio` - Import Label Studio annotations
## Architecture ## Architecture

View file

@ -2,4 +2,4 @@
set -e set -e
echo "starting bridge-worker" echo "starting bridge-worker"
exec dumb-init npm run start exec dumb-init pnpm run start

View file

@ -1,7 +1,6 @@
import type {} from "graphile-config";
import type {} from "graphile-worker"; import type {} from "graphile-worker";
const preset: GraphileConfig.Preset = { const preset: any = {
worker: { worker: {
connectionString: process.env.DATABASE_URL, connectionString: process.env.DATABASE_URL,
maxPoolSize: process.env.BRIDGE_WORKER_POOL_SIZE maxPoolSize: process.env.BRIDGE_WORKER_POOL_SIZE

View file

@ -1,11 +1,6 @@
/* eslint-disable camelcase */ /* eslint-disable camelcase */
// import { SavedVoiceProvider } from "@digiresilience/bridge-db"; // import { SavedVoiceProvider } from "@digiresilience/bridge-db";
import Twilio from "twilio"; import Twilio from "twilio";
import { CallInstance } from "twilio/lib/rest/api/v2010/account/call";
import { Zammad, getOrCreateUser } from "./zammad.js";
import { createLogger } from "@link-stack/logger";
const logger = createLogger('bridge-worker-common');
type SavedVoiceProvider = any; type SavedVoiceProvider = any;
@ -23,51 +18,3 @@ export const twilioClientFor = (
}); });
}; };
export const createZammadTicket = async (
call: CallInstance,
mp3: Buffer,
): Promise<void> => {
const title = `Call from ${call.fromFormatted} at ${call.startTime}`;
const body = `<ul>
<li>Caller: ${call.fromFormatted}</li>
<li>Service Number: ${call.toFormatted}</li>
<li>Call Duration: ${call.duration} seconds</li>
<li>Start Time: ${call.startTime}</li>
<li>End Time: ${call.endTime}</li>
</ul>
<p>See the attached recording.</p>`;
const filename = `${call.sid}-${call.startTime}.mp3`;
const zammad = Zammad(
{
token: "EviH_WL0p6YUlCoIER7noAZEAPsYA_fVU4FZCKdpq525Vmzzvl8d7dNuP_8d-Amb",
},
"https://demo.digiresilience.org",
);
try {
const customer = await getOrCreateUser(zammad, call.fromFormatted);
await zammad.ticket.create({
title,
group: "Finances",
note: "This ticket was created automaticaly from a recorded phone call.",
customer_id: customer.id,
article: {
body,
subject: title,
content_type: "text/html",
type: "note",
attachments: [
{
filename,
data: mp3.toString("base64"),
"mime-type": "audio/mpeg",
},
],
},
});
} catch (error: any) {
if (error.isBoom) {
logger.error({ output: error.output }, 'Zammad ticket creation failed');
throw new Error("Failed to create zamamd ticket");
}
}
};

View file

@ -0,0 +1,272 @@
import { createLogger } from "@link-stack/logger";
const logger = createLogger('formstack-field-mapping');
/**
 * Field mapping configuration for the Formstack-to-Zammad integration.
 *
 * This configuration is completely flexible - you define your own internal
 * field names and map them to both Formstack source fields and Zammad custom
 * fields. It is loaded (and validated) from the FORMSTACK_FIELD_MAPPING
 * environment variable by loadFieldMapping().
 */
export interface FieldMappingConfig {
  /**
   * Map internal field keys to Formstack field names.
   *
   * Required keys (system):
   * - formId: The Formstack Form ID field
   * - uniqueId: The Formstack submission unique ID field
   *
   * Optional keys with special behavior:
   * - email: Used for user lookup/creation (if provided)
   * - phone: Used for user lookup/creation (if provided)
   * - signalAccount: Used for Signal-based user lookup (tried first before phone)
   * - name: User's full name (can be nested object with first/last, used in user creation)
   * - organization: Used in ticket title template placeholder {organization}
   * - typeOfSupport: Used in ticket title template placeholder {typeOfSupport}
   * - descriptionOfIssue: Used as article subject (defaults to "Support Request" if not provided)
   *
   * All other keys are completely arbitrary and defined by your form.
   */
  sourceFields: Record<string, string>;

  /**
   * Map Zammad custom field names to internal field keys (from sourceFields).
   *
   * Example:
   * {
   *   "us_state": "state",       // Zammad field "us_state" gets value from sourceFields["state"]
   *   "zip_code": "zipCode",     // Zammad field "zip_code" gets value from sourceFields["zipCode"]
   *   "custom_field": "myField"  // Any custom field mapping
   * }
   *
   * The values in this object must correspond to keys in sourceFields
   * (loadFieldMapping logs a warning for unresolved references).
   */
  zammadFields: Record<string, string>;

  /**
   * Configuration for ticket creation.
   */
  ticket: {
    /** Zammad group name to assign tickets to */
    group: string;
    /** Article type name (e.g., "note", "cdr_signal", "email") */
    defaultArticleType: string;
    /**
     * Template for the ticket title.
     * Supports placeholders: {name}, {organization}, {typeOfSupport}.
     * Placeholders reference internal field keys from sourceFields.
     */
    titleTemplate?: string;
  };

  /**
   * Configuration for extracting nested field values.
   */
  nestedFields?: {
    /**
     * How to extract first/last name from a nested Name field.
     * Example: { firstNamePath: "first", lastNamePath: "last" }
     * for a field like { "Name": { "first": "John", "last": "Doe" } }
     */
    name?: {
      firstNamePath?: string;
      lastNamePath?: string;
    };
  };
}
// Module-level cache: the configuration is parsed and validated once per
// process; resetMappingCache() clears it (useful for testing).
let cachedMapping: FieldMappingConfig | null = null;

/**
 * Load the field mapping configuration from the FORMSTACK_FIELD_MAPPING
 * environment variable (REQUIRED).
 *
 * The JSON is parsed, structurally validated, and cached for subsequent calls.
 *
 * @returns the validated field mapping configuration
 * @throws Error when the variable is unset, when the JSON cannot be parsed
 *         ("Failed to parse Formstack field mapping JSON: ..."), or when the
 *         parsed object fails validation ("Invalid field mapping
 *         configuration: ...")
 */
export function loadFieldMapping(): FieldMappingConfig {
  if (cachedMapping) {
    return cachedMapping;
  }

  const configJson = process.env.FORMSTACK_FIELD_MAPPING;
  if (!configJson) {
    throw new Error(
      'FORMSTACK_FIELD_MAPPING environment variable is required. ' +
      'Please set it to a JSON string containing your field mapping configuration.'
    );
  }

  logger.info('Loading Formstack field mapping from environment variable');

  // Parse in its own try/catch so that validation errors below are NOT
  // swallowed and re-reported as JSON parse failures (the original wrapped
  // both in one catch, which produced misleading error messages).
  let config: FieldMappingConfig;
  try {
    config = JSON.parse(configJson) as FieldMappingConfig;
  } catch (error) {
    logger.error({
      error: error instanceof Error ? error.message : error,
      jsonLength: configJson.length
    }, 'Failed to parse field mapping configuration');
    throw new Error(
      `Failed to parse Formstack field mapping JSON: ${error instanceof Error ? error.message : error}`
    );
  }

  // Validate required sections exist
  if (!config.sourceFields || typeof config.sourceFields !== 'object') {
    throw new Error('Invalid field mapping configuration: sourceFields must be an object');
  }
  if (!config.zammadFields || typeof config.zammadFields !== 'object') {
    throw new Error('Invalid field mapping configuration: zammadFields must be an object');
  }
  if (!config.ticket || typeof config.ticket !== 'object') {
    throw new Error('Invalid field mapping configuration: ticket must be an object');
  }

  // Validate required ticket fields
  if (!config.ticket.group) {
    throw new Error('Invalid field mapping configuration: ticket.group is required');
  }
  if (!config.ticket.defaultArticleType) {
    throw new Error('Invalid field mapping configuration: ticket.defaultArticleType is required');
  }

  // Validate required source fields
  const systemRequiredFields = ['formId', 'uniqueId'];
  for (const field of systemRequiredFields) {
    if (!config.sourceFields[field]) {
      throw new Error(`Invalid field mapping configuration: sourceFields.${field} is required (system field)`);
    }
  }

  // Warn (non-fatal) about zammadFields entries that reference keys missing
  // from sourceFields; those fields will simply never be populated.
  for (const [zammadField, sourceKey] of Object.entries(config.zammadFields)) {
    if (!config.sourceFields[sourceKey]) {
      logger.warn(
        { zammadField, sourceKey },
        'Zammad field maps to non-existent source field key'
      );
    }
  }

  logger.info('Successfully loaded Formstack field mapping configuration');
  cachedMapping = config;
  return cachedMapping;
}
/**
 * Resolve a value from raw formData via the internal-key -> Formstack-name
 * mapping.
 *
 * @param formData          raw submission payload keyed by Formstack field names
 * @param internalFieldKey  internal key to look up in mapping.sourceFields
 * @param mapping           optional pre-loaded config; falls back to loadFieldMapping()
 * @returns the raw field value, or undefined when the key is unmapped
 */
export function getFieldValue(
  formData: any,
  internalFieldKey: string,
  mapping?: FieldMappingConfig
): any {
  const config = mapping ?? loadFieldMapping();
  const formstackName = config.sourceFields[internalFieldKey];
  return formstackName ? formData[formstackName] : undefined;
}
/**
 * Extract a nested value from a field using a dot-separated path
 * (e.g. "first" or "address.city").
 *
 * @param fieldValue  the (possibly nested) field value to walk
 * @param path        dot-separated path; undefined/empty yields undefined
 * @returns the value at the path, or undefined when the path is empty, the
 *          field value is falsy, or any intermediate step is not an object
 */
export function getNestedFieldValue(
  fieldValue: any,
  path: string | undefined
): any {
  if (!path || !fieldValue) {
    return undefined;
  }
  let node: any = fieldValue;
  for (const segment of path.split('.')) {
    // Bail out as soon as we cannot descend any further.
    if (!node || typeof node !== 'object') {
      return undefined;
    }
    node = node[segment];
  }
  return node;
}
/**
 * Normalize a raw field value to a display string.
 *
 * Arrays become comma-separated lists, plain objects are JSON-encoded, and
 * everything else is coerced with String(). Note that 0 and false are real
 * values and are stringified; only null/undefined/'' count as empty.
 *
 * @returns the formatted string, or undefined when there is no value
 */
export function formatFieldValue(value: any): string | undefined {
  const isEmpty = value === null || value === undefined || value === '';
  if (isEmpty) return undefined;
  if (Array.isArray(value)) return value.join(', ');
  if (typeof value === 'object') return JSON.stringify(value);
  return String(value);
}
/**
 * Build a ticket title from the configured template and extracted data.
 *
 * Replaces placeholders like {name}, {organization}, {typeOfSupport} with the
 * provided values. Placeholders whose value is empty are removed together
 * with a surrounding " - " separator, if any.
 *
 * @param mapping  field mapping config; mapping.ticket.titleTemplate is used,
 *                 defaulting to "{name}"
 * @param data     placeholder key -> value (undefined values remove the placeholder)
 * @returns the substituted, trimmed title
 */
export function buildTicketTitle(
  mapping: FieldMappingConfig,
  data: Record<string, string | undefined>
): string {
  const template = mapping.ticket.titleTemplate || '{name}';
  let title = template;

  // Replace all placeholders in the template
  for (const [key, value] of Object.entries(data)) {
    const placeholder = `{${key}}`;
    if (!title.includes(placeholder)) {
      continue;
    }
    if (value) {
      // split/join substitutes EVERY occurrence; String.replace with a
      // string pattern only replaces the first one, which left repeated
      // placeholders half-substituted.
      title = title.split(placeholder).join(value);
    } else {
      // Remove empty placeholders along with surrounding separators.
      title = title
        .split(` - ${placeholder}`).join('')
        .split(`${placeholder} - `).join('')
        .split(placeholder).join('');
    }
  }
  return title.trim();
}
/**
 * Collect all Zammad custom field values from the raw form data.
 *
 * For every entry in mapping.zammadFields, the referenced source field is
 * looked up in formData and formatted; fields that resolve to no value are
 * omitted from the result.
 *
 * @param formData  raw submission payload keyed by Formstack field names
 * @param mapping   optional pre-loaded config; falls back to loadFieldMapping()
 * @returns Zammad field name -> formatted string value
 */
export function getZammadFieldValues(
  formData: any,
  mapping?: FieldMappingConfig
): Record<string, string> {
  const config = mapping ?? loadFieldMapping();
  const collected: Record<string, string> = {};
  for (const [zammadName, internalKey] of Object.entries(config.zammadFields)) {
    const formatted = formatFieldValue(getFieldValue(formData, internalKey, config));
    if (formatted !== undefined) {
      collected[zammadName] = formatted;
    }
  }
  return collected;
}
/**
 * Reset the cached mapping (useful for testing).
 *
 * Clears the module-level cache so the next loadFieldMapping() call re-reads
 * and re-validates FORMSTACK_FIELD_MAPPING from the environment.
 */
export function resetMappingCache(): void {
  cachedMapping = null;
}

View file

@ -41,7 +41,7 @@ const formatAuth = (credentials: any) => {
return ( return (
"Basic " + "Basic " +
Buffer.from(`${credentials.username}:${credentials.password}`).toString( Buffer.from(`${credentials.username}:${credentials.password}`).toString(
"base64" "base64",
) )
); );
} }
@ -56,7 +56,7 @@ const formatAuth = (credentials: any) => {
export const Zammad = ( export const Zammad = (
credentials: ZammadCredentials, credentials: ZammadCredentials,
host: string, host: string,
opts?: ZammadClientOpts opts?: ZammadClientOpts,
): ZammadClient => { ): ZammadClient => {
const extraHeaders = (opts && opts.headers) || {}; const extraHeaders = (opts && opts.headers) || {};
@ -76,7 +76,9 @@ export const Zammad = (
return result as Ticket; return result as Ticket;
}, },
update: async (id, payload) => { update: async (id, payload) => {
const { payload: result } = await wreck.put(`tickets/${id}`, { payload }); const { payload: result } = await wreck.put(`tickets/${id}`, {
payload,
});
return result as Ticket; return result as Ticket;
}, },
}, },
@ -99,18 +101,30 @@ export const Zammad = (
}; };
export const getUser = async (zammad: ZammadClient, phoneNumber: string) => { export const getUser = async (zammad: ZammadClient, phoneNumber: string) => {
const mungedNumber = phoneNumber.replace("+", ""); // Sanitize phone number: only allow digits and + symbol
const results = await zammad.user.search(`phone:${mungedNumber}`); const mungedNumber = phoneNumber.replace(/[^\d+]/g, "");
// Validate phone number format (10-15 digits, optional + prefix)
if (!/^\+?\d{10,15}$/.test(mungedNumber)) {
throw new Error(`Invalid phone number format: ${phoneNumber}`);
}
// Remove + for search query
const searchNumber = mungedNumber.replace("+", "");
const results = await zammad.user.search(`phone:${searchNumber}`);
if (results.length > 0) return results[0]; if (results.length > 0) return results[0];
return undefined; return undefined;
}; };
export const getOrCreateUser = async (zammad: ZammadClient, phoneNumber: string) => { export const getOrCreateUser = async (
zammad: ZammadClient,
phoneNumber: string,
) => {
const customer = await getUser(zammad, phoneNumber); const customer = await getUser(zammad, phoneNumber);
if (customer) return customer; if (customer) return customer;
return zammad.user.create({ return zammad.user.create({
phone: phoneNumber, phone: phoneNumber,
note: "User created by Grabadora from incoming voice call", note: "User created from incoming voice call",
}); });
}; };

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/bridge-worker", "name": "@link-stack/bridge-worker",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"type": "module", "type": "module",
"main": "build/main/index.js", "main": "build/main/index.js",
"author": "Darren Clarke <darren@redaranj.com>", "author": "Darren Clarke <darren@redaranj.com>",
@ -12,9 +12,9 @@
}, },
"dependencies": { "dependencies": {
"@hapi/wreck": "^18.1.0", "@hapi/wreck": "^18.1.0",
"@link-stack/bridge-common": "*", "@link-stack/bridge-common": "workspace:*",
"@link-stack/logger": "*", "@link-stack/logger": "workspace:*",
"@link-stack/signal-api": "*", "@link-stack/signal-api": "workspace:*",
"fluent-ffmpeg": "^2.1.3", "fluent-ffmpeg": "^2.1.3",
"graphile-worker": "^0.16.6", "graphile-worker": "^0.16.6",
"remeda": "^2.32.0", "remeda": "^2.32.0",
@ -23,8 +23,8 @@
"devDependencies": { "devDependencies": {
"@types/fluent-ffmpeg": "^2.1.27", "@types/fluent-ffmpeg": "^2.1.27",
"dotenv-cli": "^10.0.0", "dotenv-cli": "^10.0.0",
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"typescript": "^5.9.3" "typescript": "^5.9.3"
} }
} }

View file

@ -1,5 +1,14 @@
import { createLogger } from "@link-stack/logger"; import { createLogger } from "@link-stack/logger";
import { Zammad, getUser } from "../../lib/zammad.js"; import { Zammad, getUser } from "../../lib/zammad.js";
import {
loadFieldMapping,
getFieldValue,
getNestedFieldValue,
formatFieldValue,
buildTicketTitle,
getZammadFieldValues,
type FieldMappingConfig,
} from "../../lib/formstack-field-mapping.js";
const logger = createLogger('create-ticket-from-form'); const logger = createLogger('create-ticket-from-form');
@ -13,63 +22,73 @@ const createTicketFromFormTask = async (
): Promise<void> => { ): Promise<void> => {
const { formData, receivedAt } = options; const { formData, receivedAt } = options;
// Load field mapping configuration
const mapping = loadFieldMapping();
// Log only non-PII metadata using configured field names
const formId = getFieldValue(formData, 'formId', mapping);
const uniqueId = getFieldValue(formData, 'uniqueId', mapping);
logger.info({ logger.info({
formData, formId,
uniqueId,
receivedAt, receivedAt,
formDataKeys: Object.keys(formData), fieldCount: Object.keys(formData).length
}, 'Processing Formstack form submission'); }, 'Processing Formstack form submission');
// Extract data from Formstack payload - matching Python ngo-isac-uploader field names // Extract fields using dynamic mapping
const { const nameField = getFieldValue(formData, 'name', mapping);
FormID, const firstName = mapping.nestedFields?.name?.firstNamePath
UniqueID, ? getNestedFieldValue(nameField, mapping.nestedFields.name.firstNamePath) || ''
Name, : '';
Email, const lastName = mapping.nestedFields?.name?.lastNamePath
Phone, ? getNestedFieldValue(nameField, mapping.nestedFields.name.lastNamePath) || ''
'Signal Account': signalAccount, : '';
City,
State,
'Zip Code': zipCode,
'What organization are you affiliated with and/or employed by (if applicable)?': organization,
'What type of support do you wish to receive (to the extent you know)?': typeOfSupport,
'Is there a specific deadline associated with this request (e.g., a legal or legislative deadline)?': specificDeadline,
'Please provide the deadline': deadline,
'Do you have an insurance provider that provides coverage for the types of services you seek (e.g., public official, professional liability insurance, litigation insurance)?': hasInsuranceProvider,
'Have you approached the insurance provider for assistance?': approachedProvider,
'Are you seeking help on behalf of an individual or an organization?': typeOfUser,
'What is the structure of the organization?': orgStructure,
'Are you currently a candidate for elected office, a government officeholder, or a government employee?': governmentAffiliated,
'Where did you hear about the Democracy Protection Network?': whereHeard,
'Do you or the organization work on behalf of any of the following communities or issues? Please select all that apply.': relatedIssues,
'Do you or the organization engage in any of the following types of work? Please select all that apply.': typeOfWork,
'Why are you seeking support? Please briefly describe the circumstances that have brought you to the DPN, including, as applicable, dates, places, and the people or entities involved. We coordinate crisis-response services and some resilience-building services (e.g., assistance establishing good-governance or security practices). If you are seeking resilience-building services, please note that in the text box below.': descriptionOfIssue,
'What is your preferred communication method?': preferredContactMethod,
} = formData;
// Build full name - matching Python pattern
const firstName = Name?.first || '';
const lastName = Name?.last || '';
const fullName = (firstName && lastName) const fullName = (firstName && lastName)
? `${firstName} ${lastName}`.trim() ? `${firstName} ${lastName}`.trim()
: firstName || lastName || 'Unknown'; : firstName || lastName || 'Unknown';
// Build ticket title - exactly matching Python ngo-isac-uploader pattern // Extract well-known fields used for special logic (all optional)
// Pattern: [Name] - [Organization] - [Type of support] const email = getFieldValue(formData, 'email', mapping);
let title = fullName; const phone = getFieldValue(formData, 'phone', mapping);
if (organization) { const signalAccount = getFieldValue(formData, 'signalAccount', mapping);
title += ` - ${organization}`; const organization = getFieldValue(formData, 'organization', mapping);
} const typeOfSupport = getFieldValue(formData, 'typeOfSupport', mapping);
if (typeOfSupport) { const descriptionOfIssue = getFieldValue(formData, 'descriptionOfIssue', mapping);
// Handle array format (Formstack sends arrays for multi-select)
const supportText = Array.isArray(typeOfSupport) ? typeOfSupport.join(', ') : typeOfSupport; // Validate that at least one contact method is provided
title += ` - ${supportText}`; if (!email && !phone && !signalAccount) {
logger.error({ formId, uniqueId }, 'No contact information provided - at least one of email, phone, or signalAccount is required');
throw new Error('At least one contact method (email, phone, or signalAccount) is required for ticket creation');
} }
// Build article body - format all fields as HTML like Python does // Build ticket title using configured template
// Pass all potentially used fields - the template determines which are actually used
const title = buildTicketTitle(mapping, {
name: fullName,
organization: formatFieldValue(organization),
typeOfSupport: formatFieldValue(typeOfSupport),
});
// Build article body - format all fields as HTML
const formatAllFields = (data: any): string => { const formatAllFields = (data: any): string => {
let html = ''; let html = '';
// Add formatted name field first if we have it
if (fullName && fullName !== 'Unknown') {
html += `<strong>Name:</strong><br>${fullName}<br>`;
}
for (const [key, value] of Object.entries(data)) { for (const [key, value] of Object.entries(data)) {
if (key === 'HandshakeKey' || key === 'FormID' || key === 'UniqueID') continue; // Skip metadata fields and name field (we already formatted it above)
const skipFields = [
mapping.sourceFields.formId,
mapping.sourceFields.uniqueId,
mapping.sourceFields.name, // Skip raw name field
'HandshakeKey',
].filter(Boolean);
if (skipFields.includes(key)) continue;
if (value === null || value === undefined || value === '') continue; if (value === null || value === undefined || value === '') continue;
const displayValue = Array.isArray(value) ? value.join(', ') : const displayValue = Array.isArray(value) ? value.join(', ') :
@ -93,130 +112,143 @@ const createTicketFromFormTask = async (
const zammad = Zammad({ token: zammadToken }, zammadUrl); const zammad = Zammad({ token: zammadToken }, zammadUrl);
try { try {
// Look up the article type ID for cdr_signal // Look up the configured article type
let cdrSignalTypeId: number | undefined; let articleTypeId: number | undefined;
try { try {
const articleTypes = await zammad.get('ticket_article_types'); const articleTypes = await zammad.get('ticket_article_types');
const cdrSignalType = articleTypes.find((t: any) => t.name === 'cdr_signal'); const configuredType = articleTypes.find((t: any) => t.name === mapping.ticket.defaultArticleType);
cdrSignalTypeId = cdrSignalType?.id; articleTypeId = configuredType?.id;
if (cdrSignalTypeId) { if (articleTypeId) {
logger.info({ cdrSignalTypeId }, 'Found cdr_signal article type'); logger.info({ articleTypeId, typeName: mapping.ticket.defaultArticleType }, 'Found configured article type');
} else { } else {
logger.warn('cdr_signal article type not found, ticket will use default type'); logger.warn({ typeName: mapping.ticket.defaultArticleType }, 'Configured article type not found, ticket will use default type');
} }
} catch (error: any) { } catch (error: any) {
logger.warn({ error: error.message }, 'Failed to look up cdr_signal article type'); logger.warn({ error: error.message }, 'Failed to look up article type');
} }
// Determine contact method and phone number - matching Python logic // Get or create user
// Priority: Signal > SMS/Phone > Email // Try to find existing user by: signalAccount -> phone -> email
const useSignal = preferredContactMethod?.includes('Signal') || preferredContactMethod?.includes('ignal');
const useSMS = preferredContactMethod?.includes('SMS');
const phoneNumber = useSignal ? signalAccount : (useSMS || Phone) ? Phone : '';
// Get or create user - matching Python pattern
let customer; let customer;
if (phoneNumber) { // Try Signal account first if provided
// Try to find by phone (Signal or regular) if (signalAccount) {
customer = await getUser(zammad, phoneNumber); customer = await getUser(zammad, signalAccount);
if (customer) {
logger.info({ customerId: customer.id, method: 'signal' }, 'Found existing user by Signal account');
}
}
// Fall back to phone if no customer found yet
if (!customer && phone) {
customer = await getUser(zammad, phone);
if (customer) { if (customer) {
logger.info({ customerId: customer.id, method: 'phone' }, 'Found existing user by phone'); logger.info({ customerId: customer.id, method: 'phone' }, 'Found existing user by phone');
} }
} }
if (!customer && Email) { // Fall back to email if no customer found yet
// Search by email if phone search didn't work if (!customer && email) {
const emailResults = await zammad.user.search(`email:${Email}`); // Validate email format before using in search
if (emailResults.length > 0) { const emailRegex = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/;
customer = emailResults[0]; if (emailRegex.test(email)) {
logger.info({ customerId: customer.id, method: 'email' }, 'Found existing user by email'); const emailResults = await zammad.user.search(`email:${email}`);
if (emailResults.length > 0) {
customer = emailResults[0];
logger.info({ customerId: customer.id, method: 'email' }, 'Found existing user by email');
}
} else {
logger.warn({ email }, 'Invalid email format provided, skipping email search');
} }
} }
if (!customer) { if (!customer) {
// Create new user - matching Python user creation pattern // Create new user
logger.info('Creating new user from form submission'); logger.info('Creating new user from form submission');
customer = await zammad.user.create({
// Build user data with whatever contact info we have
const userData: any = {
firstname: firstName, firstname: firstName,
lastname: lastName, lastname: lastName,
email: Email || `${UniqueID}@formstack.local`,
phone: phoneNumber || '',
roles: ['Customer'], roles: ['Customer'],
}); };
// Add contact info only if provided
if (email) {
userData.email = email;
}
const userPhone = signalAccount || phone;
if (userPhone) {
userData.phone = userPhone;
}
customer = await zammad.user.create(userData);
} }
logger.info({ logger.info({
customerId: customer.id, customerId: customer.id,
customerEmail: customer.email, email: customer.email,
customerPhone: customer.phone, }, 'Using customer for ticket');
}, 'Customer identified/created');
// Helper function to format field values (handle arrays and null values) // Look up the configured group
const formatFieldValue = (value: any): string | undefined => { const groups = await zammad.get('groups');
if (value === null || value === undefined || value === '') return undefined; const targetGroup = groups.find((g: any) => g.name === mapping.ticket.group);
if (Array.isArray(value)) return value.join(', ');
if (typeof value === 'object') return JSON.stringify(value); if (!targetGroup) {
return String(value); logger.error({ groupName: mapping.ticket.group }, 'Configured group not found');
throw new Error(`Zammad group "${mapping.ticket.group}" not found`);
}
logger.info({ groupId: targetGroup.id, groupName: targetGroup.name }, 'Using configured group');
// Build custom fields using Zammad field mapping
// This dynamically maps all configured fields without hardcoding
const customFields = getZammadFieldValues(formData, mapping);
// Create the ticket
const articleData: any = {
subject: descriptionOfIssue || 'Support Request',
body,
content_type: 'text/html',
internal: false,
}; };
// Create the ticket with custom fields - EXACTLY matching Python ngo-isac-uploader field names if (articleTypeId) {
const ticketData: any = { articleData.type_id = articleTypeId;
}
const ticketData = {
title, title,
group: "Imports", // Matching Python - uses "Imports" group group_id: targetGroup.id,
customer_id: customer.id, customer_id: customer.id,
article: articleData,
// Custom fields - matching Python field names EXACTLY ...customFields,
us_state: formatFieldValue(State),
zip_code: formatFieldValue(zipCode),
city: formatFieldValue(City),
type_of_support: formatFieldValue(typeOfSupport),
specific_deadline: formatFieldValue(specificDeadline),
deadline: formatFieldValue(deadline),
has_insurance_provider: formatFieldValue(hasInsuranceProvider),
approached_provider: formatFieldValue(approachedProvider),
type_of_user: formatFieldValue(typeOfUser),
org_structure: formatFieldValue(orgStructure),
government_affiliated: formatFieldValue(governmentAffiliated),
where_heard: formatFieldValue(whereHeard),
related_issues: formatFieldValue(relatedIssues),
type_of_work: formatFieldValue(typeOfWork),
// Article with all formatted fields
article: {
body,
subject: title,
content_type: "text/html",
type: useSignal ? "cdr_signal" : "note",
from: phoneNumber || Email || 'unknown',
sender: "Customer",
},
}; };
logger.info({
title,
groupId: targetGroup.id,
customerId: customer.id,
hasArticleType: !!articleTypeId,
customFieldCount: Object.keys(customFields).length,
}, 'Creating ticket');
const ticket = await zammad.ticket.create(ticketData); const ticket = await zammad.ticket.create(ticketData);
// Update the ticket with the cdr_signal article type
// This must be done after creation as Zammad doesn't allow setting this field during creation
if (cdrSignalTypeId) {
await zammad.ticket.update(ticket.id, { create_article_type_id: cdrSignalTypeId });
logger.info({ ticketId: ticket.id, cdrSignalTypeId }, 'Updated ticket with cdr_signal article type');
}
logger.info({ logger.info({
ticketId: ticket.id, ticketId: ticket.id,
customerId: customer.id, ticketNumber: ticket.id,
formId: FormID, title,
submissionId: UniqueID, }, 'Successfully created ticket from Formstack submission');
}, 'Zammad ticket created successfully');
} catch (error: any) { } catch (error: any) {
logger.error({ logger.error({
error: error.message, error: error.message,
stack: error.stack, stack: error.stack,
output: error.output, formId,
formId: FormID, uniqueId,
submissionId: UniqueID, }, 'Failed to create ticket from Formstack submission');
}, 'Failed to create Zammad ticket');
throw error; throw error;
} }
}; };

View file

@ -1,4 +1,11 @@
import { db, getWorkerUtils } from "@link-stack/bridge-common"; import {
db,
getWorkerUtils,
getMaxAttachmentSize,
getMaxTotalAttachmentSize,
MAX_ATTACHMENTS,
buildSignalGroupName,
} from "@link-stack/bridge-common";
import { createLogger } from "@link-stack/logger"; import { createLogger } from "@link-stack/logger";
import * as signalApi from "@link-stack/signal-api"; import * as signalApi from "@link-stack/signal-api";
const { Configuration, MessagesApi, GroupsApi } = signalApi; const { Configuration, MessagesApi, GroupsApi } = signalApi;
@ -81,7 +88,7 @@ const sendSignalMessageTask = async ({
// If sending to a phone number and auto-groups is enabled, create a group first // If sending to a phone number and auto-groups is enabled, create a group first
if (enableAutoGroups && !isGroupId && to && conversationId) { if (enableAutoGroups && !isGroupId && to && conversationId) {
try { try {
const groupName = `DPN Support Request: ${conversationId}`; const groupName = buildSignalGroupName(conversationId);
const createGroupResponse = await groupsClient.v1GroupsNumberPost({ const createGroupResponse = await groupsClient.v1GroupsNumberPost({
number: bot.phoneNumber, number: bot.phoneNumber,
data: { data: {
@ -204,16 +211,54 @@ const sendSignalMessageTask = async ({
); );
} }
// Add attachments if provided // Add attachments if provided with size validation
if (attachments && attachments.length > 0) { if (attachments && attachments.length > 0) {
messageData.base64Attachments = attachments.map((att) => att.data); const MAX_ATTACHMENT_SIZE = getMaxAttachmentSize();
logger.debug( const MAX_TOTAL_SIZE = getMaxTotalAttachmentSize();
{
attachmentCount: attachments.length, if (attachments.length > MAX_ATTACHMENTS) {
attachmentNames: attachments.map((att) => att.filename), throw new Error(`Too many attachments: ${attachments.length} (max ${MAX_ATTACHMENTS})`);
}, }
"Including attachments in message",
); let totalSize = 0;
const validatedAttachments = [];
for (const attachment of attachments) {
// Calculate size from base64 string (rough estimate: length * 3/4)
const estimatedSize = (attachment.data.length * 3) / 4;
if (estimatedSize > MAX_ATTACHMENT_SIZE) {
logger.warn({
filename: attachment.filename,
size: estimatedSize,
maxSize: MAX_ATTACHMENT_SIZE
}, 'Attachment exceeds size limit, skipping');
continue;
}
totalSize += estimatedSize;
if (totalSize > MAX_TOTAL_SIZE) {
logger.warn({
totalSize,
maxTotalSize: MAX_TOTAL_SIZE
}, 'Total attachment size exceeds limit, skipping remaining');
break;
}
validatedAttachments.push(attachment.data);
}
if (validatedAttachments.length > 0) {
messageData.base64Attachments = validatedAttachments;
logger.debug(
{
attachmentCount: validatedAttachments.length,
attachmentNames: attachments.slice(0, validatedAttachments.length).map((att) => att.filename),
totalSizeBytes: totalSize
},
"Including attachments in message",
);
}
} }
const response = await messagesClient.v2SendPost({ const response = await messagesClient.v2SendPost({

View file

@ -2,22 +2,28 @@ FROM node:22-bookworm-slim AS base
FROM base AS builder FROM base AS builder
ARG APP_DIR=/opt/link ARG APP_DIR=/opt/link
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN mkdir -p ${APP_DIR}/ RUN mkdir -p ${APP_DIR}/
RUN npm i -g turbo RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN pnpm add -g turbo
WORKDIR ${APP_DIR} WORKDIR ${APP_DIR}
COPY . . COPY . .
RUN turbo prune --scope=@link-stack/link --scope=@link-stack/bridge-migrations --docker RUN turbo prune --scope=@link-stack/link --scope=@link-stack/bridge-migrations --docker
FROM base AS installer FROM base AS installer
ARG APP_DIR=/opt/link ARG APP_DIR=/opt/link
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR ${APP_DIR} WORKDIR ${APP_DIR}
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
COPY --from=builder ${APP_DIR}/.gitignore .gitignore COPY --from=builder ${APP_DIR}/.gitignore .gitignore
COPY --from=builder ${APP_DIR}/out/json/ . COPY --from=builder ${APP_DIR}/out/json/ .
COPY --from=builder ${APP_DIR}/out/package-lock.json ./package-lock.json COPY --from=builder ${APP_DIR}/out/pnpm-lock.yaml ./pnpm-lock.yaml
RUN npm ci RUN pnpm install --frozen-lockfile
COPY --from=builder ${APP_DIR}/out/full/ . COPY --from=builder ${APP_DIR}/out/full/ .
RUN npm i -g turbo RUN pnpm add -g turbo
ENV ZAMMAD_URL http://zammad-nginx:8080 ENV ZAMMAD_URL http://zammad-nginx:8080
RUN turbo run build --filter=@link-stack/link --filter=@link-stack/bridge-migrations RUN turbo run build --filter=@link-stack/link --filter=@link-stack/bridge-migrations
@ -30,6 +36,9 @@ LABEL maintainer="Darren Clarke <darren@redaranj.com>"
LABEL org.label-schema.build-date=$BUILD_DATE LABEL org.label-schema.build-date=$BUILD_DATE
LABEL org.label-schema.version=$VERSION LABEL org.label-schema.version=$VERSION
ENV APP_DIR ${APP_DIR} ENV APP_DIR ${APP_DIR}
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \ RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
apt-get install -y --no-install-recommends \ apt-get install -y --no-install-recommends \
dumb-init dumb-init

View file

@ -4,13 +4,12 @@ The main CDR (Center for Digital Resilience) Link application - a streamlined he
## Overview ## Overview
CDR Link provides a unified dashboard for managing support tickets, communication channels, and data analytics. It integrates multiple services including Zammad (ticketing), Bridge (multi-channel messaging), Leafcutter (data visualization), and OpenSearch. CDR Link provides a unified dashboard for managing support tickets, communication channels, and data analytics. It integrates multiple services including Zammad (ticketing), Bridge (multi-channel messaging), and OpenSearch.
## Features ## Features
- **Simplified Helpdesk Interface**: Streamlined UI for Zammad ticket management - **Simplified Helpdesk Interface**: Streamlined UI for Zammad ticket management
- **Multi-Channel Communication**: Integration with Signal, WhatsApp, Facebook, and Voice channels - **Multi-Channel Communication**: Integration with Signal, WhatsApp, Facebook, and Voice channels
- **Data Visualization**: Embedded Leafcutter analytics and reporting
- **User Management**: Role-based access control with Google OAuth - **User Management**: Role-based access control with Google OAuth
- **Search**: Integrated OpenSearch for advanced queries - **Search**: Integrated OpenSearch for advanced queries
- **Label Studio Integration**: For data annotation workflows - **Label Studio Integration**: For data annotation workflows
@ -69,7 +68,6 @@ Key environment variables required:
- `/overview/[overview]` - Ticket overview pages - `/overview/[overview]` - Ticket overview pages
- `/tickets/[id]` - Individual ticket view/edit - `/tickets/[id]` - Individual ticket view/edit
- `/admin/bridge` - Bridge configuration management - `/admin/bridge` - Bridge configuration management
- `/leafcutter` - Data visualization dashboard
- `/opensearch` - Search dashboard - `/opensearch` - Search dashboard
- `/zammad` - Direct Zammad access - `/zammad` - Direct Zammad access
- `/profile` - User profile management - `/profile` - User profile management
@ -104,6 +102,5 @@ docker-compose -f docker/compose/link.yml up
- **Zammad**: GraphQL queries for ticket data - **Zammad**: GraphQL queries for ticket data
- **Bridge Services**: REST APIs for channel management - **Bridge Services**: REST APIs for channel management
- **Leafcutter**: Embedded iframe integration
- **OpenSearch**: Direct dashboard embedding - **OpenSearch**: Direct dashboard embedding
- **Redis**: Session and cache storage - **Redis**: Session and cache storage

View file

@ -7,13 +7,11 @@ import { SetupModeWarning } from "./SetupModeWarning";
interface InternalLayoutProps extends PropsWithChildren { interface InternalLayoutProps extends PropsWithChildren {
setupModeActive: boolean; setupModeActive: boolean;
leafcutterEnabled: boolean;
} }
export const InternalLayout: FC<InternalLayoutProps> = ({ export const InternalLayout: FC<InternalLayoutProps> = ({
children, children,
setupModeActive, setupModeActive,
leafcutterEnabled,
}) => { }) => {
const [open, setOpen] = useState(true); const [open, setOpen] = useState(true);
@ -24,7 +22,6 @@ export const InternalLayout: FC<InternalLayoutProps> = ({
<Sidebar <Sidebar
open={open} open={open}
setOpen={setOpen} setOpen={setOpen}
leafcutterEnabled={leafcutterEnabled}
/> />
<Grid <Grid
item item

View file

@ -176,13 +176,11 @@ const MenuItem = ({
interface SidebarProps { interface SidebarProps {
open: boolean; open: boolean;
setOpen: (open: boolean) => void; setOpen: (open: boolean) => void;
leafcutterEnabled?: boolean;
} }
export const Sidebar: FC<SidebarProps> = ({ export const Sidebar: FC<SidebarProps> = ({
open, open,
setOpen, setOpen,
leafcutterEnabled = false,
}) => { }) => {
const pathname = usePathname(); const pathname = usePathname();
const { data: session } = useSession(); const { data: session } = useSession();
@ -372,11 +370,11 @@ export const Sidebar: FC<SidebarProps> = ({
}} }}
> >
<MenuItem <MenuItem
name="Dashboards" name="Dashboard"
href="/dashboards" href="/"
Icon={InsightsIcon} Icon={InsightsIcon}
iconSize={20} iconSize={20}
selected={pathname.startsWith("/dashboards")} selected={pathname === "/"}
open={open} open={open}
/> />
<MenuItem <MenuItem

View file

@ -4,6 +4,8 @@ import { FC } from "react";
import { Grid } from "@mui/material"; import { Grid } from "@mui/material";
import Iframe from "react-iframe"; import Iframe from "react-iframe";
const docsUrl = "https://digiresilience.org/docs/link/about/";
export const DocsWrapper: FC = () => ( export const DocsWrapper: FC = () => (
<Grid <Grid
container container
@ -17,7 +19,7 @@ export const DocsWrapper: FC = () => (
> >
<Iframe <Iframe
id="docs" id="docs"
url={"https://digiresilience.org/docs/link/about/"} url={docsUrl}
width="100%" width="100%"
height="100%" height="100%"
frameBorder={0} frameBorder={0}

View file

@ -12,12 +12,10 @@ type LayoutProps = {
export default function Layout({ children }: LayoutProps) { export default function Layout({ children }: LayoutProps) {
const setupModeActive = process.env.SETUP_MODE === "true"; const setupModeActive = process.env.SETUP_MODE === "true";
const leafcutterEnabled = process.env.LEAFCUTTER_ENABLED === "true";
return ( return (
<InternalLayout <InternalLayout
setupModeActive={setupModeActive} setupModeActive={setupModeActive}
leafcutterEnabled={leafcutterEnabled}
> >
{children} {children}
</InternalLayout> </InternalLayout>

View file

@ -2,7 +2,7 @@ import { Metadata } from "next";
import { DefaultDashboard } from "./_components/DefaultDashboard"; import { DefaultDashboard } from "./_components/DefaultDashboard";
export const metadata: Metadata = { export const metadata: Metadata = {
title: "CDR Link - Home", title: "CDR Link - Dashboard",
}; };
export default async function Page() { export default async function Page() {

View file

@ -10,7 +10,6 @@ import {
import Google from "next-auth/providers/google"; import Google from "next-auth/providers/google";
import Credentials from "next-auth/providers/credentials"; import Credentials from "next-auth/providers/credentials";
import Apple from "next-auth/providers/apple"; import Apple from "next-auth/providers/apple";
import { Redis } from "ioredis";
import AzureADProvider from "next-auth/providers/azure-ad"; import AzureADProvider from "next-auth/providers/azure-ad";
import { createLogger } from "@link-stack/logger"; import { createLogger } from "@link-stack/logger";
@ -30,7 +29,7 @@ const fetchRoles = async () => {
}; };
const fetchUser = async (email: string) => { const fetchUser = async (email: string) => {
const url = `${process.env.ZAMMAD_URL}/api/v1/users/search?query=login:${email}&limit=1`; const url = `${process.env.ZAMMAD_URL}/api/v1/users/search?query=${encodeURIComponent(`login:${email}`)}&limit=1`;
const res = await fetch(url, { headers }); const res = await fetch(url, { headers });
const users = await res.json(); const users = await res.json();
const user = users?.[0]; const user = users?.[0];
@ -124,9 +123,9 @@ if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
export const authOptions: NextAuthOptions = { export const authOptions: NextAuthOptions = {
pages: { pages: {
signIn: "/link/login", signIn: "/login",
error: "/link/login", error: "/login",
signOut: "/link/logout", signOut: "/logout",
}, },
providers, providers,
session: { session: {
@ -139,11 +138,6 @@ export const authOptions: NextAuthOptions = {
return roles.includes("admin") || roles.includes("agent"); return roles.includes("admin") || roles.includes("agent");
}, },
session: async ({ session, token }) => { session: async ({ session, token }) => {
// const redis = new Redis(process.env.REDIS_URL);
// const isInvalidated = await redis.get(`invalidated:${token.sub}`);
// if (isInvalidated) {
// return null;
// }
// @ts-ignore // @ts-ignore
session.user.roles = token.roles ?? []; session.user.roles = token.roles ?? [];
// @ts-ignore // @ts-ignore

View file

@ -1,40 +0,0 @@
import { createLogger } from "@link-stack/logger";
const logger = createLogger('link-utils');
export const fetchLeafcutter = async (url: string, options: any) => {
/*
const headers = {
'X-Opensearch-Username': process.env.OPENSEARCH_USER!,
'X-Opensearch-Password': process.env.OPENSEARCH_PASSWORD!,
'X-Leafcutter-User': token.email.toLowerCase()
};
*/
const fetchData = async (url: string, options: any) => {
try {
const res = await fetch(url, options);
const json = await res.json();
return json;
} catch (error) {
logger.error({ error }, "Error occurred");
return null;
}
};
const data = await fetchData(url, options);
if (!data) {
const csrfURL = `${process.env.NEXT_PUBLIC_LEAFCUTTER_URL}/api/auth/csrf`;
const csrfData = await fetchData(csrfURL, {});
const authURL = `${process.env.NEXT_PUBLIC_LEAFCUTTER_URL}/api/auth/callback/credentials`;
const authData = await fetchData(authURL, { method: "POST" });
if (!authData) {
return null;
} else {
return await fetchData(url, options);
}
} else {
return data;
}
};

View file

@ -1 +1,4 @@
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { receiveMessage as POST } from "@link-stack/bridge-ui"; export { receiveMessage as POST } from "@link-stack/bridge-ui";

View file

@ -1 +1,4 @@
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { relinkBot as POST } from "@link-stack/bridge-ui"; export { relinkBot as POST } from "@link-stack/bridge-ui";

View file

@ -1 +1,4 @@
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { getBot as GET } from "@link-stack/bridge-ui"; export { getBot as GET } from "@link-stack/bridge-ui";

View file

@ -1 +1,4 @@
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { sendMessage as POST } from "@link-stack/bridge-ui"; export { sendMessage as POST } from "@link-stack/bridge-ui";

View file

@ -1,3 +1,6 @@
import { handleWebhook } from "@link-stack/bridge-ui"; import { handleWebhook } from "@link-stack/bridge-ui";
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { handleWebhook as GET, handleWebhook as POST }; export { handleWebhook as GET, handleWebhook as POST };

View file

@ -1,6 +1,9 @@
import NextAuth from "next-auth"; import NextAuth from "next-auth";
import { authOptions } from "@/app/_lib/authentication"; import { authOptions } from "@/app/_lib/authentication";
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
const handler = NextAuth(authOptions); const handler = NextAuth(authOptions);
export { handler as GET, handler as POST }; export { handler as GET, handler as POST };

View file

@ -1,11 +1,17 @@
import { NextRequest, NextResponse } from "next/server"; import { NextRequest, NextResponse } from "next/server";
import { createLogger } from "@link-stack/logger"; import { createLogger } from "@link-stack/logger";
import { getWorkerUtils } from "@link-stack/bridge-common"; import { getWorkerUtils } from "@link-stack/bridge-common";
import { timingSafeEqual } from "crypto";
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
const logger = createLogger('formstack-webhook'); const logger = createLogger('formstack-webhook');
export async function POST(req: NextRequest): Promise<NextResponse> { export async function POST(req: NextRequest): Promise<NextResponse> {
try { try {
const clientIp = req.headers.get('x-forwarded-for') || req.headers.get('x-real-ip') || 'unknown';
// Get the shared secret from environment variable // Get the shared secret from environment variable
const expectedSecret = process.env.FORMSTACK_SHARED_SECRET; const expectedSecret = process.env.FORMSTACK_SHARED_SECRET;
@ -21,19 +27,47 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
const body = await req.json(); const body = await req.json();
const receivedSecret = body.HandshakeKey; const receivedSecret = body.HandshakeKey;
// Verify the shared secret // Validate that secret is provided
if (receivedSecret !== expectedSecret) { if (!receivedSecret || typeof receivedSecret !== 'string') {
logger.warn({ receivedSecret }, 'Invalid shared secret received'); logger.warn({ clientIp }, 'Missing or invalid HandshakeKey');
return NextResponse.json( return NextResponse.json(
{ error: "Unauthorized" }, { error: "Unauthorized" },
{ status: 401 } { status: 401 }
); );
} }
// Log the entire webhook payload to see the data structure // Use timing-safe comparison to prevent timing attacks
const expectedBuffer = Buffer.from(expectedSecret);
const receivedBuffer = Buffer.from(receivedSecret);
let secretsMatch = false;
if (expectedBuffer.length === receivedBuffer.length) {
try {
secretsMatch = timingSafeEqual(expectedBuffer, receivedBuffer);
} catch (e) {
secretsMatch = false;
}
}
if (!secretsMatch) {
logger.warn({
secretMatch: false,
timestamp: new Date().toISOString(),
userAgent: req.headers.get('user-agent'),
clientIp
}, 'Invalid shared secret received');
return NextResponse.json(
{ error: "Unauthorized" },
{ status: 401 }
);
}
// Log webhook receipt with non-PII metadata only
logger.info({ logger.info({
payload: body, formId: body.FormID,
headers: Object.fromEntries(req.headers.entries()), uniqueId: body.UniqueID,
timestamp: new Date().toISOString(),
fieldCount: Object.keys(body).length
}, 'Received Formstack webhook'); }, 'Received Formstack webhook');
// Enqueue a bridge-worker task to process this form submission // Enqueue a bridge-worker task to process this form submission

View file

@ -1,12 +1,6 @@
import { NextRequest, NextResponse } from "next/server"; import { NextRequest, NextResponse } from "next/server";
import { Redis } from "ioredis";
import { getToken } from "next-auth/jwt";
export async function POST(request: NextRequest) { export async function POST(request: NextRequest) {
const token = await getToken({
req: request,
secret: process.env.NEXTAUTH_SECRET,
});
const allCookies = request.cookies.getAll(); const allCookies = request.cookies.getAll();
const zammadURL = process.env.ZAMMAD_URL ?? "http://zammad-nginx:8080"; const zammadURL = process.env.ZAMMAD_URL ?? "http://zammad-nginx:8080";
const signOutURL = `${zammadURL}/api/v1/signout`; const signOutURL = `${zammadURL}/api/v1/signout`;
@ -18,7 +12,21 @@ export async function POST(request: NextRequest) {
.join("; "), .join("; "),
}; };
await fetch(signOutURL, { headers }); // Add timeout to prevent hanging requests
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 5000); // 5 second timeout
try {
await fetch(signOutURL, {
headers,
signal: controller.signal
});
} catch (error) {
// Log but don't fail logout if Zammad signout fails
console.error('Zammad signout failed:', error);
} finally {
clearTimeout(timeout);
}
const cookiePrefixesToRemove = ["_zammad"]; const cookiePrefixesToRemove = ["_zammad"];
const response = NextResponse.json({ message: "ok" }); const response = NextResponse.json({ message: "ok" });
@ -31,8 +39,5 @@ export async function POST(request: NextRequest) {
} }
} }
const redis = new Redis(process.env.REDIS_URL);
await redis.setex(`invalidated:${token.sub}`, 24 * 60 * 60, "1");
return response; return response;
} }

View file

@ -2,6 +2,6 @@
set -e set -e
echo "running migrations" echo "running migrations"
(cd ../bridge-migrations/ && npm run migrate:up:all) (cd ../bridge-migrations/ && pnpm run migrate:up:all)
echo "starting link" echo "starting link"
exec dumb-init npm run start exec dumb-init pnpm run start

View file

@ -52,19 +52,44 @@ const checkRewrites = async (request: NextRequestWithAuth) => {
}; };
if (request.nextUrl.pathname.startsWith("/dashboards")) { if (request.nextUrl.pathname.startsWith("/dashboards")) {
return rewriteURL( // Extract the path after /dashboards and append to OpenSearch URL
request, let path = request.nextUrl.pathname.slice("/dashboards".length);
`${linkBaseURL}/dashboards`, if (path.startsWith("/")) {
opensearchBaseURL, path = path.slice(1);
headers, }
); const search = request.nextUrl.search;
const destinationURL = `${opensearchBaseURL}/${path}${search}`;
logger.debug({
pathname: request.nextUrl.pathname,
path,
search,
destinationURL
}, "OpenSearch proxy");
const requestHeaders = new Headers(request.headers);
requestHeaders.delete("x-forwarded-user");
requestHeaders.delete("x-forwarded-roles");
requestHeaders.delete("connection");
for (const [key, value] of Object.entries(headers)) {
requestHeaders.set(key, value as string);
}
return NextResponse.rewrite(new URL(destinationURL), {
request: { headers: requestHeaders },
});
} }
const isDev = process.env.NODE_ENV === "development"; const isDev = process.env.NODE_ENV === "development";
const nonce = Buffer.from(crypto.randomUUID()).toString("base64"); const nonce = Buffer.from(crypto.randomUUID()).toString("base64");
// Allow digiresilience.org for embedding documentation
const frameSrcDirective = `frame-src 'self' https://digiresilience.org;`;
const cspHeader = ` const cspHeader = `
default-src 'self'; default-src 'self';
frame-src 'self' https://digiresilience.org; ${frameSrcDirective}
connect-src 'self'; connect-src 'self';
script-src 'self' 'nonce-${nonce}' 'strict-dynamic' ${isDev ? "'unsafe-eval'" : ""}; script-src 'self' 'nonce-${nonce}' 'strict-dynamic' ${isDev ? "'unsafe-eval'" : ""};
style-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline';
@ -98,6 +123,16 @@ const checkRewrites = async (request: NextRequestWithAuth) => {
contentSecurityPolicyHeaderValue, contentSecurityPolicyHeaderValue,
); );
// Additional security headers
response.headers.set("X-Frame-Options", "SAMEORIGIN");
response.headers.set("X-Content-Type-Options", "nosniff");
response.headers.set("Referrer-Policy", "strict-origin-when-cross-origin");
response.headers.set("X-XSS-Protection", "1; mode=block");
response.headers.set(
"Permissions-Policy",
"camera=(), microphone=(), geolocation=()"
);
return response; return response;
} }

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/link", "name": "@link-stack/link",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "next dev -H 0.0.0.0", "dev": "next dev -H 0.0.0.0",
@ -16,10 +16,10 @@
"@emotion/react": "^11.14.0", "@emotion/react": "^11.14.0",
"@emotion/server": "^11.11.0", "@emotion/server": "^11.11.0",
"@emotion/styled": "^11.14.1", "@emotion/styled": "^11.14.1",
"@link-stack/bridge-common": "*", "@link-stack/bridge-common": "workspace:*",
"@link-stack/bridge-ui": "*", "@link-stack/bridge-ui": "workspace:*",
"@link-stack/logger": "*", "@link-stack/logger": "workspace:*",
"@link-stack/ui": "*", "@link-stack/ui": "workspace:*",
"@mui/icons-material": "^6", "@mui/icons-material": "^6",
"@mui/material": "^6", "@mui/material": "^6",
"@mui/material-nextjs": "^6", "@mui/material-nextjs": "^6",
@ -41,9 +41,8 @@
"sharp": "^0.34.4" "sharp": "^0.34.4"
}, },
"devDependencies": { "devDependencies": {
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@types/node": "^24.7.0", "@types/node": "^24.7.0",
"@types/react": "19.2.2", "@types/react": "19.2.2"
"@types/uuid": "^11.0.0"
} }
} }

View file

@ -4,6 +4,7 @@ x-global-vars: &common-global-variables
x-bridge-vars: &common-bridge-variables x-bridge-vars: &common-bridge-variables
DATABASE_HOST: "postgresql" DATABASE_HOST: "postgresql"
DATABASE_NAME: "cdr" DATABASE_NAME: "cdr"
DATABASE_PORT: "5432"
DATABASE_USER: ${DATABASE_USER} DATABASE_USER: ${DATABASE_USER}
DATABASE_ROOT_OWNER: "root" DATABASE_ROOT_OWNER: "root"
DATABASE_ROOT_PASSWORD: ${BRIDGE_DATABASE_ROOT_PASSWORD} DATABASE_ROOT_PASSWORD: ${BRIDGE_DATABASE_ROOT_PASSWORD}
@ -24,19 +25,10 @@ x-bridge-vars: &common-bridge-variables
LOG_LEVEL: "debug" LOG_LEVEL: "debug"
ZAMMAD_API_TOKEN: ${ZAMMAD_API_TOKEN} ZAMMAD_API_TOKEN: ${ZAMMAD_API_TOKEN}
ZAMMAD_URL: ${ZAMMAD_URL} ZAMMAD_URL: ${ZAMMAD_URL}
FORMSTACK_SHARED_SECRET: ${FORMSTACK_SHARED_SECRET}
FORMSTACK_FIELD_MAPPING: ${FORMSTACK_FIELD_MAPPING}
services: services:
bridge-frontend:
build:
context: ../../
dockerfile: ./apps/bridge-frontend/Dockerfile
container_name: bridge-frontend
image: registry.gitlab.com/digiresilience/link/link-stack/bridge-frontend:${LINK_STACK_VERSION}
restart: ${RESTART}
ports:
- 8006:3000
environment: *common-bridge-variables
bridge-worker: bridge-worker:
build: build:
context: ../../ context: ../../

View file

@ -1,7 +1,7 @@
# frozen_string_literal: true # frozen_string_literal: true
# uninstall # uninstall
package_names = %w[Hardening Leafcutter Bridge] package_names = %w[Hardening Bridge]
package_names.each do |name| package_names.each do |name|
puts "Attempting to uninstall #{name} package..." puts "Attempting to uninstall #{name} package..."

15985
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -1,17 +1,17 @@
{ {
"name": "@link-stack", "name": "@link-stack",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "Link from the Center for Digital Resilience", "description": "Link from the Center for Digital Resilience",
"scripts": { "scripts": {
"dev": "dotenv -- turbo dev", "dev": "dotenv -- turbo dev",
"build": "dotenv -- turbo build", "build": "dotenv -- turbo build",
"migrate": "dotenv -- npm run migrate --workspace=database", "migrate": "dotenv -- pnpm --filter database run migrate",
"lint": "dotenv turbo lint", "lint": "dotenv turbo lint",
"update-version": "find . -name 'package.json' -exec sed -i -E 's/\"version\": \"[^\"]+\"/\"version\": \"3.2.0b3\"/' {} +", "update-version": "node --experimental-strip-types scripts/update-version.ts",
"upgrade:setup": "npm i -g npm-check-updates", "upgrade:setup": "pnpm i -g npm-check-updates",
"upgrade:check": "ncu && ncu -ws", "upgrade:check": "ncu && ncu -ws",
"upgrade": "ncu -u && ncu -ws -u && npm i", "upgrade": "ncu -u && ncu -ws -u && pnpm i",
"clean": "rm -f package-lock.json && rm -rf node_modules && rm -rf .turbo && rm -rf apps/*/node_modules && rm -rf apps/*/package-lock.json && rm -rf apps/*/.next && rm -rf packages/*/node_modules && rm -rf apps/*/.next && rm -rf packages/*/.turbo && rm -rf packages/*/build && rm -rf docker/zammad/addons/*", "clean": "rm -f pnpm-lock.yaml && rm -rf node_modules && rm -rf .turbo && rm -rf apps/*/node_modules && rm -rf apps/*/.next && rm -rf packages/*/node_modules && rm -rf apps/*/.next && rm -rf packages/*/.turbo && rm -rf packages/*/build && rm -rf docker/zammad/addons/*",
"docker:all:up": "node docker/scripts/docker.js all up", "docker:all:up": "node docker/scripts/docker.js all up",
"docker:all:down": "node docker/scripts/docker.js all down", "docker:all:down": "node docker/scripts/docker.js all down",
"docker:all:build": "node docker/scripts/docker.js all build", "docker:all:build": "node docker/scripts/docker.js all build",
@ -34,16 +34,11 @@
"docker:bridge:build": "node docker/scripts/docker.js bridge build", "docker:bridge:build": "node docker/scripts/docker.js bridge build",
"docker:zammad:restart": "docker restart zammad-railsserver zammad-scheduler" "docker:zammad:restart": "docker restart zammad-railsserver zammad-scheduler"
}, },
"workspaces": [
"apps/*",
"packages/*",
"database/*"
],
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://gitlab.com/digiresilience/link/link-stack.git" "url": "git+https://gitlab.com/digiresilience/link/link-stack.git"
}, },
"packageManager": "npm@11.6.1", "packageManager": "pnpm@9.15.4",
"author": "Darren Clarke", "author": "Darren Clarke",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"devDependencies": { "devDependencies": {
@ -57,15 +52,17 @@
"turbo": "^2.5.8", "turbo": "^2.5.8",
"typescript": "latest" "typescript": "latest"
}, },
"overrides": { "pnpm": {
"react": "19.2.0", "overrides": {
"react-dom": "19.2.0", "react": "19.2.0",
"@types/react": "19.2.2", "react-dom": "19.2.0",
"@types/react-dom": "19.2.1", "@types/react": "19.2.2",
"@mui/material": "^6.5.0" "@types/react-dom": "19.2.1",
"@mui/material": "^6.5.0"
}
}, },
"engines": { "engines": {
"npm": ">=10", "pnpm": ">=9",
"node": ">=20" "node": ">=20"
} }
} }

View file

@ -9,3 +9,12 @@ export type {
User, User,
} from "./lib/database.js"; } from "./lib/database.js";
export { getWorkerUtils } from "./lib/utils.js"; export { getWorkerUtils } from "./lib/utils.js";
export {
getMaxAttachmentSize,
getMaxTotalAttachmentSize,
MAX_ATTACHMENTS,
} from "./lib/config/attachments.js";
export {
getSignalAutoGroupNameTemplate,
buildSignalGroupName,
} from "./lib/config/signal.js";

View file

@ -0,0 +1,36 @@
/**
* Attachment size configuration for messaging channels
*
* Environment variables:
* - BRIDGE_MAX_ATTACHMENT_SIZE_MB: Maximum size for a single attachment in MB (default: 50)
*/
/**
 * Get the maximum attachment size in bytes from environment variable
 * Defaults to 50MB if not set
 */
export function getMaxAttachmentSize(): number {
  const BYTES_PER_MB = 1024 * 1024;
  const raw = process.env.BRIDGE_MAX_ATTACHMENT_SIZE_MB;

  // No override configured (unset or empty): use the 50MB default silently.
  if (!raw) {
    return 50 * BYTES_PER_MB;
  }

  const megabytes = parseInt(raw, 10);

  // Reject non-numeric and non-positive overrides, falling back to the default.
  if (Number.isNaN(megabytes) || megabytes <= 0) {
    console.warn(`Invalid BRIDGE_MAX_ATTACHMENT_SIZE_MB value: ${raw}, using default 50MB`);
    return 50 * BYTES_PER_MB;
  }

  return megabytes * BYTES_PER_MB;
}
/**
 * Get the maximum total size for all attachments in a message
 * This is 4x the single attachment size
 */
export function getMaxTotalAttachmentSize(): number {
  const perAttachmentLimit = getMaxAttachmentSize();
  return 4 * perAttachmentLimit;
}
/**
 * Maximum number of attachments per message.
 *
 * Fixed cap — unlike the size limits in this module, this value is not
 * configurable via an environment variable.
 */
export const MAX_ATTACHMENTS = 10;

View file

@ -0,0 +1,29 @@
/**
* Signal configuration
*
* Environment variables:
* - SIGNAL_AUTO_GROUP_NAME_TEMPLATE: Template for auto-created group names (default: "Support Request: {conversationId}")
* Available placeholders: {conversationId}
*/
/**
* Get the Signal auto-group name template from environment variable
* Defaults to "Support Request: {conversationId}" if not set
*/
export function getSignalAutoGroupNameTemplate(): string {
const template = process.env.SIGNAL_AUTO_GROUP_NAME_TEMPLATE;
if (!template) {
return "Support Request: {conversationId}";
}
return template;
}
/**
 * Build a Signal group name from the template and conversation ID.
 *
 * Fix: `String.prototype.replace` with a string pattern only substitutes the
 * FIRST occurrence; a template containing the placeholder more than once was
 * left partially expanded. split/join replaces every occurrence (and avoids
 * regex-escaping the placeholder). `replaceAll` is not used because it
 * requires an ES2021 lib target.
 *
 * @param conversationId - Identifier substituted for each {conversationId}
 * @returns The fully expanded group name
 */
export function buildSignalGroupName(conversationId: string): string {
  const template = getSignalAutoGroupNameTemplate();
  return template.split('{conversationId}').join(conversationId);
}

View file

@ -138,15 +138,57 @@ export type VoiceLine = Selectable<Database["VoiceLine"]>;
export type Webhook = Selectable<Database["Webhook"]>; export type Webhook = Selectable<Database["Webhook"]>;
export type User = Selectable<Database["User"]>; export type User = Selectable<Database["User"]>;
export const db = new KyselyAuth<Database>({ // Lazy database initialization to avoid errors during build time
dialect: new PostgresDialect({ let _db: KyselyAuth<Database> | undefined;
pool: new Pool({
host: process.env.DATABASE_HOST, function getDb(): KyselyAuth<Database> {
database: process.env.DATABASE_NAME, if (_db) {
port: parseInt(process.env.DATABASE_PORT!), return _db;
user: process.env.DATABASE_USER, }
password: process.env.DATABASE_PASSWORD,
}), // Validate environment variables
}) as any, const DATABASE_HOST = process.env.DATABASE_HOST;
plugins: [new CamelCasePlugin() as any], const DATABASE_NAME = process.env.DATABASE_NAME;
const DATABASE_PORT = process.env.DATABASE_PORT;
const DATABASE_USER = process.env.DATABASE_USER;
const DATABASE_PASSWORD = process.env.DATABASE_PASSWORD;
if (!DATABASE_HOST || !DATABASE_NAME || !DATABASE_PORT || !DATABASE_USER || !DATABASE_PASSWORD) {
throw new Error('Missing required database environment variables: DATABASE_HOST, DATABASE_NAME, DATABASE_PORT, DATABASE_USER, DATABASE_PASSWORD');
}
const port = parseInt(DATABASE_PORT, 10);
if (isNaN(port) || port < 1 || port > 65535) {
throw new Error(`Invalid DATABASE_PORT: ${DATABASE_PORT}. Must be a number between 1 and 65535.`);
}
_db = new KyselyAuth<Database>({
dialect: new PostgresDialect({
pool: new Pool({
host: DATABASE_HOST,
database: DATABASE_NAME,
port,
user: DATABASE_USER,
password: DATABASE_PASSWORD,
}),
}) as any,
plugins: [new CamelCasePlugin() as any],
});
return _db;
}
// Export db as a getter that lazily initializes the database
export const db = new Proxy({} as KyselyAuth<Database>, {
get(_target, prop) {
const instance = getDb();
const value = (instance as any)[prop];
// If it's a function, bind it to the actual instance to preserve 'this' context
if (typeof value === 'function') {
return value.bind(instance);
}
return value;
},
}); });

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/bridge-common", "name": "@link-stack/bridge-common",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"main": "build/main/index.js", "main": "build/main/index.js",
"type": "module", "type": "module",
"author": "Darren Clarke <darren@redaranj.com>", "author": "Darren Clarke <darren@redaranj.com>",
@ -15,8 +15,9 @@
"pg": "^8.16.3" "pg": "^8.16.3"
}, },
"devDependencies": { "devDependencies": {
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"@types/pg": "^8.15.5",
"typescript": "^5.9.3" "typescript": "^5.9.3"
} }
} }

View file

@ -1,13 +1,13 @@
{ {
"name": "@link-stack/bridge-ui", "name": "@link-stack/bridge-ui",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"scripts": { "scripts": {
"build": "tsc -p tsconfig.json" "build": "tsc -p tsconfig.json"
}, },
"dependencies": { "dependencies": {
"@link-stack/bridge-common": "*", "@link-stack/bridge-common": "workspace:*",
"@link-stack/signal-api": "*", "@link-stack/signal-api": "workspace:*",
"@link-stack/ui": "*", "@link-stack/ui": "workspace:*",
"@mui/material": "^6", "@mui/material": "^6",
"@mui/x-data-grid-pro": "^7", "@mui/x-data-grid-pro": "^7",
"kysely": "0.27.5", "kysely": "0.27.5",

View file

@ -38,8 +38,6 @@ export const colors: any = {
helpYellow: "#fff4d5", helpYellow: "#fff4d5",
dwcDarkBlue: "#191847", dwcDarkBlue: "#191847",
hazyMint: "#ecf7f8", hazyMint: "#ecf7f8",
leafcutterElectricBlue: "#4d6aff",
leafcutterLightBlue: "#fafbfd",
waterbearElectricPurple: "#332c83", waterbearElectricPurple: "#332c83",
waterbearLightSmokePurple: "#eff3f8", waterbearLightSmokePurple: "#eff3f8",
bumpedPurple: "#212058", bumpedPurple: "#212058",

View file

@ -0,0 +1,2 @@
// Placeholder entry point for eslint-config package
module.exports = {};

View file

@ -1,10 +1,11 @@
{ {
"name": "@link-stack/eslint-config", "name": "@link-stack/eslint-config",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "amigo's eslint config", "description": "amigo's eslint config",
"main": "index.js",
"author": "Abel Luck <abel@guardianproject.info>", "author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"private": false, "private": true,
"scripts": { "scripts": {
"fmt": "prettier \"profile/**/*.js\" --write" "fmt": "prettier \"profile/**/*.js\" --write"
}, },

View file

@ -0,0 +1,2 @@
// Placeholder entry point for jest-config package
module.exports = {};

View file

@ -1,10 +1,11 @@
{ {
"name": "@link-stack/jest-config", "name": "@link-stack/jest-config",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "", "description": "",
"main": "index.js",
"author": "Abel Luck <abel@guardianproject.info>", "author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"private": false, "private": true,
"engines": { "engines": {
"node": ">=14" "node": ">=14"
}, },

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/logger", "name": "@link-stack/logger",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "Shared logging utility for Link Stack monorepo", "description": "Shared logging utility for Link Stack monorepo",
"main": "./dist/index.js", "main": "./dist/index.js",
"module": "./dist/index.mjs", "module": "./dist/index.mjs",
@ -23,8 +23,8 @@
"pino-pretty": "^13.1.1" "pino-pretty": "^13.1.1"
}, },
"devDependencies": { "devDependencies": {
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"@types/node": "^24.7.0", "@types/node": "^24.7.0",
"eslint": "^9.37.0", "eslint": "^9.37.0",
"tsup": "^8.5.0", "tsup": "^8.5.0",

View file

@ -17,6 +17,7 @@ export const getPinoConfig = (): LoggerOptions => {
timestamp: () => `,"timestamp":"${new Date(Date.now()).toISOString()}"`, timestamp: () => `,"timestamp":"${new Date(Date.now()).toISOString()}"`,
redact: { redact: {
paths: [ paths: [
// Top-level sensitive fields
'password', 'password',
'token', 'token',
'secret', 'secret',
@ -24,11 +25,35 @@ export const getPinoConfig = (): LoggerOptions => {
'apiKey', 'apiKey',
'authorization', 'authorization',
'cookie', 'cookie',
'HandshakeKey',
'receivedSecret',
'access_token',
'refresh_token',
'zammadCsrfToken',
'clientSecret',
// Nested sensitive fields (one level)
'*.password', '*.password',
'*.token', '*.token',
'*.secret', '*.secret',
'*.api_key', '*.api_key',
'*.apiKey', '*.apiKey',
'*.authorization',
'*.cookie',
'*.access_token',
'*.refresh_token',
'*.zammadCsrfToken',
'*.HandshakeKey',
'*.receivedSecret',
'*.clientSecret',
// Common nested patterns
'payload.HandshakeKey',
'headers.authorization',
'headers.cookie',
'headers.Authorization',
'headers.Cookie',
'credentials.password',
'credentials.secret',
'credentials.token',
], ],
censor: '[REDACTED]', censor: '[REDACTED]',
}, },

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/signal-api", "name": "@link-stack/signal-api",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"type": "module", "type": "module",
"main": "build/index.js", "main": "build/index.js",
"exports": { "exports": {
@ -13,8 +13,8 @@
}, },
"devDependencies": { "devDependencies": {
"@openapitools/openapi-generator-cli": "^2.24.0", "@openapitools/openapi-generator-cli": "^2.24.0",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@types/node": "^24", "@types/node": "^24",
"typescript": "^5" "typescript": "^5"
} }

View file

@ -0,0 +1,2 @@
// Placeholder entry point for typescript-config package
module.exports = {};

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/typescript-config", "name": "@link-stack/typescript-config",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "Shared TypeScript config", "description": "Shared TypeScript config",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"author": "Abel Luck <abel@guardianproject.info>", "author": "Abel Luck <abel@guardianproject.info>",

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/ui", "name": "@link-stack/ui",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "", "description": "",
"scripts": { "scripts": {
"build": "tsc -p tsconfig.json" "build": "tsc -p tsconfig.json"

View file

@ -43,8 +43,6 @@ export const colors: any = {
helpYellow: "#fff4d5", helpYellow: "#fff4d5",
dwcDarkBlue: "#191847", dwcDarkBlue: "#191847",
hazyMint: "#ecf7f8", hazyMint: "#ecf7f8",
leafcutterElectricBlue: "#4d6aff",
leafcutterLightBlue: "#fafbfd",
waterbearElectricPurple: "#332c83", waterbearElectricPurple: "#332c83",
waterbearLightSmokePurple: "#eff3f8", waterbearLightSmokePurple: "#eff3f8",
bumpedPurple: "#212058", bumpedPurple: "#212058",

View file

@ -1,14 +1,14 @@
{ {
"name": "@link-stack/zammad-addon-bridge", "name": "@link-stack/zammad-addon-bridge",
"displayName": "Bridge", "displayName": "Bridge",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "An addon that adds CDR Bridge channels to Zammad.", "description": "An addon that adds CDR Bridge channels to Zammad.",
"scripts": { "scripts": {
"build": "node '../../node_modules/@link-stack/zammad-addon-common/dist/build.js'", "build": "node '../zammad-addon-common/dist/build.js'",
"migrate": "node '../../node_modules/@link-stack/zammad-addon-common/dist/migrate.js'" "migrate": "node '../zammad-addon-common/dist/migrate.js'"
}, },
"dependencies": { "dependencies": {
"@link-stack/zammad-addon-common": "*" "@link-stack/zammad-addon-common": "workspace:*"
}, },
"author": "", "author": "",
"license": "AGPL-3.0-or-later" "license": "AGPL-3.0-or-later"

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/zammad-addon-common", "name": "@link-stack/zammad-addon-common",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "", "description": "",
"bin": { "bin": {
"zpm-build": "./dist/build.js", "zpm-build": "./dist/build.js",
@ -16,7 +16,7 @@
"author": "", "author": "",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"dependencies": { "dependencies": {
"@link-stack/logger": "*", "@link-stack/logger": "workspace:*",
"glob": "^11.0.3" "glob": "^11.0.3"
} }
} }

View file

@ -1,14 +1,14 @@
{ {
"name": "@link-stack/zammad-addon-hardening", "name": "@link-stack/zammad-addon-hardening",
"displayName": "Hardening", "displayName": "Hardening",
"version": "3.2.0b3", "version": "3.3.0-beta.1",
"description": "A Zammad addon that hardens a Zammad instance according to CDR's needs.", "description": "A Zammad addon that hardens a Zammad instance according to CDR's needs.",
"scripts": { "scripts": {
"build": "node '../../node_modules/@link-stack/zammad-addon-common/dist/build.js'", "build": "node '../zammad-addon-common/dist/build.js'",
"migrate": "node '../../node_modules/@link-stack/zammad-addon-common/dist/migrate.js'" "migrate": "node '../zammad-addon-common/dist/migrate.js'"
}, },
"dependencies": { "dependencies": {
"@link-stack/zammad-addon-common": "*" "@link-stack/zammad-addon-common": "workspace:*"
}, },
"author": "", "author": "",
"license": "AGPL-3.0-or-later" "license": "AGPL-3.0-or-later"

10664
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load diff

3
pnpm-workspace.yaml Normal file
View file

@ -0,0 +1,3 @@
packages:
  - 'apps/*'
  - 'packages/*'
  # NOTE(review): the previous npm "workspaces" list also included 'database/*',
  # and the root "migrate" script still runs `pnpm --filter database run migrate`.
  # Confirm whether the database package should be listed here.

115
scripts/update-version.ts Normal file
View file

@ -0,0 +1,115 @@
#!/usr/bin/env -S node --experimental-strip-types
/**
* Updates version numbers across all package.json files in the monorepo
* Usage: node scripts/update-version.ts <version>
* Example: node scripts/update-version.ts 3.3.0-beta.1
*/
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// Derive __dirname for this ES module (import.meta.url -> filesystem path);
// ESM has no built-in __dirname/__filename globals.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Result when a package.json was rewritten successfully.
interface UpdateResult {
  success: true;
  oldVersion: string;
  newVersion: string;
}

// Result when reading, parsing, or writing a package.json failed.
interface UpdateError {
  success: false;
  error: string;
}

// Discriminated union — callers narrow on the `success` flag.
type UpdatePackageResult = UpdateResult | UpdateError;
// Get version from command line args
const newVersion = process.argv[2];

if (!newVersion) {
  console.error('Error: Version number required');
  console.error('Usage: node scripts/update-version.ts <version>');
  console.error('Example: node scripts/update-version.ts 3.3.0-beta.1');
  process.exit(1);
}

// Validate version format (basic check)
// Accepts X.Y.Z plus an optional dash-separated pre-release suffix
// (e.g. 3.3.0-beta.1); semver build metadata (+...) is NOT accepted.
const versionRegex = /^\d+\.\d+\.\d+(-[a-zA-Z0-9.]+)?$/;
if (!versionRegex.test(newVersion)) {
  console.error(`Error: Invalid version format: ${newVersion}`);
  console.error('Expected format: X.Y.Z or X.Y.Z-suffix (e.g., 3.3.0-beta.1)');
  process.exit(1);
}
/**
 * Recursively find all package.json files
 *
 * Descends from `dir`, skipping dependency, VCS, and build-output
 * directories, and accumulates matches into `files` (also the return value).
 */
function findPackageJsonFiles(dir: string, files: string[] = []): string[] {
  const skipDirs = new Set(['node_modules', '.git', '.next', '.turbo', 'build', 'dist']);

  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const fullPath = path.join(dir, entry.name);

    if (entry.isDirectory()) {
      // Recurse only into directories that are not excluded.
      if (!skipDirs.has(entry.name)) {
        findPackageJsonFiles(fullPath, files);
      }
    } else if (entry.name === 'package.json') {
      files.push(fullPath);
    }
  }

  return files;
}
/**
 * Update version in a package.json file
 *
 * Reads the manifest, swaps the `version` field, and rewrites the file with
 * 2-space indentation plus a trailing newline. Failures are returned in the
 * result object rather than thrown.
 */
function updatePackageVersion(filePath: string, version: string): UpdatePackageResult {
  try {
    const pkg = JSON.parse(fs.readFileSync(filePath, 'utf8'));
    const previousVersion = pkg.version;
    pkg.version = version;

    // Normalized output: 2-space indent, newline at end of file.
    const serialized = JSON.stringify(pkg, null, 2) + '\n';
    fs.writeFileSync(filePath, serialized, 'utf8');

    return { success: true, oldVersion: previousVersion, newVersion: version };
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    return { success: false, error: message };
  }
}
// Main execution
console.log(`Updating all package.json files to version ${newVersion}...\n`);

// Walk the repo from one level above scripts/ (the monorepo root).
const rootDir = path.join(__dirname, '..');
const packageFiles = findPackageJsonFiles(rootDir);

let successCount = 0;
let failureCount = 0;

for (const filePath of packageFiles) {
  const relativePath = path.relative(rootDir, filePath);
  const result = updatePackageVersion(filePath, newVersion);

  if (result.success) {
    // NOTE(review): a separator between old and new version appears to have
    // been lost in extraction here (likely an arrow) — confirm against the
    // original file before relying on this log format.
    console.log(`${relativePath}: ${result.oldVersion}${result.newVersion}`);
    successCount++;
  } else {
    console.error(`${relativePath}: ${result.error}`);
    failureCount++;
  }
}

console.log(`\nSummary: ${successCount} updated, ${failureCount} failed`);

// Exit non-zero so CI fails when any manifest could not be updated.
if (failureCount > 0) {
  process.exit(1);
}