Compare commits


17 commits

Author SHA1 Message Date
Darren Clarke
69cb77b8f4 Merge branch 'fix/docker-build-issues' into 'main'
Fix Docker-in-Docker connectivity for GitLab CI

See merge request digiresilience/link/link-stack!23
2026-01-13 15:45:38 +01:00
Darren Clarke
9081d23a5f Fix Docker-in-Docker connectivity for GitLab CI 2026-01-13 15:36:25 +01:00
Darren Clarke
78d2ff66b2 Pin baileys to exact version 6.7.21 2025-12-19 11:37:15 +01:00
Darren Clarke
83dd0eaadc Remove redundant corepack enable from CI 2025-12-17 22:44:05 +01:00
Darren Clarke
41b825c1c7 Update deps 2025-12-17 15:35:28 +01:00
Darren Clarke
b59f588efd Update version 2025-12-06 08:00:15 +01:00
Darren Clarke
b3cf97d102 Update dependencies 2025-12-06 07:57:54 +01:00
Darren Clarke
31eb1d92b4 Fix for sending to WhatsApp user IDs 2025-12-04 13:40:04 +01:00
irl
b82d3cc726 Dummy commit to make CI run again 2025-11-23 10:31:03 +00:00
Darren Clarke
ed807ee645 Update version to 3.3.2 2025-11-22 10:57:18 +01:00
Darren Clarke
91eb32ff49 Update dockerfile copy files 2025-11-22 10:56:13 +01:00
Darren Clarke
2d20d60ddb Update version to 3.3.1 2025-11-21 17:36:14 +01:00
Darren Clarke
3caf1a5ec1 Update version 2025-11-21 17:33:12 +01:00
Darren Clarke
82985e0b9a Remove attachment-related initializer 2025-11-21 17:29:21 +01:00
Darren Clarke
42a7fd4214 Merge branch 'merge-next-release-to-main' into 'main'
WhatsApp/Signal/Formstack/admin updates

See merge request digiresilience/link/link-stack!22
2025-11-21 14:55:28 +01:00
Darren Clarke
d0cc5a21de WhatsApp/Signal/Formstack/admin updates 2025-11-21 14:55:28 +01:00
Darren Clarke
bcecf61a46 Update deps 2025-03-24 12:13:31 +01:00
91 changed files with 12554 additions and 16726 deletions

.gitignore
View file

@@ -29,3 +29,5 @@ signald-state
 project.org
 **/.openapi-generator/
 apps/bridge-worker/scripts/*
+ENVIRONMENT_VARIABLES_MIGRATION.md
+local-scripts/*

View file

@@ -1,4 +1,4 @@
-image: node:20-bookworm-slim
+image: node:22-bookworm-slim
 stages:
   - build
@@ -11,35 +11,41 @@ build-all:
     TURBO_TOKEN: ${TURBO_TOKEN}
     TURBO_TEAM: ${TURBO_TEAM}
     ZAMMAD_URL: ${ZAMMAD_URL}
+    PNPM_HOME: "/pnpm"
   script:
-    - npm install npm@10 -g
-    - npm install -g turbo
-    - npm ci
+    - export PATH="$PNPM_HOME:$PATH"
+    - corepack enable && corepack prepare pnpm@9.15.4 --activate
+    - pnpm add -g turbo
+    - pnpm install --frozen-lockfile
     - turbo build
 .docker-build:
-  image: registry.gitlab.com/digiresilience/link/link-stack/buildx:${CI_COMMIT_REF_NAME}
+  image: registry.gitlab.com/digiresilience/link/link-stack/buildx:main
   services:
     - docker:dind
   stage: docker-build
   variables:
+    DOCKER_HOST: tcp://docker:2375
+    DOCKER_TLS_CERTDIR: ""
     DOCKER_TAG: ${CI_COMMIT_SHORT_SHA}
-    DOCKER_CONTEXT: .
+    BUILD_CONTEXT: .
   only:
     - main
     - develop
     - tags
   script:
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - DOCKER_BUILDKIT=1 docker build --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${DOCKER_CONTEXT}
+    - DOCKER_BUILDKIT=1 docker build --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${BUILD_CONTEXT}
     - docker push ${DOCKER_NS}:${DOCKER_TAG}
 .docker-release:
-  image: registry.gitlab.com/digiresilience/link/link-stack/buildx:${CI_COMMIT_REF_NAME}
+  image: registry.gitlab.com/digiresilience/link/link-stack/buildx:main
   services:
     - docker:dind
   stage: docker-release
   variables:
+    DOCKER_HOST: tcp://docker:2375
+    DOCKER_TLS_CERTDIR: ""
     DOCKER_TAG: ${CI_COMMIT_SHORT_SHA}
     DOCKER_TAG_NEW: ${CI_COMMIT_REF_NAME}
   only:
@@ -189,16 +195,17 @@ zammad-docker-build:
   variables:
     DOCKER_NS: ${CI_REGISTRY}/digiresilience/link/link-stack/zammad
     DOCKERFILE_PATH: ./docker/zammad/Dockerfile
-    DOCKER_CONTEXT: ./docker/zammad
+    BUILD_CONTEXT: ./docker/zammad
+    PNPM_HOME: "/pnpm"
   before_script:
-    - apk --update add nodejs npm
+    - export PATH="$PNPM_HOME:$PATH"
+    - corepack enable && corepack prepare pnpm@9.15.4 --activate
   script:
-    - npm install npm@10 -g
-    - npm install -g turbo
-    - npm ci
+    - pnpm add -g turbo
+    - pnpm install --frozen-lockfile
     - turbo build --force --filter @link-stack/zammad-addon-*
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - DOCKER_BUILDKIT=1 docker build --build-arg EMBEDDED=true --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${DOCKER_CONTEXT}
+    - DOCKER_BUILDKIT=1 docker build --build-arg EMBEDDED=true --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${BUILD_CONTEXT}
     - docker push ${DOCKER_NS}:${DOCKER_TAG}
 zammad-docker-release:
@@ -211,16 +218,17 @@ zammad-standalone-docker-build:
   variables:
     DOCKER_NS: ${CI_REGISTRY}/digiresilience/link/link-stack/zammad-standalone
     DOCKERFILE_PATH: ./docker/zammad/Dockerfile
-    DOCKER_CONTEXT: ./docker/zammad
+    BUILD_CONTEXT: ./docker/zammad
+    PNPM_HOME: "/pnpm"
   before_script:
-    - apk --update add nodejs npm
+    - export PATH="$PNPM_HOME:$PATH"
+    - corepack enable && corepack prepare pnpm@9.15.4 --activate
   script:
-    - npm install npm@10 -g
-    - npm install -g turbo
-    - npm ci
+    - pnpm add -g turbo
+    - pnpm install --frozen-lockfile
     - turbo build --force --filter @link-stack/zammad-addon-*
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - DOCKER_BUILDKIT=1 docker build --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${DOCKER_CONTEXT}
+    - DOCKER_BUILDKIT=1 docker build --pull --no-cache -t ${DOCKER_NS}:${DOCKER_TAG} -f ${DOCKERFILE_PATH} ${BUILD_CONTEXT}
     - docker push ${DOCKER_NS}:${DOCKER_TAG}
 zammad-standalone-docker-release:

View file

@@ -20,3 +20,4 @@ We use [Turborepo](https://turbo.build) to manage development and building of th
 To run a single package:
 - `turbo dev --filter @link-stack/link`

View file

@@ -2,22 +2,28 @@ FROM node:22-bookworm-slim AS base
 FROM base AS builder
 ARG APP_DIR=/opt/bridge-frontend
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
 RUN mkdir -p ${APP_DIR}/
-RUN npm i -g turbo
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
+RUN pnpm add -g turbo
 WORKDIR ${APP_DIR}
 COPY . .
 RUN turbo prune --scope=@link-stack/bridge-frontend --scope=@link-stack/bridge-migrations --docker
 FROM base AS installer
 ARG APP_DIR=/opt/bridge-frontend
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
 WORKDIR ${APP_DIR}
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
 COPY --from=builder ${APP_DIR}/.gitignore .gitignore
 COPY --from=builder ${APP_DIR}/out/json/ .
-COPY --from=builder ${APP_DIR}/out/package-lock.json ./package-lock.json
+COPY --from=builder ${APP_DIR}/out/pnpm-lock.yaml ./pnpm-lock.yaml
-RUN npm ci
+RUN pnpm install --frozen-lockfile
 COPY --from=builder ${APP_DIR}/out/full/ .
-RUN npm i -g turbo
+RUN pnpm add -g turbo
 RUN turbo run build --filter=@link-stack/bridge-frontend --filter=@link-stack/bridge-migrations
 FROM base AS runner
@@ -29,6 +35,9 @@ LABEL maintainer="Darren Clarke <darren@redaranj.com>"
 LABEL org.label-schema.build-date=$BUILD_DATE
 LABEL org.label-schema.version=$VERSION
 ENV APP_DIR ${APP_DIR}
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
 RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
   apt-get install -y --no-install-recommends \
   dumb-init

View file

@@ -1,10 +1,6 @@
 import GoogleProvider from "next-auth/providers/google";
-import { KyselyAdapter } from "@auth/kysely-adapter";
-import { db } from "@link-stack/bridge-common";
 export const authOptions = {
-  // @ts-ignore
-  adapter: KyselyAdapter(db),
   providers: [
     GoogleProvider({
       clientId: process.env.GOOGLE_CLIENT_ID!,

View file

@@ -1,6 +1,9 @@
 import NextAuth from "next-auth";
 import { authOptions } from "@/app/_lib/authentication";
+// Force this route to be dynamic (not statically generated at build time)
+export const dynamic = 'force-dynamic';
 const handler = NextAuth(authOptions);
 export { handler as GET, handler as POST };

View file

@@ -2,6 +2,6 @@
 set -e
 echo "running migrations"
-(cd ../bridge-migrations/ && npm run migrate:up:all)
+(cd ../bridge-migrations/ && pnpm run migrate:up:all)
 echo "starting bridge-frontend"
-exec dumb-init npm run start
+exec dumb-init pnpm run start

View file

@@ -1,23 +1,81 @@
 import { withAuth } from "next-auth/middleware";
+import { NextResponse } from "next/server";
-export default withAuth({
-  pages: {
-    signIn: `/login`,
-  },
-  callbacks: {
-    authorized: ({ token }) => {
-      if (process.env.SETUP_MODE === "true") {
-        return true;
-      }
-      if (token?.email) {
-        return true;
-      }
-      return false;
-    },
-  },
-});
+export default withAuth(
+  function middleware(req) {
+    const isDev = process.env.NODE_ENV === "development";
+    const nonce = Buffer.from(crypto.randomUUID()).toString("base64");
+    // Allow digiresilience.org for embedding documentation
+    const frameSrcDirective = `frame-src 'self' https://digiresilience.org;`;
+    const cspHeader = `
+      default-src 'self';
+      ${frameSrcDirective}
+      connect-src 'self';
+      script-src 'self' 'nonce-${nonce}' 'strict-dynamic' ${isDev ? "'unsafe-eval'" : ""};
+      style-src 'self' 'unsafe-inline';
+      img-src 'self' blob: data:;
+      font-src 'self';
+      object-src 'none';
+      base-uri 'self';
+      form-action 'self';
+      frame-ancestors 'self';
+      upgrade-insecure-requests;
+    `;
+    const contentSecurityPolicyHeaderValue = cspHeader
+      .replace(/\s{2,}/g, " ")
+      .trim();
+    const requestHeaders = new Headers(req.headers);
+    requestHeaders.set("x-nonce", nonce);
+    requestHeaders.set(
+      "Content-Security-Policy",
+      contentSecurityPolicyHeaderValue,
+    );
+    const response = NextResponse.next({
+      request: {
+        headers: requestHeaders,
+      },
+    });
+    response.headers.set(
+      "Content-Security-Policy",
+      contentSecurityPolicyHeaderValue,
+    );
+    // Additional security headers
+    response.headers.set("X-Frame-Options", "SAMEORIGIN");
+    response.headers.set("X-Content-Type-Options", "nosniff");
+    response.headers.set("Referrer-Policy", "strict-origin-when-cross-origin");
+    response.headers.set("X-XSS-Protection", "1; mode=block");
+    response.headers.set(
+      "Permissions-Policy",
+      "camera=(), microphone=(), geolocation=()"
+    );
+    return response;
+  },
+  {
+    pages: {
+      signIn: `/login`,
+    },
+    callbacks: {
+      authorized: ({ token }) => {
+        if (process.env.SETUP_MODE === "true") {
+          return true;
+        }
+        if (token?.email) {
+          return true;
+        }
+        return false;
+      },
+    },
+  },
+);
 export const config = {
   matcher: ["/((?!ws|wss|api|_next/static|_next/image|favicon.ico).*)"],
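The new middleware hands the per-request nonce to the application through the x-nonce request header. As a minimal sketch (not code from this merge request, and assuming the Next.js App Router headers() API), a server-side helper could read it back so pages can attach the nonce to scripts they render, which is what the 'nonce-${nonce}' directive in the CSP above expects:

import { headers } from "next/headers";

// Hypothetical helper: returns the nonce that the middleware stored in the
// "x-nonce" request header, or undefined outside a request context.
export async function getCspNonce(): Promise<string | undefined> {
  const requestHeaders = await headers();
  return requestHeaders.get("x-nonce") ?? undefined;
}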

View file

@@ -1,6 +1,6 @@
 {
   "name": "@link-stack/bridge-frontend",
-  "version": "3.2.0b3",
+  "version": "3.3.5",
   "type": "module",
   "scripts": {
     "dev": "next dev",
@@ -18,25 +18,23 @@
     "@mui/material": "^6",
     "@mui/material-nextjs": "^6",
     "@mui/x-license": "^7",
-    "@link-stack/bridge-common": "*",
-    "@link-stack/bridge-ui": "*",
-    "next": "15.5.4",
+    "@link-stack/bridge-common": "workspace:*",
+    "@link-stack/bridge-ui": "workspace:*",
+    "next": "15.5.9",
     "next-auth": "^4.24.11",
     "react": "19.2.0",
     "react-dom": "19.2.0",
     "sharp": "^0.34.4",
     "tsx": "^4.20.6",
-    "@link-stack/ui": "*"
+    "@link-stack/ui": "workspace:*"
   },
   "devDependencies": {
-    "@link-stack/eslint-config": "*",
-    "@link-stack/typescript-config": "*",
+    "@link-stack/eslint-config": "workspace:*",
+    "@link-stack/typescript-config": "workspace:*",
     "@types/node": "^24",
     "@types/pg": "^8.15.5",
     "@types/react": "^19",
     "@types/react-dom": "^19",
-    "@link-stack/eslint-config": "*",
-    "@link-stack/typescript-config": "*",
     "typescript": "^5"
   }
 }

View file

@@ -1,6 +1,6 @@
 {
   "name": "@link-stack/bridge-migrations",
-  "version": "3.2.0b3",
+  "version": "3.3.5",
   "type": "module",
   "scripts": {
     "migrate:up:all": "tsx migrate.ts up:all",
@@ -9,7 +9,7 @@
     "migrate:down:one": "tsx migrate.ts down:one"
   },
   "dependencies": {
-    "@link-stack/logger": "*",
+    "@link-stack/logger": "workspace:*",
     "dotenv": "^17.2.3",
     "kysely": "0.27.5",
     "pg": "^8.16.3",
@@ -18,8 +18,8 @@
   "devDependencies": {
     "@types/node": "^24",
     "@types/pg": "^8.15.5",
-    "@link-stack/eslint-config": "*",
-    "@link-stack/typescript-config": "*",
+    "@link-stack/eslint-config": "workspace:*",
+    "@link-stack/typescript-config": "workspace:*",
     "typescript": "^5"
   }
 }

View file

@@ -2,30 +2,39 @@ FROM node:22-bookworm-slim AS base
 FROM base AS builder
 ARG APP_DIR=/opt/bridge-whatsapp
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
 RUN mkdir -p ${APP_DIR}/
-RUN npm i -g turbo
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
+RUN pnpm add -g turbo
 WORKDIR ${APP_DIR}
 COPY . .
 RUN turbo prune --scope=@link-stack/bridge-whatsapp --docker
 FROM base AS installer
 ARG APP_DIR=/opt/bridge-whatsapp
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
 WORKDIR ${APP_DIR}
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
 COPY --from=builder ${APP_DIR}/out/json/ .
 COPY --from=builder ${APP_DIR}/out/full/ .
-COPY --from=builder ${APP_DIR}/out/package-lock.json ./package-lock.json
+COPY --from=builder ${APP_DIR}/out/pnpm-lock.yaml ./pnpm-lock.yaml
-RUN npm ci
+RUN pnpm install --frozen-lockfile
-RUN npm i -g turbo
+RUN pnpm add -g turbo
 RUN turbo run build --filter=@link-stack/bridge-whatsapp
 FROM base as runner
 ARG BUILD_DATE
 ARG VERSION
 ARG APP_DIR=/opt/bridge-whatsapp
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
 RUN mkdir -p ${APP_DIR}/
 RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
   apt-get install -y --no-install-recommends \
   dumb-init
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
 WORKDIR ${APP_DIR}
 COPY --from=installer ${APP_DIR} ./
 RUN chown -R node:node ${APP_DIR}

View file

@@ -2,4 +2,4 @@
 set -e
 echo "starting bridge-whatsapp"
-exec dumb-init npm run start
+exec dumb-init pnpm run start

View file

@@ -1,6 +1,6 @@
 {
   "name": "@link-stack/bridge-whatsapp",
-  "version": "3.2.0b3",
+  "version": "3.3.5",
   "main": "build/main/index.js",
   "author": "Darren Clarke <darren@redaranj.com>",
   "license": "AGPL-3.0-or-later",
@@ -9,15 +9,17 @@
     "@hapi/hapi": "^21.4.3",
     "@hapipal/schmervice": "^3.0.0",
     "@hapipal/toys": "^4.0.0",
-    "@link-stack/logger": "*",
-    "@whiskeysockets/baileys": "^6.7.20",
+    "@link-stack/bridge-common": "workspace:*",
+    "@link-stack/logger": "workspace:*",
+    "@whiskeysockets/baileys": "6.7.21",
     "hapi-pino": "^13.0.0",
     "link-preview-js": "^3.1.0"
   },
   "devDependencies": {
-    "@link-stack/eslint-config": "*",
-    "@link-stack/jest-config": "*",
-    "@link-stack/typescript-config": "*",
-    "@types/long": "^5",
+    "@link-stack/eslint-config": "workspace:*",
+    "@link-stack/jest-config": "workspace:*",
+    "@link-stack/typescript-config": "workspace:*",
     "@types/node": "*",
     "dotenv-cli": "^10.0.0",
     "tsx": "^4.20.6",

View file

@ -12,6 +12,11 @@ import makeWASocket, {
} from "@whiskeysockets/baileys"; } from "@whiskeysockets/baileys";
import fs from "fs"; import fs from "fs";
import { createLogger } from "@link-stack/logger"; import { createLogger } from "@link-stack/logger";
import {
getMaxAttachmentSize,
getMaxTotalAttachmentSize,
MAX_ATTACHMENTS,
} from "@link-stack/bridge-common";
const logger = createLogger("bridge-whatsapp-service"); const logger = createLogger("bridge-whatsapp-service");
@ -21,11 +26,7 @@ export default class WhatsappService extends Service {
connections: { [key: string]: any } = {}; connections: { [key: string]: any } = {};
loginConnections: { [key: string]: any } = {}; loginConnections: { [key: string]: any } = {};
static browserDescription: [string, string, string] = [ static browserDescription: [string, string, string] = ["Bridge", "Chrome", "2.0"];
"Bridge",
"Chrome",
"2.0",
];
constructor(server: Server, options: never) { constructor(server: Server, options: never) {
super(server, options); super(server, options);
@ -36,7 +37,24 @@ export default class WhatsappService extends Service {
} }
getBotDirectory(id: string): string { getBotDirectory(id: string): string {
return `${this.getBaseDirectory()}/${id}`; // Validate that ID contains only safe characters (alphanumeric, dash, underscore)
if (!/^[a-zA-Z0-9_-]+$/.test(id)) {
throw new Error(`Invalid bot ID format: ${id}`);
}
// Prevent path traversal by checking for suspicious patterns
if (id.includes("..") || id.includes("/") || id.includes("\\")) {
throw new Error(`Path traversal detected in bot ID: ${id}`);
}
const botPath = `${this.getBaseDirectory()}/${id}`;
// Ensure the resolved path is still within the base directory
if (!botPath.startsWith(this.getBaseDirectory())) {
throw new Error(`Invalid bot path: ${botPath}`);
}
return botPath;
} }
getAuthDirectory(id: string): string { getAuthDirectory(id: string): string {
@ -80,20 +98,14 @@ export default class WhatsappService extends Service {
auth: state, auth: state,
generateHighQualityLinkPreview: false, generateHighQualityLinkPreview: false,
msgRetryCounterMap, msgRetryCounterMap,
shouldIgnoreJid: (jid) => shouldIgnoreJid: (jid) => isJidBroadcast(jid) || isJidStatusBroadcast(jid),
isJidBroadcast(jid) || isJidStatusBroadcast(jid),
}); });
let pause = 5000; let pause = 5000;
socket.ev.process(async (events) => { socket.ev.process(async (events) => {
if (events["connection.update"]) { if (events["connection.update"]) {
const update = events["connection.update"]; const update = events["connection.update"];
const { const { connection: connectionState, lastDisconnect, qr, isNewLogin } = update;
connection: connectionState,
lastDisconnect,
qr,
isNewLogin,
} = update;
if (qr) { if (qr) {
logger.info("got qr code"); logger.info("got qr code");
const botDirectory = this.getBotDirectory(botID); const botDirectory = this.getBotDirectory(botID);
@ -108,8 +120,7 @@ export default class WhatsappService extends Service {
logger.info("opened connection"); logger.info("opened connection");
} else if (connectionState === "close") { } else if (connectionState === "close") {
logger.info({ lastDisconnect }, "connection closed"); logger.info({ lastDisconnect }, "connection closed");
const disconnectStatusCode = (lastDisconnect?.error as any)?.output const disconnectStatusCode = (lastDisconnect?.error as any)?.output?.statusCode;
?.statusCode;
if (disconnectStatusCode === DisconnectReason.restartRequired) { if (disconnectStatusCode === DisconnectReason.restartRequired) {
logger.info("reconnecting after got new login"); logger.info("reconnecting after got new login");
await this.createConnection(botID, server, options); await this.createConnection(botID, server, options);
@ -152,10 +163,7 @@ export default class WhatsappService extends Service {
const verifiedFile = `${directory}/verified`; const verifiedFile = `${directory}/verified`;
if (fs.existsSync(verifiedFile)) { if (fs.existsSync(verifiedFile)) {
const { version, isLatest } = await fetchLatestBaileysVersion(); const { version, isLatest } = await fetchLatestBaileysVersion();
logger.info( logger.info({ version: version.join("."), isLatest }, "using WA version");
{ version: version.join("."), isLatest },
"using WA version",
);
await this.createConnection(botID, this.server, { await this.createConnection(botID, this.server, {
browser: WhatsappService.browserDescription, browser: WhatsappService.browserDescription,
@ -166,10 +174,7 @@ export default class WhatsappService extends Service {
} }
} }
private async queueMessage( private async queueMessage(botID: string, webMessageInfo: proto.IWebMessageInfo) {
botID: string,
webMessageInfo: proto.IWebMessageInfo,
) {
const { const {
key: { id, fromMe, remoteJid }, key: { id, fromMe, remoteJid },
message, message,
@ -182,11 +187,9 @@ export default class WhatsappService extends Service {
"Message field", "Message field",
); );
} }
const isValidMessage = const isValidMessage = message && remoteJid !== "status@broadcast" && !fromMe;
message && remoteJid !== "status@broadcast" && !fromMe;
if (isValidMessage) { if (isValidMessage) {
const { audioMessage, documentMessage, imageMessage, videoMessage } = const { audioMessage, documentMessage, imageMessage, videoMessage } = message;
message;
const isMediaMessage = const isMediaMessage =
audioMessage || documentMessage || imageMessage || videoMessage; audioMessage || documentMessage || imageMessage || videoMessage;
@ -266,10 +269,7 @@ export default class WhatsappService extends Service {
} }
} }
private async queueUnreadMessages( private async queueUnreadMessages(botID: string, messages: proto.IWebMessageInfo[]) {
botID: string,
messages: proto.IWebMessageInfo[],
) {
for await (const message of messages) { for await (const message of messages) {
await this.queueMessage(botID, message); await this.queueMessage(botID, message);
} }
@ -312,10 +312,7 @@ export default class WhatsappService extends Service {
} }
} }
async register( async register(botID: string, callback?: AuthCompleteCallback): Promise<void> {
botID: string,
callback?: AuthCompleteCallback,
): Promise<void> {
const { version } = await fetchLatestBaileysVersion(); const { version } = await fetchLatestBaileysVersion();
await this.createConnection( await this.createConnection(
botID, botID,
@ -333,16 +330,57 @@ export default class WhatsappService extends Service {
attachments?: Array<{ data: string; filename: string; mime_type: string }>, attachments?: Array<{ data: string; filename: string; mime_type: string }>,
): Promise<void> { ): Promise<void> {
const connection = this.connections[botID]?.socket; const connection = this.connections[botID]?.socket;
const recipient = `${phoneNumber.replace(/\D+/g, "")}@s.whatsapp.net`; const digits = phoneNumber.replace(/\D+/g, "");
// LIDs are 15+ digits, phone numbers with country code are typically 10-14 digits
const suffix = digits.length > 14 ? "@lid" : "@s.whatsapp.net";
const recipient = `${digits}${suffix}`;
// Send text message if provided // Send text message if provided
if (message) { if (message) {
await connection.sendMessage(recipient, { text: message }); await connection.sendMessage(recipient, { text: message });
} }
// Send attachments if provided // Send attachments if provided with size validation
if (attachments && attachments.length > 0) { if (attachments && attachments.length > 0) {
const MAX_ATTACHMENT_SIZE = getMaxAttachmentSize();
const MAX_TOTAL_SIZE = getMaxTotalAttachmentSize();
if (attachments.length > MAX_ATTACHMENTS) {
throw new Error(
`Too many attachments: ${attachments.length} (max ${MAX_ATTACHMENTS})`,
);
}
let totalSize = 0;
for (const attachment of attachments) { for (const attachment of attachments) {
// Calculate size before converting to buffer
const estimatedSize = (attachment.data.length * 3) / 4;
if (estimatedSize > MAX_ATTACHMENT_SIZE) {
logger.warn(
{
filename: attachment.filename,
size: estimatedSize,
maxSize: MAX_ATTACHMENT_SIZE,
},
"Attachment exceeds size limit, skipping",
);
continue;
}
totalSize += estimatedSize;
if (totalSize > MAX_TOTAL_SIZE) {
logger.warn(
{
totalSize,
maxTotalSize: MAX_TOTAL_SIZE,
},
"Total attachment size exceeds limit, skipping remaining",
);
break;
}
const buffer = Buffer.from(attachment.data, "base64"); const buffer = Buffer.from(attachment.data, "base64");
if (attachment.mime_type.startsWith("image/")) { if (attachment.mime_type.startsWith("image/")) {
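The size check in this hunk estimates the decoded size directly from the base64 string ((data.length * 3) / 4), so oversized attachments are skipped before Buffer.from allocates memory. A small illustrative sketch of the same arithmetic (the names and the 5 MB figure are hypothetical, not values from the repository):

// Base64 encodes every 3 bytes as 4 characters, so decoded ≈ encoded * 3 / 4.
function estimateBase64Bytes(base64Data: string): number {
  return (base64Data.length * 3) / 4;
}

const encoded = "A".repeat(4_000_000);            // ~4 MB of base64 text
const approxBytes = estimateBase64Bytes(encoded); // ≈ 3,000,000 decoded bytes
const withinLimit = approxBytes <= 5 * 1024 * 1024; // true against a hypothetical 5 MB cap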

View file

@@ -8,7 +8,7 @@
     "outDir": "build/main",
     "rootDir": "src",
     "skipLibCheck": true,
-    "types": ["node", "long"],
+    "types": ["node"],
     "lib": ["es2020", "DOM"],
     "composite": true
   },

View file

@@ -2,26 +2,35 @@ FROM node:22-bookworm-slim AS base
 FROM base AS builder
 ARG APP_DIR=/opt/bridge-worker
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
 RUN mkdir -p ${APP_DIR}/
-RUN npm i -g turbo
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
+RUN pnpm add -g turbo
 WORKDIR ${APP_DIR}
 COPY . .
 RUN turbo prune --scope=@link-stack/bridge-worker --docker
 FROM base AS installer
 ARG APP_DIR=/opt/bridge-worker
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
 WORKDIR ${APP_DIR}
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
 COPY --from=builder ${APP_DIR}/out/json/ .
 COPY --from=builder ${APP_DIR}/out/full/ .
-COPY --from=builder ${APP_DIR}/out/package-lock.json ./package-lock.json
+COPY --from=builder ${APP_DIR}/out/pnpm-lock.yaml ./pnpm-lock.yaml
-RUN npm ci
+RUN pnpm install --frozen-lockfile
-RUN npm i -g turbo
+RUN pnpm add -g turbo
 RUN turbo run build --filter=@link-stack/bridge-worker
 FROM base as runner
 ARG BUILD_DATE
 ARG VERSION
 ARG APP_DIR=/opt/bridge-worker
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
+RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
 RUN mkdir -p ${APP_DIR}/
 RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
   apt-get install -y --no-install-recommends \

View file

@@ -88,9 +88,6 @@ Required environment variables:
 ### Common Tasks
 - `notify-webhooks` - Send webhook notifications
-### Leafcutter Tasks
-- `import-leafcutter` - Import data to Leafcutter
 - `import-label-studio` - Import Label Studio annotations
 ## Architecture

View file

@@ -1 +1,2 @@
 */1 * * * * fetch-signal-messages ?max=1&id=fetchSignalMessagesCron {"scheduleTasks": "true"}
+*/2 * * * * check-group-membership ?max=1&id=checkGroupMembershipCron {}

View file

@@ -2,4 +2,4 @@
 set -e
 echo "starting bridge-worker"
-exec dumb-init npm run start
+exec dumb-init pnpm run start

View file

@@ -1,7 +1,6 @@
-import type {} from "graphile-config";
 import type {} from "graphile-worker";
-const preset: GraphileConfig.Preset = {
+const preset: any = {
   worker: {
     connectionString: process.env.DATABASE_URL,
     maxPoolSize: process.env.BRIDGE_WORKER_POOL_SIZE

View file

@@ -3,7 +3,7 @@ import { createLogger } from "@link-stack/logger";
 import * as path from "path";
 import { fileURLToPath } from "url";
-const logger = createLogger('bridge-worker');
+const logger = createLogger("bridge-worker");
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
@@ -32,6 +32,15 @@ const main = async () => {
 };
 main().catch((err) => {
-  logger.error({ error: err }, 'Worker failed to start');
+  logger.error(
+    {
+      error: err,
+      message: err.message,
+      stack: err.stack,
+      name: err.name,
+    },
+    "Worker failed to start",
+  );
+  console.error("Full error:", err);
   process.exit(1);
 });

View file

@@ -1,11 +1,6 @@
 /* eslint-disable camelcase */
 // import { SavedVoiceProvider } from "@digiresilience/bridge-db";
 import Twilio from "twilio";
-import { CallInstance } from "twilio/lib/rest/api/v2010/account/call";
-import { Zammad, getOrCreateUser } from "./zammad.js";
-import { createLogger } from "@link-stack/logger";
-const logger = createLogger('bridge-worker-common');
 type SavedVoiceProvider = any;
@@ -23,51 +18,3 @@ export const twilioClientFor = (
   });
 };
-export const createZammadTicket = async (
-  call: CallInstance,
-  mp3: Buffer,
-): Promise<void> => {
-  const title = `Call from ${call.fromFormatted} at ${call.startTime}`;
-  const body = `<ul>
-  <li>Caller: ${call.fromFormatted}</li>
-  <li>Service Number: ${call.toFormatted}</li>
-  <li>Call Duration: ${call.duration} seconds</li>
-  <li>Start Time: ${call.startTime}</li>
-  <li>End Time: ${call.endTime}</li>
-  </ul>
-  <p>See the attached recording.</p>`;
-  const filename = `${call.sid}-${call.startTime}.mp3`;
-  const zammad = Zammad(
-    {
-      token: "EviH_WL0p6YUlCoIER7noAZEAPsYA_fVU4FZCKdpq525Vmzzvl8d7dNuP_8d-Amb",
-    },
-    "https://demo.digiresilience.org",
-  );
-  try {
-    const customer = await getOrCreateUser(zammad, call.fromFormatted);
-    await zammad.ticket.create({
-      title,
-      group: "Finances",
-      note: "This ticket was created automaticaly from a recorded phone call.",
-      customer_id: customer.id,
-      article: {
-        body,
-        subject: title,
-        content_type: "text/html",
-        type: "note",
-        attachments: [
-          {
-            filename,
-            data: mp3.toString("base64"),
-            "mime-type": "audio/mpeg",
-          },
-        ],
-      },
-    });
-  } catch (error: any) {
-    if (error.isBoom) {
-      logger.error({ output: error.output }, 'Zammad ticket creation failed');
-      throw new Error("Failed to create zamamd ticket");
-    }
-  }
-};

View file

@ -0,0 +1,272 @@
import { createLogger } from "@link-stack/logger";
const logger = createLogger('formstack-field-mapping');
/**
* Field mapping configuration for Formstack to Zammad integration
*
* This configuration is completely flexible - you define your own internal field names
* and map them to both Formstack source fields and Zammad custom fields.
*/
export interface FieldMappingConfig {
/**
* Map internal field keys to Formstack field names
*
* Required keys (system):
* - formId: The Formstack Form ID field
* - uniqueId: The Formstack submission unique ID field
*
* Optional keys with special behavior:
* - email: Used for user lookup/creation (if provided)
* - phone: Used for user lookup/creation (if provided)
* - signalAccount: Used for Signal-based user lookup (tried first before phone)
* - name: User's full name (can be nested object with first/last, used in user creation)
* - organization: Used in ticket title template placeholder {organization}
* - typeOfSupport: Used in ticket title template placeholder {typeOfSupport}
* - descriptionOfIssue: Used as article subject (defaults to "Support Request" if not provided)
*
* All other keys are completely arbitrary and defined by your form.
*/
sourceFields: Record<string, string>;
/**
* Map Zammad custom field names to internal field keys (from sourceFields)
*
* Example:
* {
* "us_state": "state", // Zammad field "us_state" gets value from sourceFields["state"]
* "zip_code": "zipCode", // Zammad field "zip_code" gets value from sourceFields["zipCode"]
* "custom_field": "myField" // Any custom field mapping
* }
*
* The values in this object must correspond to keys in sourceFields.
*/
zammadFields: Record<string, string>;
/**
* Configuration for ticket creation
*/
ticket: {
/** Zammad group name to assign tickets to */
group: string;
/** Article type name (e.g., "note", "cdr_signal", "email") */
defaultArticleType: string;
/**
* Template for ticket title
* Supports placeholders: {name}, {organization}, {typeOfSupport}
* Placeholders reference internal field keys from sourceFields
*/
titleTemplate?: string;
};
/**
* Configuration for extracting nested field values
*/
nestedFields?: {
/**
* How to extract first/last name from a nested Name field
* Example: { firstNamePath: "first", lastNamePath: "last" }
* for a field like { "Name": { "first": "John", "last": "Doe" } }
*/
name?: {
firstNamePath?: string;
lastNamePath?: string;
};
};
}
let cachedMapping: FieldMappingConfig | null = null;
/**
* Load field mapping configuration from environment variable (REQUIRED)
*/
export function loadFieldMapping(): FieldMappingConfig {
if (cachedMapping) {
return cachedMapping;
}
const configJson = process.env.FORMSTACK_FIELD_MAPPING;
if (!configJson) {
throw new Error(
'FORMSTACK_FIELD_MAPPING environment variable is required. ' +
'Please set it to a JSON string containing your field mapping configuration.'
);
}
logger.info('Loading Formstack field mapping from environment variable');
try {
const config = JSON.parse(configJson) as FieldMappingConfig;
// Validate required sections exist
if (!config.sourceFields || typeof config.sourceFields !== 'object') {
throw new Error('Invalid field mapping configuration: sourceFields must be an object');
}
if (!config.zammadFields || typeof config.zammadFields !== 'object') {
throw new Error('Invalid field mapping configuration: zammadFields must be an object');
}
if (!config.ticket || typeof config.ticket !== 'object') {
throw new Error('Invalid field mapping configuration: ticket must be an object');
}
// Validate required ticket fields
if (!config.ticket.group) {
throw new Error('Invalid field mapping configuration: ticket.group is required');
}
if (!config.ticket.defaultArticleType) {
throw new Error('Invalid field mapping configuration: ticket.defaultArticleType is required');
}
// Validate required source fields
const systemRequiredFields = ['formId', 'uniqueId'];
for (const field of systemRequiredFields) {
if (!config.sourceFields[field]) {
throw new Error(`Invalid field mapping configuration: sourceFields.${field} is required (system field)`);
}
}
// Validate zammadFields reference valid sourceFields
for (const [zammadField, sourceKey] of Object.entries(config.zammadFields)) {
if (!config.sourceFields[sourceKey]) {
logger.warn(
{ zammadField, sourceKey },
'Zammad field maps to non-existent source field key'
);
}
}
logger.info('Successfully loaded Formstack field mapping configuration');
cachedMapping = config;
return cachedMapping;
} catch (error) {
logger.error({
error: error instanceof Error ? error.message : error,
jsonLength: configJson.length
}, 'Failed to parse field mapping configuration');
throw new Error(
`Failed to parse Formstack field mapping JSON: ${error instanceof Error ? error.message : error}`
);
}
}
/**
* Get a field value from formData using the source field name mapping
*/
export function getFieldValue(
formData: any,
internalFieldKey: string,
mapping?: FieldMappingConfig
): any {
const config = mapping || loadFieldMapping();
const sourceFieldName = config.sourceFields[internalFieldKey];
if (!sourceFieldName) {
return undefined;
}
return formData[sourceFieldName];
}
/**
* Get a nested field value (e.g., Name.first)
*/
export function getNestedFieldValue(
fieldValue: any,
path: string | undefined
): any {
if (!path || !fieldValue) {
return undefined;
}
const parts = path.split('.');
let current = fieldValue;
for (const part of parts) {
if (current && typeof current === 'object') {
current = current[part];
} else {
return undefined;
}
}
return current;
}
/**
* Format field value (handle arrays, objects, etc.)
*/
export function formatFieldValue(value: any): string | undefined {
if (value === null || value === undefined || value === '') {
return undefined;
}
if (Array.isArray(value)) {
return value.join(', ');
}
if (typeof value === 'object') {
return JSON.stringify(value);
}
return String(value);
}
/**
* Build ticket title from template and data
* Replaces placeholders like {name}, {organization}, {typeOfSupport} with provided values
*/
export function buildTicketTitle(
mapping: FieldMappingConfig,
data: Record<string, string | undefined>
): string {
const template = mapping.ticket.titleTemplate || '{name}';
let title = template;
// Replace all placeholders in the template
for (const [key, value] of Object.entries(data)) {
const placeholder = `{${key}}`;
if (title.includes(placeholder)) {
if (value) {
title = title.replace(placeholder, value);
} else {
// Remove empty placeholder and surrounding separators
title = title.replace(` - ${placeholder}`, '').replace(`${placeholder} - `, '').replace(placeholder, '');
}
}
}
return title.trim();
}
/**
* Get all Zammad field values from form data using the mapping
* Returns an object with Zammad field names as keys and formatted values
*/
export function getZammadFieldValues(
formData: any,
mapping?: FieldMappingConfig
): Record<string, string> {
const config = mapping || loadFieldMapping();
const result: Record<string, string> = {};
for (const [zammadFieldName, sourceKey] of Object.entries(config.zammadFields)) {
const value = getFieldValue(formData, sourceKey, config);
const formatted = formatFieldValue(value);
if (formatted !== undefined) {
result[zammadFieldName] = formatted;
}
}
return result;
}
/**
* Reset cached mapping (useful for testing)
*/
export function resetMappingCache(): void {
cachedMapping = null;
}
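For reference, a hedged example of what a FORMSTACK_FIELD_MAPPING value matching the FieldMappingConfig interface above could look like. The form questions, Zammad field names, and the group/title values are illustrative placeholders, not taken from a real deployment:

import type { FieldMappingConfig } from "./formstack-field-mapping.js";

// Illustrative configuration only; adjust field names to your own form.
const exampleMapping: FieldMappingConfig = {
  sourceFields: {
    formId: "FormID",          // required system field
    uniqueId: "UniqueID",      // required system field
    email: "Email",
    phone: "Phone",
    signalAccount: "Signal Account",
    name: "Name",              // nested object, see nestedFields below
    organization: "What organization are you affiliated with?",
    typeOfSupport: "What type of support do you wish to receive?",
    descriptionOfIssue: "Why are you seeking support?",
    state: "State",
    zipCode: "Zip Code",
  },
  zammadFields: {
    us_state: "state",         // Zammad custom field <- internal key
    zip_code: "zipCode",
  },
  ticket: {
    group: "Intake",           // placeholder Zammad group
    defaultArticleType: "note",
    titleTemplate: "{name} - {organization} - {typeOfSupport}",
  },
  nestedFields: {
    name: { firstNamePath: "first", lastNamePath: "last" },
  },
};

// The JSON.stringify of this object is what FORMSTACK_FIELD_MAPPING would hold.
process.env.FORMSTACK_FIELD_MAPPING = JSON.stringify(exampleMapping);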

View file

@@ -41,7 +41,7 @@ const formatAuth = (credentials: any) => {
   return (
     "Basic " +
     Buffer.from(`${credentials.username}:${credentials.password}`).toString(
-      "base64"
+      "base64",
     )
   );
 }
@@ -56,7 +56,7 @@ const formatAuth = (credentials: any) => {
 export const Zammad = (
   credentials: ZammadCredentials,
   host: string,
-  opts?: ZammadClientOpts
+  opts?: ZammadClientOpts,
 ): ZammadClient => {
   const extraHeaders = (opts && opts.headers) || {};
@@ -76,7 +76,9 @@ export const Zammad = (
       return result as Ticket;
     },
     update: async (id, payload) => {
-      const { payload: result } = await wreck.put(`tickets/${id}`, { payload });
+      const { payload: result } = await wreck.put(`tickets/${id}`, {
+        payload,
+      });
       return result as Ticket;
     },
   },
@@ -98,19 +100,72 @@ export const Zammad = (
   };
 };
+/**
+ * Sanitizes phone number to E.164 format: +15554446666
+ * Strips all non-digit characters except +, ensures + prefix
+ * @param phoneNumber - Raw phone number (e.g., "(555) 444-6666", "5554446666", "+1 555 444 6666")
+ * @returns E.164 formatted phone number (e.g., "+15554446666")
+ * @throws Error if phone number is invalid
+ */
+export const sanitizePhoneNumber = (phoneNumber: string): string => {
+  // Remove all characters except digits and +
+  let cleaned = phoneNumber.replace(/[^\d+]/g, "");
+  // Ensure it starts with +
+  if (!cleaned.startsWith("+")) {
+    // Assume US/Canada if no country code (11 digits starting with 1, or 10 digits)
+    if (cleaned.length === 10) {
+      cleaned = "+1" + cleaned;
+    } else if (cleaned.length === 11 && cleaned.startsWith("1")) {
+      cleaned = "+" + cleaned;
+    } else if (cleaned.length >= 10) {
+      // International number without +, add it
+      cleaned = "+" + cleaned;
+    }
+  }
+  // Validate E.164 format: + followed by 10-15 digits
+  if (!/^\+\d{10,15}$/.test(cleaned)) {
+    throw new Error(`Invalid phone number format: ${phoneNumber}`);
+  }
+  return cleaned;
+};
 export const getUser = async (zammad: ZammadClient, phoneNumber: string) => {
-  const mungedNumber = phoneNumber.replace("+", "");
-  const results = await zammad.user.search(`phone:${mungedNumber}`);
+  // Sanitize to E.164 format
+  const sanitized = sanitizePhoneNumber(phoneNumber);
+  // Remove + for Zammad search query
+  const searchNumber = sanitized.replace("+", "");
+  // Try sanitized format first (e.g., "6464229653" for "+16464229653")
+  let results = await zammad.user.search(`phone:${searchNumber}`);
   if (results.length > 0) return results[0];
+  // Fall back to searching for original input (handles legacy formatted numbers)
+  // This ensures we can find users with "(646) 422-9653" format in database
+  const originalCleaned = phoneNumber.replace(/[^\d+]/g, "").replace("+", "");
+  if (originalCleaned !== searchNumber) {
+    results = await zammad.user.search(`phone:${originalCleaned}`);
+    if (results.length > 0) return results[0];
+  }
   return undefined;
 };
-export const getOrCreateUser = async (zammad: ZammadClient, phoneNumber: string) => {
+export const getOrCreateUser = async (
+  zammad: ZammadClient,
+  phoneNumber: string,
+) => {
   const customer = await getUser(zammad, phoneNumber);
   if (customer) return customer;
+  // Sanitize phone number to E.164 format before storing
+  const sanitized = sanitizePhoneNumber(phoneNumber);
   return zammad.user.create({
-    phone: phoneNumber,
+    phone: sanitized,
-    note: "User created by Grabadora from incoming voice call",
+    note: "User created from incoming voice call",
   });
 };
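A few worked examples of the normalization sanitizePhoneNumber performs, matching its docblock above (illustrative calls, not tests from the repository):

sanitizePhoneNumber("(555) 444-6666");  // "+15554446666" (10 digits, +1 assumed)
sanitizePhoneNumber("1 555 444 6666");  // "+15554446666" (leading 1 treated as US/Canada)
sanitizePhoneNumber("+1 555 444 6666"); // "+15554446666" (already prefixed, only formatting stripped)
sanitizePhoneNumber("12345");           // throws: Invalid phone number format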

View file

@@ -1,6 +1,6 @@
 {
   "name": "@link-stack/bridge-worker",
-  "version": "3.2.0b3",
+  "version": "3.3.5",
   "type": "module",
   "main": "build/main/index.js",
   "author": "Darren Clarke <darren@redaranj.com>",
@@ -12,9 +12,9 @@
   },
   "dependencies": {
     "@hapi/wreck": "^18.1.0",
-    "@link-stack/bridge-common": "*",
-    "@link-stack/logger": "*",
-    "@link-stack/signal-api": "*",
+    "@link-stack/bridge-common": "workspace:*",
+    "@link-stack/logger": "workspace:*",
+    "@link-stack/signal-api": "workspace:*",
     "fluent-ffmpeg": "^2.1.3",
     "graphile-worker": "^0.16.6",
     "remeda": "^2.32.0",
@@ -23,8 +23,8 @@
   "devDependencies": {
     "@types/fluent-ffmpeg": "^2.1.27",
     "dotenv-cli": "^10.0.0",
-    "@link-stack/eslint-config": "*",
-    "@link-stack/typescript-config": "*",
+    "@link-stack/eslint-config": "workspace:*",
+    "@link-stack/typescript-config": "workspace:*",
     "typescript": "^5.9.3"
   }
 }

View file

@ -0,0 +1,121 @@
#!/usr/bin/env node
/**
* Check Signal group membership status and update Zammad tickets
*
* This task queries the Signal CLI API to check if users have joined
* their assigned groups. When a user joins (moves from pendingInvites to members),
* it updates the ticket's group_joined flag in Zammad.
*
* Note: This task sends webhooks for all group members every time it runs.
* The Zammad webhook handler is idempotent and will ignore duplicate notifications
* if group_joined is already true.
*/
import { db, getWorkerUtils } from "@link-stack/bridge-common";
import { createLogger } from "@link-stack/logger";
import * as signalApi from "@link-stack/signal-api";
const logger = createLogger("check-group-membership");
const { Configuration, GroupsApi } = signalApi;
interface CheckGroupMembershipTaskOptions {
// Optional: Check specific group. If not provided, checks all groups with group_joined=false
groupId?: string;
botToken?: string;
}
const checkGroupMembershipTask = async (
options: CheckGroupMembershipTaskOptions = {},
): Promise<void> => {
const config = new Configuration({
basePath: process.env.BRIDGE_SIGNAL_URL,
});
const groupsClient = new GroupsApi(config);
const worker = await getWorkerUtils();
// Get all Signal bots
const bots = await db.selectFrom("SignalBot").selectAll().execute();
for (const bot of bots) {
try {
logger.debug(
{ botId: bot.id, phoneNumber: bot.phoneNumber },
"Checking groups for bot",
);
// Get all groups for this bot
const groups = await groupsClient.v1GroupsNumberGet({
number: bot.phoneNumber,
});
logger.debug(
{ botId: bot.id, groupCount: groups.length },
"Retrieved groups from Signal CLI",
);
// For each group, check if we have tickets waiting for members to join
for (const group of groups) {
if (!group.id || !group.internalId) {
logger.debug({ groupName: group.name }, "Skipping group without ID");
continue;
}
// Log info about each group temporarily for debugging
logger.info(
{
groupId: group.id,
groupName: group.name,
membersCount: group.members?.length || 0,
members: group.members,
pendingInvitesCount: group.pendingInvites?.length || 0,
pendingInvites: group.pendingInvites,
pendingRequestsCount: group.pendingRequests?.length || 0,
},
"Checking group membership",
);
// Notify Zammad about each member who has joined
// This handles both cases:
// 1. New contacts who must accept invite (they move from pendingInvites to members)
// 2. Existing contacts who are auto-added (they appear directly in members)
if (group.members && group.members.length > 0) {
for (const memberPhone of group.members) {
// Check if this member was previously pending
// We'll send the webhook and let Zammad decide if it needs to update
await worker.addJob("common/notify-webhooks", {
backendId: bot.id,
payload: {
event: "group_member_joined",
group_id: group.id,
member_phone: memberPhone,
timestamp: new Date().toISOString(),
},
});
logger.info(
{
groupId: group.id,
memberPhone,
},
"Notified Zammad about group member",
);
}
}
}
} catch (error: any) {
logger.error(
{
botId: bot.id,
error: error.message,
stack: error.stack,
},
"Error checking group membership for bot",
);
}
}
logger.info("Completed group membership check");
};
export default checkGroupMembershipTask;
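Besides the */2 cron entry added to the worker crontab above, the check could also be queued on demand through graphile-worker's utilities, in the same way this task enqueues common/notify-webhooks jobs. A sketch, assuming the task is registered under the same identifier the crontab uses (the actual registered name may differ):

import { getWorkerUtils } from "@link-stack/bridge-common";

// One-off membership check, e.g. from an admin script.
async function runMembershipCheckNow(): Promise<void> {
  const worker = await getWorkerUtils();
  // "check-group-membership" mirrors the crontab identifier; jobKey keeps
  // repeated manual runs from piling up duplicate jobs.
  await worker.addJob("check-group-membership", {}, { jobKey: "manual-membership-check" });
}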

View file

@ -2,7 +2,7 @@ import { db, getWorkerUtils } from "@link-stack/bridge-common";
import { createLogger } from "@link-stack/logger"; import { createLogger } from "@link-stack/logger";
import * as signalApi from "@link-stack/signal-api"; import * as signalApi from "@link-stack/signal-api";
const logger = createLogger('fetch-signal-messages'); const logger = createLogger("fetch-signal-messages");
const { Configuration, MessagesApi, AttachmentsApi } = signalApi; const { Configuration, MessagesApi, AttachmentsApi } = signalApi;
const config = new Configuration({ const config = new Configuration({
@ -28,13 +28,13 @@ const fetchAttachments = async (attachments: any[] | undefined) => {
let defaultFilename = name; let defaultFilename = name;
if (!defaultFilename) { if (!defaultFilename) {
// Check if id already has an extension // Check if id already has an extension
const hasExtension = id.includes('.'); const hasExtension = id.includes(".");
if (hasExtension) { if (hasExtension) {
// ID already includes extension // ID already includes extension
defaultFilename = id; defaultFilename = id;
} else { } else {
// Add extension based on content type // Add extension based on content type
const extension = contentType?.split('/')[1] || 'bin'; const extension = contentType?.split("/")[1] || "bin";
defaultFilename = `${id}.${extension}`; defaultFilename = `${id}.${extension}`;
} }
} }
@ -64,7 +64,22 @@ const processMessage = async ({
message: msg, message: msg,
}: ProcessMessageArgs): Promise<Record<string, any>[]> => { }: ProcessMessageArgs): Promise<Record<string, any>[]> => {
const { envelope } = msg; const { envelope } = msg;
const { source, sourceUuid, dataMessage } = envelope; const { source, sourceUuid, dataMessage, syncMessage, receiptMessage, typingMessage } =
envelope;
// Log all envelope types to understand what events we're receiving
logger.info(
{
source,
sourceUuid,
hasDataMessage: !!dataMessage,
hasSyncMessage: !!syncMessage,
hasReceiptMessage: !!receiptMessage,
hasTypingMessage: !!typingMessage,
envelopeKeys: Object.keys(envelope),
},
"Received Signal envelope",
);
const isGroup = !!( const isGroup = !!(
dataMessage?.groupV2 || dataMessage?.groupV2 ||
@ -72,23 +87,69 @@ const processMessage = async ({
dataMessage?.groupInfo dataMessage?.groupInfo
); );
// Check if this is a group membership change event
const groupInfo = dataMessage?.groupInfo;
if (groupInfo) {
logger.info(
{
type: groupInfo.type,
groupId: groupInfo.groupId,
source,
groupInfoKeys: Object.keys(groupInfo),
fullGroupInfo: groupInfo,
},
"Received group info event",
);
// If user joined the group, notify Zammad
if (groupInfo.type === "JOIN" || groupInfo.type === "JOINED") {
const worker = await getWorkerUtils();
const groupId = groupInfo.groupId
? `group.${Buffer.from(groupInfo.groupId).toString("base64")}`
: null;
if (groupId) {
await worker.addJob("common/notify-webhooks", {
backendId: id,
payload: {
event: "group_member_joined",
group_id: groupId,
member_phone: source,
timestamp: new Date().toISOString(),
},
});
logger.info(
{
groupId,
memberPhone: source,
},
"User joined Signal group, notifying Zammad",
);
}
}
}
if (!dataMessage) return []; if (!dataMessage) return [];
const { attachments } = dataMessage; const { attachments } = dataMessage;
const rawTimestamp = dataMessage?.timestamp; const rawTimestamp = dataMessage?.timestamp;
logger.debug({ logger.debug(
sourceUuid, {
source, sourceUuid,
rawTimestamp, source,
hasGroupV2: !!dataMessage?.groupV2, rawTimestamp,
hasGroupContext: !!dataMessage?.groupContext, hasGroupV2: !!dataMessage?.groupV2,
hasGroupInfo: !!dataMessage?.groupInfo, hasGroupContext: !!dataMessage?.groupContext,
isGroup, hasGroupInfo: !!dataMessage?.groupInfo,
groupV2Id: dataMessage?.groupV2?.id, isGroup,
groupContextType: dataMessage?.groupContext?.type, groupV2Id: dataMessage?.groupV2?.id,
groupInfoType: dataMessage?.groupInfo?.type, groupContextType: dataMessage?.groupContext?.type,
}, 'Processing message'); groupInfoType: dataMessage?.groupInfo?.type,
},
"Processing message",
);
const timestamp = new Date(rawTimestamp); const timestamp = new Date(rawTimestamp);
const formattedAttachments = await fetchAttachments(attachments); const formattedAttachments = await fetchAttachments(attachments);
@ -165,7 +226,7 @@ const fetchSignalMessagesTask = async ({
number: phoneNumber, number: phoneNumber,
}); });
logger.debug({ botId: id, phoneNumber }, 'Fetching messages for bot'); logger.debug({ botId: id, phoneNumber }, "Fetching messages for bot");
for (const message of messages) { for (const message of messages) {
const formattedMessages = await processMessage({ const formattedMessages = await processMessage({
@ -175,19 +236,19 @@ const fetchSignalMessagesTask = async ({
}); });
for (const formattedMessage of formattedMessages) { for (const formattedMessage of formattedMessages) {
if (formattedMessage.to !== formattedMessage.from) { if (formattedMessage.to !== formattedMessage.from) {
logger.debug({ logger.debug(
messageId: formattedMessage.messageId, {
from: formattedMessage.from, messageId: formattedMessage.messageId,
to: formattedMessage.to, from: formattedMessage.from,
isGroup: formattedMessage.isGroup, to: formattedMessage.to,
hasMessage: !!formattedMessage.message, isGroup: formattedMessage.isGroup,
hasAttachment: !!formattedMessage.attachment, hasMessage: !!formattedMessage.message,
}, 'Creating job for message'); hasAttachment: !!formattedMessage.attachment,
},
await worker.addJob( "Creating job for message",
"signal/receive-signal-message",
formattedMessage,
); );
await worker.addJob("signal/receive-signal-message", formattedMessage);
} }
} }
} }

View file

@ -1,7 +1,17 @@
import { createLogger } from "@link-stack/logger"; import { createLogger } from "@link-stack/logger";
import { Zammad, getUser } from "../../lib/zammad.js"; import { db } from "@link-stack/bridge-common";
import { Zammad, getUser, sanitizePhoneNumber } from "../../lib/zammad.js";
import {
loadFieldMapping,
getFieldValue,
getNestedFieldValue,
formatFieldValue,
buildTicketTitle,
getZammadFieldValues,
type FieldMappingConfig,
} from "../../lib/formstack-field-mapping.js";
const logger = createLogger("create-ticket-from-form");

export interface CreateTicketFromFormOptions {
  formData: any;
@ -13,67 +23,112 @@ const createTicketFromFormTask = async (
): Promise<void> => {
  const { formData, receivedAt } = options;

  // Load field mapping configuration
  const mapping = loadFieldMapping();

  // Log only non-PII metadata using configured field names
  const formId = getFieldValue(formData, "formId", mapping);
  const uniqueId = getFieldValue(formData, "uniqueId", mapping);
  logger.info(
    {
      formId,
      uniqueId,
      receivedAt,
      fieldCount: Object.keys(formData).length,
    },
    "Processing Formstack form submission",
  );

  // Extract fields using dynamic mapping
  const nameField = getFieldValue(formData, "name", mapping);
  const firstName = mapping.nestedFields?.name?.firstNamePath
    ? getNestedFieldValue(nameField, mapping.nestedFields.name.firstNamePath) || ""
    : "";
  const lastName = mapping.nestedFields?.name?.lastNamePath
    ? getNestedFieldValue(nameField, mapping.nestedFields.name.lastNamePath) || ""
    : "";
  const fullName =
    firstName && lastName
      ? `${firstName} ${lastName}`.trim()
      : firstName || lastName || "Unknown";
// Extract well-known fields used for special logic (all optional)
const email = getFieldValue(formData, "email", mapping);
const rawPhone = getFieldValue(formData, "phone", mapping);
const rawSignalAccount = getFieldValue(formData, "signalAccount", mapping);
const organization = getFieldValue(formData, "organization", mapping);
const typeOfSupport = getFieldValue(formData, "typeOfSupport", mapping);
const descriptionOfIssue = getFieldValue(formData, "descriptionOfIssue", mapping);
// Sanitize phone numbers to E.164 format (+15554446666)
let phone: string | undefined;
if (rawPhone) {
try {
phone = sanitizePhoneNumber(rawPhone);
logger.info({ rawPhone, sanitized: phone }, "Sanitized phone number");
} catch (error: any) {
logger.warn({ rawPhone, error: error.message }, "Invalid phone number format, ignoring");
phone = undefined;
}
  }

  let signalAccount: string | undefined;
if (rawSignalAccount) {
try {
signalAccount = sanitizePhoneNumber(rawSignalAccount);
logger.info({ rawSignalAccount, sanitized: signalAccount }, "Sanitized signal account");
} catch (error: any) {
logger.warn({ rawSignalAccount, error: error.message }, "Invalid signal account format, ignoring");
signalAccount = undefined;
}
}
// Validate that at least one contact method is provided
if (!email && !phone && !signalAccount) {
logger.error(
{ formId, uniqueId },
"No contact information provided - at least one of email, phone, or signalAccount is required",
);
throw new Error(
"At least one contact method (email, phone, or signalAccount) is required for ticket creation",
);
}
// Build ticket title using configured template
// Pass all potentially used fields - the template determines which are actually used
const title = buildTicketTitle(mapping, {
name: fullName,
organization: formatFieldValue(organization),
typeOfSupport: formatFieldValue(typeOfSupport),
});
// Build article body - format all fields as HTML
  const formatAllFields = (data: any): string => {
    let html = "";

    // Add formatted name field first if we have it
    if (fullName && fullName !== "Unknown") {
html += `<strong>Name:</strong><br>${fullName}<br>`;
}
for (const [key, value] of Object.entries(data)) {
// Skip metadata fields and name field (we already formatted it above)
const skipFields = [
mapping.sourceFields.formId,
mapping.sourceFields.uniqueId,
mapping.sourceFields.name, // Skip raw name field
"HandshakeKey",
].filter(Boolean);
if (skipFields.includes(key)) continue;
if (value === null || value === undefined || value === "") continue;
const displayValue = Array.isArray(value)
? value.join(", ")
: typeof value === "object"
? JSON.stringify(value)
: value;
      html += `<strong>${key}:</strong><br>${displayValue}<br>`;
    }
    return html;
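loadFieldMapping, getFieldValue and buildTicketTitle all read a FieldMappingConfig whose schema is defined in lib/formstack-field-mapping.ts and is not shown in this diff. Purely as an illustration, a mapping consistent with the calls above (and with the Formstack field names the previous hardcoded version destructured, truncated here) might look like:

// Hypothetical example only - the real FieldMappingConfig schema is not part
// of this diff. The object simply mirrors the properties referenced above
// (sourceFields, nestedFields, ticket.group, ticket.defaultArticleType).
const exampleMapping = {
  sourceFields: {
    formId: "FormID",
    uniqueId: "UniqueID",
    name: "Name",
    email: "Email",
    phone: "Phone",
    signalAccount: "Signal Account",
    organization: "What organization are you affiliated with...",
    typeOfSupport: "What type of support do you wish to receive...",
    descriptionOfIssue: "Why are you seeking support?...",
  },
  nestedFields: {
    name: { firstNamePath: "first", lastNamePath: "last" },
  },
  ticket: {
    group: "Imports",
    defaultArticleType: "note",
    // assumed template shape; the old code built "[Name] - [Organization] - [Type of support]"
    titleTemplate: "{name} - {organization} - {typeOfSupport}",
  },
};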
@ -82,141 +137,298 @@ const createTicketFromFormTask = async (
  const body = formatAllFields(formData);

  // Get Zammad configuration from environment
  const zammadUrl = process.env.ZAMMAD_URL || "http://zammad-nginx:8080";
  const zammadToken = process.env.ZAMMAD_API_TOKEN;

  if (!zammadToken) {
    logger.error("ZAMMAD_API_TOKEN environment variable is not configured");
    throw new Error("ZAMMAD_API_TOKEN is required");
  }

  const zammad = Zammad({ token: zammadToken }, zammadUrl);

  try {
    // Look up the configured article type
    let articleTypeId: number | undefined;
    try {
      const articleTypes = await zammad.get("ticket_article_types");
      const configuredType = articleTypes.find(
        (t: any) => t.name === mapping.ticket.defaultArticleType,
      );
      articleTypeId = configuredType?.id;
      if (articleTypeId) {
        logger.info(
          { articleTypeId, typeName: mapping.ticket.defaultArticleType },
          "Found configured article type",
        );
      } else {
        logger.warn(
          { typeName: mapping.ticket.defaultArticleType },
          "Configured article type not found, ticket will use default type",
        );
      }
    } catch (error: any) {
      logger.warn({ error: error.message }, "Failed to look up article type");
    }

    // Get or create user
    // Try to find existing user by: phone -> email
    // Note: We can't search by Signal account since Signal group IDs aren't phone numbers
    let customer;

    // Try phone if provided
    if (phone) {
      customer = await getUser(zammad, phone);
      if (customer) {
        logger.info(
          { customerId: customer.id, method: "phone" },
          "Found existing user by phone",
        );
      }
    }

    // Fall back to email if no customer found yet
    if (!customer && email) {
      // Validate email format before using in search
      const emailRegex = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/;
      if (emailRegex.test(email)) {
        const emailResults = await zammad.user.search(`email:${email}`);
        if (emailResults.length > 0) {
          customer = emailResults[0];
          logger.info(
            { customerId: customer.id, method: "email" },
            "Found existing user by email",
          );
        }
      } else {
        logger.warn({ email }, "Invalid email format provided, skipping email search");
      }
    }

    if (!customer) {
      // Create new user
      logger.info("Creating new user from form submission");

      // Build user data with whatever contact info we have
      const userData: any = {
        firstname: firstName,
        lastname: lastName,
        roles: ["Customer"],
      };

      // Add contact info only if provided
      if (email) {
        userData.email = email;
      }
      // Use phone number if provided (don't use Signal group ID as phone)
      if (phone) {
        userData.phone = phone;
      }

      customer = await zammad.user.create(userData);
    }

    logger.info(
      {
        customerId: customer.id,
        email: customer.email,
      },
      "Using customer for ticket",
    );

    // Look up the configured group
    const groups = await zammad.get("groups");
    const targetGroup = groups.find((g: any) => g.name === mapping.ticket.group);

    if (!targetGroup) {
      logger.error({ groupName: mapping.ticket.group }, "Configured group not found");
throw new Error(`Zammad group "${mapping.ticket.group}" not found`);
}
logger.info(
{ groupId: targetGroup.id, groupName: targetGroup.name },
"Using configured group",
);
// Build custom fields using Zammad field mapping
// This dynamically maps all configured fields without hardcoding
const customFields = getZammadFieldValues(formData, mapping);
// Check if this is a Signal ticket
let signalArticleType = null;
let signalChannelId = null;
let signalBotToken = null;
if (signalAccount) {
try {
logger.info({ signalAccount }, "Looking up Signal channel and article type");
// Look up Signal channels from Zammad (admin-only endpoint)
// Note: bot_token is NOT included in this response for security reasons
const channels = await zammad.get("cdr_signal_channels");
if (channels.length > 0) {
const zammadChannel = channels[0]; // Use first active Signal channel
signalChannelId = zammadChannel.id;
logger.info(
{
channelId: zammadChannel.id,
phoneNumber: zammadChannel.phone_number,
},
"Found active Signal channel from Zammad",
);
// Look up the bot_token from our own cdr database using the phone number
const signalBot = await db
.selectFrom("SignalBot")
.selectAll()
.where("phoneNumber", "=", zammadChannel.phone_number)
.executeTakeFirst();
if (signalBot) {
signalBotToken = signalBot.token;
logger.info(
{ botId: signalBot.id, phoneNumber: signalBot.phoneNumber },
"Found Signal bot token from cdr database",
);
} else {
logger.warn(
{ phoneNumber: zammadChannel.phone_number },
"Signal bot not found in cdr database",
);
}
} else {
logger.warn("No active Signal channels found");
}
// Look up cdr_signal article type
const articleTypes = await zammad.get("ticket_article_types");
signalArticleType = articleTypes.find((t: any) => t.name === "cdr_signal");
if (!signalArticleType) {
logger.warn("Signal article type (cdr_signal) not found, using default type");
} else {
logger.info(
{ articleTypeId: signalArticleType.id },
"Found Signal article type",
);
}
} catch (error: any) {
logger.warn(
{ error: error.message },
"Failed to look up Signal article type, creating regular ticket",
);
}
}
// Create the ticket
const articleData: any = {
subject: descriptionOfIssue || "Support Request",
body,
content_type: "text/html",
internal: false,
    };

    // Use Signal article type if available, otherwise use configured default
if (signalArticleType) {
articleData.type_id = signalArticleType.id;
logger.info({ typeId: signalArticleType.id }, "Using Signal article type");
// IMPORTANT: Set sender to "Customer" for Signal tickets created from Formstack
// This prevents the article from being echoed back to the user via Signal
// (enqueue_communicate_cdr_signal_job only sends if sender != 'Customer')
articleData.sender = "Customer";
} else if (articleTypeId) {
articleData.type_id = articleTypeId;
}
    const ticketData: any = {
      title,
      group_id: targetGroup.id,
      customer_id: customer.id,
      article: articleData,
      ...customFields,
    };
// Add Signal preferences if we have Signal channel and article type
// Note: signalAccount from Formstack is the phone number the user typed in
// Groups are added later via update_group webhook from bridge-worker
if (signalChannelId && signalBotToken && signalArticleType && signalAccount) {
ticketData.preferences = {
channel_id: signalChannelId,
cdr_signal: {
bot_token: signalBotToken,
chat_id: signalAccount, // Use Signal phone number as chat_id
},
};
logger.info(
{
channelId: signalChannelId,
chatId: signalAccount,
},
"Adding Signal preferences to ticket",
);
}
logger.info(
{
title,
groupId: targetGroup.id,
customerId: customer.id,
hasArticleType: !!articleTypeId || !!signalArticleType,
isSignalTicket: !!signalArticleType && !!signalAccount,
customFieldCount: Object.keys(customFields).length,
},
"Creating ticket",
);
    const ticket = await zammad.ticket.create(ticketData);

    // Set create_article_type_id for Signal tickets to enable proper replies
    if (signalArticleType && signalChannelId) {
      try {
        await zammad.ticket.update(ticket.id, {
          create_article_type_id: signalArticleType.id,
});
logger.info(
{
ticketId: ticket.id,
articleTypeId: signalArticleType.id,
},
"Set create_article_type_id for Signal ticket",
);
} catch (error: any) {
logger.warn(
{
error: error.message,
ticketId: ticket.id,
},
"Failed to set create_article_type_id, ticket may not support Signal replies",
);
}
    }

    logger.info(
      {
        ticketId: ticket.id,
        ticketNumber: ticket.id,
        title,
        isSignalTicket: !!signalChannelId,
      },
      "Successfully created ticket from Formstack submission",
    );
  } catch (error: any) {
    logger.error(
      {
        error: error.message,
        stack: error.stack,
        formId,
        uniqueId,
      },
      "Failed to create ticket from Formstack submission",
    );
    throw error;
  }
};
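sanitizePhoneNumber is imported from lib/zammad.js and its implementation is not part of this diff. A minimal E.164 normalizer along the lines the comments describe could be built with libphonenumber-js; this is an assumed, illustrative dependency, not a confirmed one:

// Sketch under assumptions: the real sanitizePhoneNumber lives in lib/zammad.ts
// and may be implemented differently; libphonenumber-js is only an example.
import { parsePhoneNumberFromString } from "libphonenumber-js";

export const sanitizePhoneNumberExample = (raw: string): string => {
  const parsed = parsePhoneNumberFromString(raw, "US"); // default region is an assumption
  if (!parsed || !parsed.isValid()) {
    throw new Error(`Not a valid phone number: ${raw}`);
  }
  // .number is already E.164, e.g. "+15554446666" (matching the comment above)
  return parsed.number;
};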

View file

@ -1,4 +1,11 @@
import {
db,
getWorkerUtils,
getMaxAttachmentSize,
getMaxTotalAttachmentSize,
MAX_ATTACHMENTS,
buildSignalGroupName,
} from "@link-stack/bridge-common";
import { createLogger } from "@link-stack/logger";
import * as signalApi from "@link-stack/signal-api";

const { Configuration, MessagesApi, GroupsApi } = signalApi;
@ -58,10 +65,9 @@ const sendSignalMessageTask = async ({
  try {
    // Check if 'to' is a group ID (UUID format, group.base64 format, or base64) vs phone number
    const isUUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(
      to,
    );
    const isGroupPrefix = to.startsWith("group.");
    const isBase64 = /^[A-Za-z0-9+/]+=*$/.test(to) && to.length > 20; // Base64 internal_id
    const isGroupId = isUUID || isGroupPrefix || isBase64;
@ -72,8 +78,7 @@ const sendSignalMessageTask = async ({
        to,
        isGroupId,
        enableAutoGroups,
        shouldCreateGroup: enableAutoGroups && !isGroupId && to && conversationId,
      },
      "Recipient analysis",
    );
@ -81,7 +86,7 @@ const sendSignalMessageTask = async ({
    // If sending to a phone number and auto-groups is enabled, create a group first
    if (enableAutoGroups && !isGroupId && to && conversationId) {
      try {
        const groupName = buildSignalGroupName(conversationId);
        const createGroupResponse = await groupsClient.v1GroupsNumberPost({
          number: bot.phoneNumber,
          data: {
@ -133,6 +138,7 @@ const sendSignalMessageTask = async ({
        );

        // Notify Zammad about the new group ID via webhook
        // Set group_joined: false initially - will be updated when user accepts invitation
        await worker.addJob("common/notify-webhooks", {
          backendId: bot.id,
          payload: {
@ -141,6 +147,7 @@ const sendSignalMessageTask = async ({
            original_recipient: to,
            group_id: finalTo,
            internal_group_id: internalId,
            group_joined: false,
            timestamp: new Date().toISOString(),
          },
        });
@ -148,8 +155,7 @@ const sendSignalMessageTask = async ({
      } catch (groupError) {
        logger.error(
          {
            error: groupError instanceof Error ? groupError.message : groupError,
            to,
            conversationId,
          },
@ -204,16 +210,64 @@ const sendSignalMessageTask = async ({
        );
      }

    // Add attachments if provided with size validation
    if (attachments && attachments.length > 0) {
      const MAX_ATTACHMENT_SIZE = getMaxAttachmentSize();
      const MAX_TOTAL_SIZE = getMaxTotalAttachmentSize();

      if (attachments.length > MAX_ATTACHMENTS) {
        throw new Error(
          `Too many attachments: ${attachments.length} (max ${MAX_ATTACHMENTS})`,
        );
      }
let totalSize = 0;
const validatedAttachments = [];
for (const attachment of attachments) {
// Calculate size from base64 string (rough estimate: length * 3/4)
const estimatedSize = (attachment.data.length * 3) / 4;
if (estimatedSize > MAX_ATTACHMENT_SIZE) {
logger.warn(
{
filename: attachment.filename,
size: estimatedSize,
maxSize: MAX_ATTACHMENT_SIZE,
},
"Attachment exceeds size limit, skipping",
);
continue;
}
totalSize += estimatedSize;
if (totalSize > MAX_TOTAL_SIZE) {
logger.warn(
{
totalSize,
maxTotalSize: MAX_TOTAL_SIZE,
},
"Total attachment size exceeds limit, skipping remaining",
);
break;
}
validatedAttachments.push(attachment.data);
}
if (validatedAttachments.length > 0) {
messageData.base64Attachments = validatedAttachments;
logger.debug(
{
attachmentCount: validatedAttachments.length,
attachmentNames: attachments
.slice(0, validatedAttachments.length)
.map((att) => att.filename),
totalSizeBytes: totalSize,
},
"Including attachments in message",
);
}
    }

    const response = await messagesClient.v2SendPost({
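The 3/4 factor above reflects base64's four-characters-per-three-bytes encoding; for reference, an exact decoded-size helper (illustrative only, not part of the task) would also subtract the padding characters:

// Illustrative helper: exact decoded size of a base64 payload. The task above
// uses the simpler length * 3 / 4 estimate, which slightly overcounts padded
// strings; either is fine for enforcing coarse attachment limits.
const base64DecodedBytes = (b64: string): number => {
  const padding = b64.endsWith("==") ? 2 : b64.endsWith("=") ? 1 : 0;
  return (b64.length * 3) / 4 - padding;
};

// Example: "aGVsbG8=" encodes "hello" (5 bytes)
console.log(base64DecodedBytes("aGVsbG8=")); // 5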

View file

@ -2,22 +2,28 @@ FROM node:22-bookworm-slim AS base
FROM base AS builder
ARG APP_DIR=/opt/link
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN mkdir -p ${APP_DIR}/
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN pnpm add -g turbo
WORKDIR ${APP_DIR}
COPY . .
RUN turbo prune --scope=@link-stack/link --scope=@link-stack/bridge-migrations --docker

FROM base AS installer
ARG APP_DIR=/opt/link
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR ${APP_DIR}
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
COPY --from=builder ${APP_DIR}/.gitignore .gitignore
COPY --from=builder ${APP_DIR}/out/json/ .
COPY --from=builder ${APP_DIR}/out/pnpm-lock.yaml ./pnpm-lock.yaml
RUN pnpm install --frozen-lockfile
COPY --from=builder ${APP_DIR}/out/full/ .
RUN pnpm add -g turbo
ENV ZAMMAD_URL http://zammad-nginx:8080
RUN turbo run build --filter=@link-stack/link --filter=@link-stack/bridge-migrations

@ -30,6 +36,9 @@ LABEL maintainer="Darren Clarke <darren@redaranj.com>"
LABEL org.label-schema.build-date=$BUILD_DATE
LABEL org.label-schema.version=$VERSION
ENV APP_DIR ${APP_DIR}
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable && corepack prepare pnpm@9.15.4 --activate
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
    apt-get install -y --no-install-recommends \
    dumb-init

View file

@ -4,13 +4,12 @@ The main CDR (Center for Digital Resilience) Link application - a streamlined he
## Overview

CDR Link provides a unified dashboard for managing support tickets, communication channels, and data analytics. It integrates multiple services including Zammad (ticketing), Bridge (multi-channel messaging), and OpenSearch.

## Features

- **Simplified Helpdesk Interface**: Streamlined UI for Zammad ticket management
- **Multi-Channel Communication**: Integration with Signal, WhatsApp, Facebook, and Voice channels
- **User Management**: Role-based access control with Google OAuth
- **Search**: Integrated OpenSearch for advanced queries
- **Label Studio Integration**: For data annotation workflows

@ -69,7 +68,6 @@ Key environment variables required:
- `/overview/[overview]` - Ticket overview pages
- `/tickets/[id]` - Individual ticket view/edit
- `/admin/bridge` - Bridge configuration management
- `/opensearch` - Search dashboard
- `/zammad` - Direct Zammad access
- `/profile` - User profile management

@ -104,6 +102,5 @@ docker-compose -f docker/compose/link.yml up
- **Zammad**: GraphQL queries for ticket data
- **Bridge Services**: REST APIs for channel management
- **OpenSearch**: Direct dashboard embedding
- **Redis**: Session and cache storage

View file

@ -7,13 +7,11 @@ import { SetupModeWarning } from "./SetupModeWarning";
interface InternalLayoutProps extends PropsWithChildren {
  setupModeActive: boolean;
}

export const InternalLayout: FC<InternalLayoutProps> = ({
  children,
  setupModeActive,
}) => {
  const [open, setOpen] = useState(true);

@ -24,7 +22,6 @@ export const InternalLayout: FC<InternalLayoutProps> = ({
      <Sidebar
        open={open}
        setOpen={setOpen}
      />
      <Grid
        item

View file

@ -176,13 +176,11 @@ const MenuItem = ({
interface SidebarProps {
  open: boolean;
  setOpen: (open: boolean) => void;
}

export const Sidebar: FC<SidebarProps> = ({
  open,
  setOpen,
}) => {
  const pathname = usePathname();
  const { data: session } = useSession();

@ -372,11 +370,11 @@ export const Sidebar: FC<SidebarProps> = ({
        }}
      >
        <MenuItem
          name="Dashboard"
          href="/"
          Icon={InsightsIcon}
          iconSize={20}
          selected={pathname === "/"}
          open={open}
        />
        <MenuItem

View file

@ -4,6 +4,8 @@ import { FC } from "react";
import { Grid } from "@mui/material";
import Iframe from "react-iframe";

const docsUrl = "https://digiresilience.org/docs/link/about/";

export const DocsWrapper: FC = () => (
  <Grid
    container
@ -17,7 +19,7 @@ export const DocsWrapper: FC = () => (
  >
    <Iframe
      id="docs"
      url={docsUrl}
      width="100%"
      height="100%"
      frameBorder={0}

View file

@ -12,12 +12,10 @@ type LayoutProps = {
export default function Layout({ children }: LayoutProps) {
  const setupModeActive = process.env.SETUP_MODE === "true";

  return (
    <InternalLayout
      setupModeActive={setupModeActive}
    >
      {children}
    </InternalLayout>

View file

@ -2,7 +2,7 @@ import { Metadata } from "next";
import { DefaultDashboard } from "./_components/DefaultDashboard";

export const metadata: Metadata = {
  title: "CDR Link - Dashboard",
};

export default async function Page() {

View file

@ -10,7 +10,6 @@ import {
import Google from "next-auth/providers/google";
import Credentials from "next-auth/providers/credentials";
import Apple from "next-auth/providers/apple";
import AzureADProvider from "next-auth/providers/azure-ad";

import { createLogger } from "@link-stack/logger";

@ -30,7 +29,7 @@ const fetchRoles = async () => {
};

const fetchUser = async (email: string) => {
  const url = `${process.env.ZAMMAD_URL}/api/v1/users/search?query=${encodeURIComponent(`login:${email}`)}&limit=1`;
  const res = await fetch(url, { headers });
  const users = await res.json();
  const user = users?.[0];

@ -124,9 +123,9 @@ if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
export const authOptions: NextAuthOptions = {
  pages: {
    signIn: "/login",
    error: "/login",
    signOut: "/logout",
  },
  providers,
  session: {

@ -139,11 +138,6 @@ export const authOptions: NextAuthOptions = {
      return roles.includes("admin") || roles.includes("agent");
    },
    session: async ({ session, token }) => {
      // @ts-ignore
      session.user.roles = token.roles ?? [];
      // @ts-ignore
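The encodeURIComponent change matters because Zammad logins are email addresses, which can contain characters that are significant in a query string. A quick illustration with a hypothetical address:

// Why the encodeURIComponent change matters (illustrative values only):
const email = "jo+test@example.org";
// Unencoded, "+" would reach Zammad as a space and "&"/"#" would truncate the query.
const url = `/api/v1/users/search?query=${encodeURIComponent(`login:${email}`)}&limit=1`;
// -> /api/v1/users/search?query=login%3Ajo%2Btest%40example.org&limit=1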

View file

@ -1,40 +0,0 @@
import { createLogger } from "@link-stack/logger";
const logger = createLogger('link-utils');
export const fetchLeafcutter = async (url: string, options: any) => {
/*
const headers = {
'X-Opensearch-Username': process.env.OPENSEARCH_USER!,
'X-Opensearch-Password': process.env.OPENSEARCH_PASSWORD!,
'X-Leafcutter-User': token.email.toLowerCase()
};
*/
const fetchData = async (url: string, options: any) => {
try {
const res = await fetch(url, options);
const json = await res.json();
return json;
} catch (error) {
logger.error({ error }, "Error occurred");
return null;
}
};
const data = await fetchData(url, options);
if (!data) {
const csrfURL = `${process.env.NEXT_PUBLIC_LEAFCUTTER_URL}/api/auth/csrf`;
const csrfData = await fetchData(csrfURL, {});
const authURL = `${process.env.NEXT_PUBLIC_LEAFCUTTER_URL}/api/auth/callback/credentials`;
const authData = await fetchData(authURL, { method: "POST" });
if (!authData) {
return null;
} else {
return await fetchData(url, options);
}
} else {
return data;
}
};

View file

@ -1 +1,4 @@
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { receiveMessage as POST } from "@link-stack/bridge-ui";

View file

@ -1 +1,4 @@
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { relinkBot as POST } from "@link-stack/bridge-ui";

View file

@ -1 +1,4 @@
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { getBot as GET } from "@link-stack/bridge-ui";

View file

@ -1 +1,4 @@
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { sendMessage as POST } from "@link-stack/bridge-ui";

View file

@ -1,3 +1,6 @@
import { handleWebhook } from "@link-stack/bridge-ui";
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
export { handleWebhook as GET, handleWebhook as POST };

View file

@ -1,6 +1,9 @@
import NextAuth from "next-auth";
import { authOptions } from "@/app/_lib/authentication";
// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';
const handler = NextAuth(authOptions);
export { handler as GET, handler as POST };

View file

@ -1,11 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { createLogger } from "@link-stack/logger";
import { getWorkerUtils } from "@link-stack/bridge-common";
import { timingSafeEqual } from "crypto";

// Force this route to be dynamic (not statically generated at build time)
export const dynamic = 'force-dynamic';

const logger = createLogger('formstack-webhook');

export async function POST(req: NextRequest): Promise<NextResponse> {
  try {
    const clientIp = req.headers.get('x-forwarded-for') || req.headers.get('x-real-ip') || 'unknown';

    // Get the shared secret from environment variable
    const expectedSecret = process.env.FORMSTACK_SHARED_SECRET;
@ -21,19 +27,47 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
    const body = await req.json();
    const receivedSecret = body.HandshakeKey;

    // Validate that secret is provided
    if (!receivedSecret || typeof receivedSecret !== 'string') {
      logger.warn({ clientIp }, 'Missing or invalid HandshakeKey');
      return NextResponse.json(
        { error: "Unauthorized" },
        { status: 401 }
      );
    }

    // Use timing-safe comparison to prevent timing attacks
const expectedBuffer = Buffer.from(expectedSecret);
const receivedBuffer = Buffer.from(receivedSecret);
let secretsMatch = false;
if (expectedBuffer.length === receivedBuffer.length) {
try {
secretsMatch = timingSafeEqual(expectedBuffer, receivedBuffer);
} catch (e) {
secretsMatch = false;
}
}
if (!secretsMatch) {
logger.warn({
secretMatch: false,
timestamp: new Date().toISOString(),
userAgent: req.headers.get('user-agent'),
clientIp
}, 'Invalid shared secret received');
return NextResponse.json(
{ error: "Unauthorized" },
{ status: 401 }
);
}
// Log webhook receipt with non-PII metadata only
    logger.info({
      formId: body.FormID,
      uniqueId: body.UniqueID,
      timestamp: new Date().toISOString(),
      fieldCount: Object.keys(body).length
    }, 'Received Formstack webhook');

    // Enqueue a bridge-worker task to process this form submission
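timingSafeEqual only accepts equal-length buffers, which is why the handler length-checks first and treats a mismatch as a failed comparison. A common alternative, sketched here purely for reference (it is not what this route does), hashes both values to a fixed length before comparing, so the code never branches on the length of the received secret:

// Alternative sketch, not the route's implementation: hash both secrets to a
// fixed length, then compare in constant time.
import { createHash, timingSafeEqual } from "crypto";

const sha256 = (value: string): Buffer =>
  createHash("sha256").update(value).digest();

export const secretsMatch = (expected: string, received: string): boolean =>
  timingSafeEqual(sha256(expected), sha256(received));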

View file

@ -1,12 +1,6 @@
import { NextRequest, NextResponse } from "next/server";

export async function POST(request: NextRequest) {
  const allCookies = request.cookies.getAll();
  const zammadURL = process.env.ZAMMAD_URL ?? "http://zammad-nginx:8080";
  const signOutURL = `${zammadURL}/api/v1/signout`;
@ -18,7 +12,21 @@ export async function POST(request: NextRequest) {
      .join("; "),
  };

  // Add timeout to prevent hanging requests
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 5000); // 5 second timeout
try {
await fetch(signOutURL, {
headers,
signal: controller.signal
});
} catch (error) {
// Log but don't fail logout if Zammad signout fails
console.error('Zammad signout failed:', error);
} finally {
clearTimeout(timeout);
}
  const cookiePrefixesToRemove = ["_zammad"];

  const response = NextResponse.json({ message: "ok" });
@ -31,8 +39,5 @@ export async function POST(request: NextRequest) {
    }
  }

  return response;
}

View file

@ -2,6 +2,6 @@
set -e

echo "running migrations"
(cd ../bridge-migrations/ && pnpm run migrate:up:all)

echo "starting link"
exec dumb-init pnpm run start

View file

@ -52,19 +52,44 @@ const checkRewrites = async (request: NextRequestWithAuth) => {
  };

  if (request.nextUrl.pathname.startsWith("/dashboards")) {
    // Extract the path after /dashboards and append to OpenSearch URL
    let path = request.nextUrl.pathname.slice("/dashboards".length);
    if (path.startsWith("/")) {
      path = path.slice(1);
    }
    const search = request.nextUrl.search;
const destinationURL = `${opensearchBaseURL}/${path}${search}`;
logger.debug({
pathname: request.nextUrl.pathname,
path,
search,
destinationURL
}, "OpenSearch proxy");
const requestHeaders = new Headers(request.headers);
requestHeaders.delete("x-forwarded-user");
requestHeaders.delete("x-forwarded-roles");
requestHeaders.delete("connection");
for (const [key, value] of Object.entries(headers)) {
requestHeaders.set(key, value as string);
}
return NextResponse.rewrite(new URL(destinationURL), {
request: { headers: requestHeaders },
});
  }

  const isDev = process.env.NODE_ENV === "development";
  const nonce = Buffer.from(crypto.randomUUID()).toString("base64");

  // Allow digiresilience.org for embedding documentation
  const frameSrcDirective = `frame-src 'self' https://digiresilience.org;`;

  const cspHeader = `
    default-src 'self';
    ${frameSrcDirective}
    connect-src 'self';
    script-src 'self' 'nonce-${nonce}' 'strict-dynamic' ${isDev ? "'unsafe-eval'" : ""};
    style-src 'self' 'unsafe-inline';
@ -98,6 +123,16 @@ const checkRewrites = async (request: NextRequestWithAuth) => {
    contentSecurityPolicyHeaderValue,
  );

  // Additional security headers
  response.headers.set("X-Frame-Options", "SAMEORIGIN");
  response.headers.set("X-Content-Type-Options", "nosniff");
  response.headers.set("Referrer-Policy", "strict-origin-when-cross-origin");
  response.headers.set("X-XSS-Protection", "1; mode=block");
  response.headers.set(
    "Permissions-Policy",
    "camera=(), microphone=(), geolocation=()"
  );

  return response;
}
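A worked example of the /dashboards rewrite above, with hypothetical URLs (the real opensearchBaseURL comes from configuration):

// Illustrative values only:
// incoming : https://link.example.org/dashboards/app/dashboards?id=abc
// pathname : "/dashboards/app/dashboards"  -> path = "app/dashboards"
// search   : "?id=abc"
const opensearchBaseURL = "http://opensearch-dashboards:5601"; // assumed example
const pathname = "/dashboards/app/dashboards";
let path = pathname.slice("/dashboards".length);
if (path.startsWith("/")) path = path.slice(1);
const destinationURL = `${opensearchBaseURL}/${path}?id=abc`;
console.log(destinationURL); // http://opensearch-dashboards:5601/app/dashboards?id=abc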

View file

@ -1,6 +1,6 @@
{
  "name": "@link-stack/link",
  "version": "3.3.5",
  "type": "module",
  "scripts": {
    "dev": "next dev -H 0.0.0.0",
@ -16,10 +16,10 @@
    "@emotion/react": "^11.14.0",
    "@emotion/server": "^11.11.0",
    "@emotion/styled": "^11.14.1",
    "@link-stack/bridge-common": "workspace:*",
    "@link-stack/bridge-ui": "workspace:*",
    "@link-stack/logger": "workspace:*",
    "@link-stack/ui": "workspace:*",
    "@mui/icons-material": "^6",
    "@mui/material": "^6",
    "@mui/material-nextjs": "^6",
@ -31,7 +31,7 @@
    "graphql-request": "^7.2.0",
    "ioredis": "^5.8.1",
    "mui-chips-input": "^6.0.0",
    "next": "15.5.9",
    "next-auth": "^4.24.11",
    "react": "19.2.0",
    "react-cookie": "^8.0.1",
@ -41,9 +41,8 @@
    "sharp": "^0.34.4"
  },
  "devDependencies": {
    "@link-stack/eslint-config": "workspace:*",
    "@types/node": "^24.7.0",
    "@types/react": "19.2.2"
  }
}

View file

@ -1,67 +0,0 @@
version: '3.8'
services:
zammad-railsserver:
volumes:
# Controllers
- ${PWD}/packages/zammad-addon-bridge/src/app/controllers/channels_cdr_signal_controller.rb:/opt/zammad/app/controllers/channels_cdr_signal_controller.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/controllers/channels_cdr_voice_controller.rb:/opt/zammad/app/controllers/channels_cdr_voice_controller.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/controllers/channels_cdr_whatsapp_controller.rb:/opt/zammad/app/controllers/channels_cdr_whatsapp_controller.rb:ro
# Models
- ${PWD}/packages/zammad-addon-bridge/src/app/models/channel/driver/cdr_signal.rb:/opt/zammad/app/models/channel/driver/cdr_signal.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/models/channel/driver/cdr_whatsapp.rb:/opt/zammad/app/models/channel/driver/cdr_whatsapp.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/models/ticket/article/enqueue_communicate_cdr_signal_job.rb:/opt/zammad/app/models/ticket/article/enqueue_communicate_cdr_signal_job.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/models/ticket/article/enqueue_communicate_cdr_whatsapp_job.rb:/opt/zammad/app/models/ticket/article/enqueue_communicate_cdr_whatsapp_job.rb:ro
# Jobs
- ${PWD}/packages/zammad-addon-bridge/src/app/jobs/communicate_cdr_signal_job.rb:/opt/zammad/app/jobs/communicate_cdr_signal_job.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/jobs/communicate_cdr_whatsapp_job.rb:/opt/zammad/app/jobs/communicate_cdr_whatsapp_job.rb:ro
# Policies
- ${PWD}/packages/zammad-addon-bridge/src/app/policies/controllers/channels_cdr_signal_controller_policy.rb:/opt/zammad/app/policies/controllers/channels_cdr_signal_controller_policy.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/policies/controllers/channels_cdr_voice_controller_policy.rb:/opt/zammad/app/policies/controllers/channels_cdr_voice_controller_policy.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/policies/controllers/channels_cdr_whatsapp_controller_policy.rb:/opt/zammad/app/policies/controllers/channels_cdr_whatsapp_controller_policy.rb:ro
# Config - initializers
- ${PWD}/packages/zammad-addon-bridge/src/config/initializers/cdr_signal.rb:/opt/zammad/config/initializers/cdr_signal.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/config/initializers/cdr_whatsapp.rb:/opt/zammad/config/initializers/cdr_whatsapp.rb:ro
# Config - routes
- ${PWD}/packages/zammad-addon-bridge/src/config/routes/channel_cdr_signal.rb:/opt/zammad/config/routes/channel_cdr_signal.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/config/routes/channel_cdr_voice.rb:/opt/zammad/config/routes/channel_cdr_voice.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/config/routes/channel_cdr_whatsapp.rb:/opt/zammad/config/routes/channel_cdr_whatsapp.rb:ro
# Database migrations
- ${PWD}/packages/zammad-addon-bridge/src/db/addon/bridge/20210525091356_cdr_signal_channel.rb:/opt/zammad/db/addon/bridge/20210525091356_cdr_signal_channel.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/db/addon/bridge/20210525091357_cdr_voice_channel.rb:/opt/zammad/db/addon/bridge/20210525091357_cdr_voice_channel.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/db/addon/bridge/20210525091358_cdr_whatsapp_channel.rb:/opt/zammad/db/addon/bridge/20210525091358_cdr_whatsapp_channel.rb:ro
# Lib files
- ${PWD}/packages/zammad-addon-bridge/src/lib/cdr_signal.rb:/opt/zammad/lib/cdr_signal.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/lib/cdr_signal_api.rb:/opt/zammad/lib/cdr_signal_api.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/lib/cdr_whatsapp.rb:/opt/zammad/lib/cdr_whatsapp.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/lib/cdr_whatsapp_api.rb:/opt/zammad/lib/cdr_whatsapp_api.rb:ro
# Also map to scheduler for background jobs
zammad-scheduler:
volumes:
# Models
- ${PWD}/packages/zammad-addon-bridge/src/app/models/channel/driver/cdr_signal.rb:/opt/zammad/app/models/channel/driver/cdr_signal.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/models/channel/driver/cdr_whatsapp.rb:/opt/zammad/app/models/channel/driver/cdr_whatsapp.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/models/ticket/article/enqueue_communicate_cdr_signal_job.rb:/opt/zammad/app/models/ticket/article/enqueue_communicate_cdr_signal_job.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/models/ticket/article/enqueue_communicate_cdr_whatsapp_job.rb:/opt/zammad/app/models/ticket/article/enqueue_communicate_cdr_whatsapp_job.rb:ro
# Jobs
- ${PWD}/packages/zammad-addon-bridge/src/app/jobs/communicate_cdr_signal_job.rb:/opt/zammad/app/jobs/communicate_cdr_signal_job.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/app/jobs/communicate_cdr_whatsapp_job.rb:/opt/zammad/app/jobs/communicate_cdr_whatsapp_job.rb:ro
# Config - initializers
- ${PWD}/packages/zammad-addon-bridge/src/config/initializers/cdr_signal.rb:/opt/zammad/config/initializers/cdr_signal.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/config/initializers/cdr_whatsapp.rb:/opt/zammad/config/initializers/cdr_whatsapp.rb:ro
# Lib files
- ${PWD}/packages/zammad-addon-bridge/src/lib/cdr_signal.rb:/opt/zammad/lib/cdr_signal.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/lib/cdr_signal_api.rb:/opt/zammad/lib/cdr_signal_api.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/lib/cdr_whatsapp.rb:/opt/zammad/lib/cdr_whatsapp.rb:ro
- ${PWD}/packages/zammad-addon-bridge/src/lib/cdr_whatsapp_api.rb:/opt/zammad/lib/cdr_whatsapp_api.rb:ro

View file

@ -1,3 +1,4 @@
FROM node:22-alpine AS node
FROM docker:git

RUN set -ex; \
@ -5,7 +6,22 @@ RUN set -ex; \
    make \
    curl \
    bash \
    jq \
libstdc++ ;
# Copy Node.js 22 from official image
COPY --from=node /usr/lib /usr/lib
COPY --from=node /usr/local/lib /usr/local/lib
COPY --from=node /usr/local/include /usr/local/include
COPY --from=node /usr/local/bin /usr/local/bin
# Prepare pnpm (corepack is already enabled in node:22-alpine)
RUN corepack prepare pnpm@9.15.4 --activate
# Set up pnpm home
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
COPY --from=docker/buildx-bin /buildx /usr/libexec/docker/cli-plugins/docker-buildx
RUN docker buildx install
RUN docker buildx version

View file

@ -0,0 +1,20 @@
services:
bridge-whatsapp:
container_name: bridge-whatsapp
build:
context: ../../
dockerfile: ./apps/bridge-whatsapp/Dockerfile
image: registry.gitlab.com/digiresilience/link/link-stack/bridge-whatsapp:${LINK_STACK_VERSION}
restart: ${RESTART}
environment:
PORT: 5000
NODE_ENV: production
BRIDGE_FRONTEND_URL: http://link:3000/link
volumes:
- bridge-whatsapp-data:/home/node/baileys
ports:
- 5000:5000
volumes:
bridge-whatsapp-data:
driver: local

View file

@ -4,6 +4,7 @@ x-global-vars: &common-global-variables
x-bridge-vars: &common-bridge-variables
  DATABASE_HOST: "postgresql"
  DATABASE_NAME: "cdr"
  DATABASE_PORT: "5432"
  DATABASE_USER: ${DATABASE_USER}
  DATABASE_ROOT_OWNER: "root"
  DATABASE_ROOT_PASSWORD: ${BRIDGE_DATABASE_ROOT_PASSWORD}
@ -21,22 +22,14 @@ x-bridge-vars: &common-bridge-variables
  NEXTAUTH_SECRET: ${NEXTAUTH_SECRET}
  BRIDGE_SIGNAL_URL: ${BRIDGE_SIGNAL_URL}
  BRIDGE_SIGNAL_AUTO_GROUPS: ${BRIDGE_SIGNAL_AUTO_GROUPS}
  BRIDGE_WHATSAPP_URL: "http://bridge-whatsapp:5000"
  LOG_LEVEL: "debug"
  ZAMMAD_API_TOKEN: ${ZAMMAD_API_TOKEN}
  ZAMMAD_URL: ${ZAMMAD_URL}
  FORMSTACK_SHARED_SECRET: ${FORMSTACK_SHARED_SECRET}
  FORMSTACK_FIELD_MAPPING: ${FORMSTACK_FIELD_MAPPING}

services:
  bridge-worker:
    build:
      context: ../../

View file

@ -16,7 +16,7 @@ services:
      LINK_URL: ${LINK_URL}
      BRIDGE_URL: http://bridge-frontend:3000
      BRIDGE_SIGNAL_URL: http://signal-cli-rest-api:8080
      BRIDGE_WHATSAPP_URL: http://bridge-whatsapp:5000
      ZAMMAD_URL: http://zammad-nginx:8080
      REDIS_URL: "redis://zammad-redis:6379"
      NEXTAUTH_URL: ${LINK_URL}/api/auth

View file

@ -56,9 +56,6 @@ RUN sed -i "s/'flattened'/'flat_object'/g" /opt/zammad/lib/search_index_backend.
RUN touch db/schema.rb && \
    ZAMMAD_SAFE_MODE=1 DATABASE_URL=postgresql://zammad:/zammad bundle exec rake assets:precompile

# Clean up build artifacts
RUN rm -rf tmp/cache node_modules/.cache

ARG EMBEDDED=false
@ -78,6 +75,14 @@ RUN if [ "$EMBEDDED" = "true" ] ; then \
echo "}" >> /opt/zammad/contrib/nginx/zammad.conf; \ echo "}" >> /opt/zammad/contrib/nginx/zammad.conf; \
fi fi
# Modify entrypoint to install packages and run migrations at runtime
RUN sed -i '/^[[:space:]]*# es config/a\
echo "Installing addon packages..."\n\
bundle exec rails runner /opt/zammad/contrib/link/setup.rb\n\
bundle exec rake zammad:package:migrate\n\
' /docker-entrypoint.sh
FROM zammad/zammad-docker-compose:${ZAMMAD_VERSION} AS runner

USER root
@ -88,37 +93,7 @@ RUN apt-get update && \
    rm -rf /var/lib/apt/lists/* && \
    npm install -g pnpm

USER zammad
COPY --from=builder --chown=zammad:zammad ${ZAMMAD_DIR} ${ZAMMAD_DIR}
COPY --from=builder /usr/local/bundle /usr/local/bundle
COPY --from=builder /docker-entrypoint.sh /docker-entrypoint.sh

View file

@ -1,7 +1,7 @@
# frozen_string_literal: true # frozen_string_literal: true
# uninstall # uninstall
package_names = %w[Hardening Leafcutter Bridge] package_names = %w[Hardening Bridge]
package_names.each do |name| package_names.each do |name|
puts "Attempting to uninstall #{name} package..." puts "Attempting to uninstall #{name} package..."

15985
package-lock.json generated

File diff suppressed because it is too large

View file

@ -1,17 +1,17 @@
{ {
"name": "@link-stack", "name": "@link-stack",
"version": "3.2.0b3", "version": "3.3.5",
"description": "Link from the Center for Digital Resilience", "description": "Link from the Center for Digital Resilience",
"scripts": { "scripts": {
"dev": "dotenv -- turbo dev", "dev": "dotenv -- turbo dev",
"build": "dotenv -- turbo build", "build": "dotenv -- turbo build",
"migrate": "dotenv -- npm run migrate --workspace=database", "migrate": "dotenv -- pnpm --filter database run migrate",
"lint": "dotenv turbo lint", "lint": "dotenv turbo lint",
"update-version": "find . -name 'package.json' -exec sed -i -E 's/\"version\": \"[^\"]+\"/\"version\": \"3.2.0b3\"/' {} +", "update-version": "node --experimental-strip-types scripts/update-version.ts",
"upgrade:setup": "npm i -g npm-check-updates", "upgrade:setup": "pnpm i -g npm-check-updates",
"upgrade:check": "ncu && ncu -ws", "upgrade:check": "ncu && ncu -ws",
"upgrade": "ncu -u && ncu -ws -u && npm i", "upgrade": "ncu -u && ncu -ws -u && pnpm i",
"clean": "rm -f package-lock.json && rm -rf node_modules && rm -rf .turbo && rm -rf apps/*/node_modules && rm -rf apps/*/package-lock.json && rm -rf apps/*/.next && rm -rf packages/*/node_modules && rm -rf apps/*/.next && rm -rf packages/*/.turbo && rm -rf packages/*/build && rm -rf docker/zammad/addons/*", "clean": "rm -f pnpm-lock.yaml && rm -rf node_modules && rm -rf .turbo && rm -rf apps/*/node_modules && rm -rf apps/*/.next && rm -rf packages/*/node_modules && rm -rf apps/*/.next && rm -rf packages/*/.turbo && rm -rf packages/*/build && rm -rf docker/zammad/addons/*",
"docker:all:up": "node docker/scripts/docker.js all up", "docker:all:up": "node docker/scripts/docker.js all up",
"docker:all:down": "node docker/scripts/docker.js all down", "docker:all:down": "node docker/scripts/docker.js all down",
"docker:all:build": "node docker/scripts/docker.js all build", "docker:all:build": "node docker/scripts/docker.js all build",
@ -34,16 +34,11 @@
"docker:bridge:build": "node docker/scripts/docker.js bridge build", "docker:bridge:build": "node docker/scripts/docker.js bridge build",
"docker:zammad:restart": "docker restart zammad-railsserver zammad-scheduler" "docker:zammad:restart": "docker restart zammad-railsserver zammad-scheduler"
}, },
"workspaces": [
"apps/*",
"packages/*",
"database/*"
],
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://gitlab.com/digiresilience/link/link-stack.git" "url": "git+https://gitlab.com/digiresilience/link/link-stack.git"
}, },
"packageManager": "npm@11.6.1", "packageManager": "pnpm@9.15.4",
"author": "Darren Clarke", "author": "Darren Clarke",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"devDependencies": { "devDependencies": {
@ -57,15 +52,17 @@
"turbo": "^2.5.8", "turbo": "^2.5.8",
"typescript": "latest" "typescript": "latest"
}, },
"overrides": { "pnpm": {
"react": "19.2.0", "overrides": {
"react-dom": "19.2.0", "react": "19.2.0",
"@types/react": "19.2.2", "react-dom": "19.2.0",
"@types/react-dom": "19.2.1", "@types/react": "19.2.2",
"@mui/material": "^6.5.0" "@types/react-dom": "19.2.1",
"@mui/material": "^6.5.0"
}
}, },
"engines": { "engines": {
"npm": ">=10", "pnpm": ">=9",
"node": ">=20" "node": ">=20"
} }
} }

View file

@ -9,3 +9,12 @@ export type {
User, User,
} from "./lib/database.js"; } from "./lib/database.js";
export { getWorkerUtils } from "./lib/utils.js"; export { getWorkerUtils } from "./lib/utils.js";
export {
getMaxAttachmentSize,
getMaxTotalAttachmentSize,
MAX_ATTACHMENTS,
} from "./lib/config/attachments.js";
export {
getSignalAutoGroupNameTemplate,
buildSignalGroupName,
} from "./lib/config/signal.js";

View file

@ -0,0 +1,36 @@
/**
* Attachment size configuration for messaging channels
*
* Environment variables:
* - BRIDGE_MAX_ATTACHMENT_SIZE_MB: Maximum size for a single attachment in MB (default: 50)
*/
/**
* Get the maximum attachment size in bytes from environment variable
* Defaults to 50MB if not set
*/
export function getMaxAttachmentSize(): number {
const envValue = process.env.BRIDGE_MAX_ATTACHMENT_SIZE_MB;
const sizeInMB = envValue ? parseInt(envValue, 10) : 50;
// Validate the value
if (isNaN(sizeInMB) || sizeInMB <= 0) {
console.warn(`Invalid BRIDGE_MAX_ATTACHMENT_SIZE_MB value: ${envValue}, using default 50MB`);
return 50 * 1024 * 1024;
}
return sizeInMB * 1024 * 1024;
}
/**
* Get the maximum total size for all attachments in a message
* This is 4x the single attachment size
*/
export function getMaxTotalAttachmentSize(): number {
return getMaxAttachmentSize() * 4;
}
/**
* Maximum number of attachments per message
*/
export const MAX_ATTACHMENTS = 10;
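For a concrete sense of the arithmetic above (default 50 MB per attachment, total capped at 4x that, at most 10 attachments per message), here is a minimal TypeScript sketch of a caller enforcing these limits. Only the three imports come from the exports added in this diff; the Attachment shape and the validateAttachments helper are illustrative, not part of the repo.

```typescript
// Sketch only: validates a batch of attachments against the limits above.
// Assumes the bridge-common exports shown in this compare; Attachment and
// validateAttachments are hypothetical names used for illustration.
import {
  getMaxAttachmentSize,
  getMaxTotalAttachmentSize,
  MAX_ATTACHMENTS,
} from "@link-stack/bridge-common";

interface Attachment {
  filename: string;
  sizeBytes: number;
}

function validateAttachments(attachments: Attachment[]): string[] {
  const errors: string[] = [];
  const maxSingle = getMaxAttachmentSize(); // 50 MB unless overridden via env
  const maxTotal = getMaxTotalAttachmentSize(); // 4x the single limit

  if (attachments.length > MAX_ATTACHMENTS) {
    errors.push(`Too many attachments: ${attachments.length} > ${MAX_ATTACHMENTS}`);
  }
  for (const a of attachments) {
    if (a.sizeBytes > maxSingle) {
      errors.push(`${a.filename} exceeds ${maxSingle} bytes`);
    }
  }
  const total = attachments.reduce((sum, a) => sum + a.sizeBytes, 0);
  if (total > maxTotal) {
    errors.push(`Combined size ${total} exceeds ${maxTotal} bytes`);
  }
  return errors;
}
```

With the defaults, getMaxAttachmentSize() returns 50 x 1024 x 1024 = 52,428,800 bytes and getMaxTotalAttachmentSize() returns four times that, 209,715,200 bytes.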

View file

@ -0,0 +1,29 @@
/**
* Signal configuration
*
* Environment variables:
* - SIGNAL_AUTO_GROUP_NAME_TEMPLATE: Template for auto-created group names (default: "Support Request: {conversationId}")
* Available placeholders: {conversationId}
*/
/**
* Get the Signal auto-group name template from environment variable
* Defaults to "Support Request: {conversationId}" if not set
*/
export function getSignalAutoGroupNameTemplate(): string {
const template = process.env.SIGNAL_AUTO_GROUP_NAME_TEMPLATE;
if (!template) {
return "Support Request: {conversationId}";
}
return template;
}
/**
* Build a Signal group name from the template and conversation ID
*/
export function buildSignalGroupName(conversationId: string): string {
const template = getSignalAutoGroupNameTemplate();
return template.replace('{conversationId}', conversationId);
}
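A short usage sketch of the template helper above. The default behaviour comes straight from the diff; the custom template value is an assumption chosen for illustration.

```typescript
// Sketch only: buildSignalGroupName() with and without a custom
// SIGNAL_AUTO_GROUP_NAME_TEMPLATE. Import path assumes the bridge-common
// exports added in this compare.
import { buildSignalGroupName } from "@link-stack/bridge-common";

// Default template: "Support Request: {conversationId}"
console.log(buildSignalGroupName("a1b2c3"));
// -> "Support Request: a1b2c3"

// The template is read from the environment at call time, so setting it
// before the next call takes effect immediately (illustrative value):
process.env.SIGNAL_AUTO_GROUP_NAME_TEMPLATE = "Case {conversationId} via Link";
console.log(buildSignalGroupName("a1b2c3"));
// -> "Case a1b2c3 via Link"
```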

View file

@ -1,19 +1,12 @@
import { PostgresDialect, CamelCasePlugin } from "kysely"; import { PostgresDialect, CamelCasePlugin } from "kysely";
import type { import type { GeneratedAlways, Generated, ColumnType, Selectable } from "kysely";
GeneratedAlways,
Generated,
ColumnType,
Selectable,
} from "kysely";
import pg from "pg"; import pg from "pg";
import { KyselyAuth } from "@auth/kysely-adapter"; import { KyselyAuth } from "@auth/kysely-adapter";
const { Pool, types } = pg; const { Pool, types } = pg;
type Timestamp = ColumnType<Date, Date | string>; type Timestamp = ColumnType<Date, Date | string>;
types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => new Date(val).toISOString());
new Date(val).toISOString(),
);
type GraphileJob = { type GraphileJob = {
taskIdentifier: string; taskIdentifier: string;
@ -138,15 +131,67 @@ export type VoiceLine = Selectable<Database["VoiceLine"]>;
export type Webhook = Selectable<Database["Webhook"]>; export type Webhook = Selectable<Database["Webhook"]>;
export type User = Selectable<Database["User"]>; export type User = Selectable<Database["User"]>;
export const db = new KyselyAuth<Database>({ // Lazy database initialization to avoid errors during build time
dialect: new PostgresDialect({ let _db: KyselyAuth<Database> | undefined;
pool: new Pool({
host: process.env.DATABASE_HOST, function getDb(): KyselyAuth<Database> {
database: process.env.DATABASE_NAME, if (_db) {
port: parseInt(process.env.DATABASE_PORT!), return _db;
user: process.env.DATABASE_USER, }
password: process.env.DATABASE_PASSWORD,
}), // Validate environment variables
}) as any, const DATABASE_HOST = process.env.DATABASE_HOST;
plugins: [new CamelCasePlugin() as any], const DATABASE_NAME = process.env.DATABASE_NAME;
const DATABASE_PORT = process.env.DATABASE_PORT;
const DATABASE_USER = process.env.DATABASE_USER;
const DATABASE_PASSWORD = process.env.DATABASE_PASSWORD;
if (
!DATABASE_HOST ||
!DATABASE_NAME ||
!DATABASE_PORT ||
!DATABASE_USER ||
!DATABASE_PASSWORD
) {
throw new Error(
"Missing required database environment variables: DATABASE_HOST, DATABASE_NAME, DATABASE_PORT, DATABASE_USER, DATABASE_PASSWORD",
);
}
const port = parseInt(DATABASE_PORT, 10);
if (isNaN(port) || port < 1 || port > 65535) {
throw new Error(
`Invalid DATABASE_PORT: ${DATABASE_PORT}. Must be a number between 1 and 65535.`,
);
}
_db = new KyselyAuth<Database>({
dialect: new PostgresDialect({
pool: new Pool({
host: DATABASE_HOST,
database: DATABASE_NAME,
port,
user: DATABASE_USER,
password: DATABASE_PASSWORD,
}),
}) as any,
plugins: [new CamelCasePlugin() as any],
});
return _db;
}
// Export db as a getter that lazily initializes the database
export const db = new Proxy({} as KyselyAuth<Database>, {
get(_target, prop) {
const instance = getDb();
const value = (instance as any)[prop];
// If it's a function, bind it to the actual instance to preserve 'this' context
if (typeof value === "function") {
return value.bind(instance);
}
return value;
},
}); });
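The Proxy-based lazy export above generalizes to any expensive singleton that must not be constructed at build time. A minimal standalone sketch of the same pattern follows; HeavyClient, createHeavyClient-style construction, and lazySingleton are hypothetical names, not part of the repo, and the key detail is re-binding function values so `this` points at the real instance rather than the empty proxy target.

```typescript
// Sketch only: a generic lazy singleton behind a Proxy, mirroring the
// pattern used for `db` above. HeavyClient stands in for KyselyAuth.
class HeavyClient {
  constructor(private readonly url: string) {
    console.log("connecting to", url); // runs only on first property access
  }
  query(sql: string): string {
    return `ran "${sql}" against ${this.url}`;
  }
}

function lazySingleton<T extends object>(create: () => T): T {
  let instance: T | undefined;
  return new Proxy({} as T, {
    get(_target, prop) {
      instance ??= create(); // construct on first access, then reuse
      const value = (instance as any)[prop];
      // Bind methods so `this` inside them refers to the real instance,
      // not the proxy's empty target object.
      return typeof value === "function" ? value.bind(instance) : value;
    },
  });
}

// Nothing is constructed at import/build time...
export const client = lazySingleton(
  () => new HeavyClient(process.env.HEAVY_URL ?? "postgres://localhost"),
);

// ...only when the first property is touched:
// client.query("select 1");
```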

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/bridge-common", "name": "@link-stack/bridge-common",
"version": "3.2.0b3", "version": "3.3.5",
"main": "build/main/index.js", "main": "build/main/index.js",
"type": "module", "type": "module",
"author": "Darren Clarke <darren@redaranj.com>", "author": "Darren Clarke <darren@redaranj.com>",
@ -15,8 +15,9 @@
"pg": "^8.16.3" "pg": "^8.16.3"
}, },
"devDependencies": { "devDependencies": {
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"@types/pg": "^8.15.5",
"typescript": "^5.9.3" "typescript": "^5.9.3"
} }
} }

View file

@ -29,21 +29,30 @@ export const QRCode: FC<QRCodeProps> = ({
useEffect(() => { useEffect(() => {
if (!verified && getValue && refreshInterval) { if (!verified && getValue && refreshInterval) {
const interval = setInterval(async () => { // Fetch immediately on mount
const fetchQR = async () => {
const { qr, kind } = await getValue(token); const { qr, kind } = await getValue(token);
setValue(qr); setValue(qr);
setKind(kind); setKind(kind);
}, refreshInterval * 1000); };
fetchQR();
// Then set up interval for refreshes
const interval = setInterval(fetchQR, refreshInterval * 1000);
return () => clearInterval(interval); return () => clearInterval(interval);
} }
}, [getValue, refreshInterval]); }, [getValue, refreshInterval, token, verified]);
return !verified ? ( return !verified ? (
<Box sx={{ backgroundColor: white, m: 2 }}> <Box sx={{ backgroundColor: white, m: 2 }}>
{kind === "data" ? ( {value ? (
<QRCodeInternal value={value} /> kind === "data" ? (
<QRCodeInternal value={value} />
) : (
<img src={value} alt={name} />
)
) : ( ) : (
<img src={value} alt={name} /> <Box>Loading QR code...</Box>
)} )}
<Box>{helperText}</Box> <Box>{helperText}</Box>
</Box> </Box>
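The fix above boils down to "fetch once immediately, then poll on an interval, and clean up on unmount". A minimal standalone sketch of that pattern, with a hypothetical hook name that is not part of the repo:

```typescript
// Sketch only: a generic "fetch immediately, then poll" React hook that
// mirrors the useEffect change above. usePolledValue is a hypothetical helper.
import { useEffect, useState } from "react";

export function usePolledValue<T>(
  fetcher: () => Promise<T>,
  intervalSeconds: number,
  enabled: boolean,
): T | undefined {
  const [value, setValue] = useState<T | undefined>(undefined);

  useEffect(() => {
    if (!enabled || !intervalSeconds) return;

    let cancelled = false;
    const run = async () => {
      const next = await fetcher();
      if (!cancelled) setValue(next); // avoid setting state after unmount
    };

    run(); // immediate fetch on mount, as in the QRCode fix
    const interval = setInterval(run, intervalSeconds * 1000);
    return () => {
      cancelled = true;
      clearInterval(interval);
    };
  }, [fetcher, intervalSeconds, enabled]);

  return value;
}
```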

View file

@ -2,11 +2,28 @@ import { ServiceConfig } from "../lib/service";
// import { generateSelectOneAction } from "../lib/actions"; // import { generateSelectOneAction } from "../lib/actions";
const getQRCode = async (token: string) => { const getQRCode = async (token: string) => {
const url = `/link/api/whatsapp/bots/${token}`; try {
const result = await fetch(url, { cache: "no-store" }); const url = `/link/api/whatsapp/bots/${token}`;
const { qr } = await result.json(); const result = await fetch(url, { cache: "no-store" });
return { qr, kind: "data" }; if (!result.ok) {
console.error(`Failed to fetch QR code: ${result.status} ${result.statusText}`);
return { qr: "", kind: "data" };
}
const data = await result.json();
const { qr } = data;
if (!qr) {
console.error("No QR code in response");
return { qr: "", kind: "data" };
}
return { qr, kind: "data" };
} catch (error) {
console.error("Error fetching QR code:", error);
return { qr: "", kind: "data" };
}
}; };
export const whatsappConfig: ServiceConfig = { export const whatsappConfig: ServiceConfig = {
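Returning an empty qr string on any failure pairs with the QRCode component change earlier in this compare: when value is empty, the component now shows the "Loading QR code..." placeholder instead of rendering a broken QR or image, so a transient fetch error degrades gracefully and the next poll can recover.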

View file

@ -1,17 +1,17 @@
{ {
"name": "@link-stack/bridge-ui", "name": "@link-stack/bridge-ui",
"version": "3.2.0b3", "version": "3.3.5",
"scripts": { "scripts": {
"build": "tsc -p tsconfig.json" "build": "tsc -p tsconfig.json"
}, },
"dependencies": { "dependencies": {
"@link-stack/bridge-common": "*", "@link-stack/bridge-common": "workspace:*",
"@link-stack/signal-api": "*", "@link-stack/signal-api": "workspace:*",
"@link-stack/ui": "*", "@link-stack/ui": "workspace:*",
"@mui/material": "^6", "@mui/material": "^6",
"@mui/x-data-grid-pro": "^7", "@mui/x-data-grid-pro": "^7",
"kysely": "0.27.5", "kysely": "0.27.5",
"next": "15.5.4", "next": "15.5.9",
"react": "19.2.0", "react": "19.2.0",
"react-dom": "19.2.0", "react-dom": "19.2.0",
"react-qr-code": "^2.0.18" "react-qr-code": "^2.0.18"

View file

@ -38,8 +38,6 @@ export const colors: any = {
helpYellow: "#fff4d5", helpYellow: "#fff4d5",
dwcDarkBlue: "#191847", dwcDarkBlue: "#191847",
hazyMint: "#ecf7f8", hazyMint: "#ecf7f8",
leafcutterElectricBlue: "#4d6aff",
leafcutterLightBlue: "#fafbfd",
waterbearElectricPurple: "#332c83", waterbearElectricPurple: "#332c83",
waterbearLightSmokePurple: "#eff3f8", waterbearLightSmokePurple: "#eff3f8",
bumpedPurple: "#212058", bumpedPurple: "#212058",

View file

@ -0,0 +1,2 @@
// Placeholder entry point for eslint-config package
module.exports = {};

View file

@ -1,10 +1,11 @@
{ {
"name": "@link-stack/eslint-config", "name": "@link-stack/eslint-config",
"version": "3.2.0b3", "version": "3.3.5",
"description": "amigo's eslint config", "description": "amigo's eslint config",
"main": "index.js",
"author": "Abel Luck <abel@guardianproject.info>", "author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"private": false, "private": true,
"scripts": { "scripts": {
"fmt": "prettier \"profile/**/*.js\" --write" "fmt": "prettier \"profile/**/*.js\" --write"
}, },

View file

@ -0,0 +1,2 @@
// Placeholder entry point for jest-config package
module.exports = {};

View file

@ -1,10 +1,11 @@
{ {
"name": "@link-stack/jest-config", "name": "@link-stack/jest-config",
"version": "3.2.0b3", "version": "3.3.5",
"description": "", "description": "",
"main": "index.js",
"author": "Abel Luck <abel@guardianproject.info>", "author": "Abel Luck <abel@guardianproject.info>",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"private": false, "private": true,
"engines": { "engines": {
"node": ">=14" "node": ">=14"
}, },

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/logger", "name": "@link-stack/logger",
"version": "3.2.0b3", "version": "3.3.5",
"description": "Shared logging utility for Link Stack monorepo", "description": "Shared logging utility for Link Stack monorepo",
"main": "./dist/index.js", "main": "./dist/index.js",
"module": "./dist/index.mjs", "module": "./dist/index.mjs",
@ -23,8 +23,8 @@
"pino-pretty": "^13.1.1" "pino-pretty": "^13.1.1"
}, },
"devDependencies": { "devDependencies": {
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"@types/node": "^24.7.0", "@types/node": "^24.7.0",
"eslint": "^9.37.0", "eslint": "^9.37.0",
"tsup": "^8.5.0", "tsup": "^8.5.0",
@ -33,4 +33,4 @@
"publishConfig": { "publishConfig": {
"access": "public" "access": "public"
} }
} }

View file

@ -17,6 +17,7 @@ export const getPinoConfig = (): LoggerOptions => {
timestamp: () => `,"timestamp":"${new Date(Date.now()).toISOString()}"`, timestamp: () => `,"timestamp":"${new Date(Date.now()).toISOString()}"`,
redact: { redact: {
paths: [ paths: [
// Top-level sensitive fields
'password', 'password',
'token', 'token',
'secret', 'secret',
@ -24,11 +25,35 @@ export const getPinoConfig = (): LoggerOptions => {
'apiKey', 'apiKey',
'authorization', 'authorization',
'cookie', 'cookie',
'HandshakeKey',
'receivedSecret',
'access_token',
'refresh_token',
'zammadCsrfToken',
'clientSecret',
// Nested sensitive fields (one level)
'*.password', '*.password',
'*.token', '*.token',
'*.secret', '*.secret',
'*.api_key', '*.api_key',
'*.apiKey', '*.apiKey',
'*.authorization',
'*.cookie',
'*.access_token',
'*.refresh_token',
'*.zammadCsrfToken',
'*.HandshakeKey',
'*.receivedSecret',
'*.clientSecret',
// Common nested patterns
'payload.HandshakeKey',
'headers.authorization',
'headers.cookie',
'headers.Authorization',
'headers.Cookie',
'credentials.password',
'credentials.secret',
'credentials.token',
], ],
censor: '[REDACTED]', censor: '[REDACTED]',
}, },
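A small sketch of how the expanded redaction list behaves at runtime, assuming getPinoConfig is re-exported from the @link-stack/logger entry point; the logged object and field values are made up for illustration.

```typescript
// Sketch only: pino applying the redact paths above.
import pino from "pino";
import { getPinoConfig } from "@link-stack/logger";

const logger = pino(getPinoConfig());

logger.info(
  {
    headers: { authorization: "Bearer abc123", host: "link.example" },
    payload: { HandshakeKey: "s3cr3t", body: "hello" },
  },
  "incoming webhook",
);
// Output keeps headers.host and payload.body, but prints "[REDACTED]" for
// headers.authorization and payload.HandshakeKey per the paths listed above.
```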

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/signal-api", "name": "@link-stack/signal-api",
"version": "3.2.0b3", "version": "3.3.5",
"type": "module", "type": "module",
"main": "build/index.js", "main": "build/index.js",
"exports": { "exports": {
@ -13,8 +13,8 @@
}, },
"devDependencies": { "devDependencies": {
"@openapitools/openapi-generator-cli": "^2.24.0", "@openapitools/openapi-generator-cli": "^2.24.0",
"@link-stack/typescript-config": "*", "@link-stack/typescript-config": "workspace:*",
"@link-stack/eslint-config": "*", "@link-stack/eslint-config": "workspace:*",
"@types/node": "^24", "@types/node": "^24",
"typescript": "^5" "typescript": "^5"
} }

View file

@ -0,0 +1,2 @@
// Placeholder entry point for typescript-config package
module.exports = {};

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/typescript-config", "name": "@link-stack/typescript-config",
"version": "3.2.0b3", "version": "3.3.5",
"description": "Shared TypeScript config", "description": "Shared TypeScript config",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"author": "Abel Luck <abel@guardianproject.info>", "author": "Abel Luck <abel@guardianproject.info>",

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/ui", "name": "@link-stack/ui",
"version": "3.2.0b3", "version": "3.3.5",
"description": "", "description": "",
"scripts": { "scripts": {
"build": "tsc -p tsconfig.json" "build": "tsc -p tsconfig.json"
@ -11,7 +11,7 @@
"@mui/material": "^6", "@mui/material": "^6",
"@mui/x-data-grid-pro": "^7", "@mui/x-data-grid-pro": "^7",
"@mui/x-license": "^7", "@mui/x-license": "^7",
"next": "15.5.4", "next": "15.5.9",
"react": "19.2.0", "react": "19.2.0",
"react-dom": "19.2.0" "react-dom": "19.2.0"
}, },

View file

@ -43,8 +43,6 @@ export const colors: any = {
helpYellow: "#fff4d5", helpYellow: "#fff4d5",
dwcDarkBlue: "#191847", dwcDarkBlue: "#191847",
hazyMint: "#ecf7f8", hazyMint: "#ecf7f8",
leafcutterElectricBlue: "#4d6aff",
leafcutterLightBlue: "#fafbfd",
waterbearElectricPurple: "#332c83", waterbearElectricPurple: "#332c83",
waterbearLightSmokePurple: "#eff3f8", waterbearLightSmokePurple: "#eff3f8",
bumpedPurple: "#212058", bumpedPurple: "#212058",

View file

@ -1,14 +1,14 @@
{ {
"name": "@link-stack/zammad-addon-bridge", "name": "@link-stack/zammad-addon-bridge",
"displayName": "Bridge", "displayName": "Bridge",
"version": "3.2.0b3", "version": "3.3.5",
"description": "An addon that adds CDR Bridge channels to Zammad.", "description": "An addon that adds CDR Bridge channels to Zammad.",
"scripts": { "scripts": {
"build": "node '../../node_modules/@link-stack/zammad-addon-common/dist/build.js'", "build": "node '../zammad-addon-common/dist/build.js'",
"migrate": "node '../../node_modules/@link-stack/zammad-addon-common/dist/migrate.js'" "migrate": "node '../zammad-addon-common/dist/migrate.js'"
}, },
"dependencies": { "dependencies": {
"@link-stack/zammad-addon-common": "*" "@link-stack/zammad-addon-common": "workspace:*"
}, },
"author": "", "author": "",
"license": "AGPL-3.0-or-later" "license": "AGPL-3.0-or-later"

View file

@ -45,13 +45,6 @@ class CdrWhatsappReply
@articleTypes: (articleTypes, ticket, ui) -> @articleTypes: (articleTypes, ticket, ui) ->
return articleTypes if !ui.permissionCheck('ticket.agent') return articleTypes if !ui.permissionCheck('ticket.agent')
# Check CDR Link allowed channels setting
allowedChannels = ui.Config.get('cdr_link_allowed_channels')
if allowedChannels && allowedChannels.trim()
whitelist = (channel.trim() for channel in allowedChannels.split(','))
# Return early if 'cdr_whatsapp' or 'whatsapp message' not in whitelist
return articleTypes if 'cdr_whatsapp' not in whitelist && 'whatsapp message' not in whitelist
return articleTypes if !ticket || !ticket.create_article_type_id return articleTypes if !ticket || !ticket.create_article_type_id
articleTypeCreate = App.TicketArticleType.find(ticket.create_article_type_id).name articleTypeCreate = App.TicketArticleType.find(ticket.create_article_type_id).name

View file

@ -0,0 +1,18 @@
# frozen_string_literal: true
class CdrSignalChannelsController < ApplicationController
prepend_before_action -> { authentication_check && authorize! }
def index
channels = Channel.where(area: 'Signal::Number', active: true).map do |channel|
{
id: channel.id,
phone_number: channel.options['phone_number'],
bot_endpoint: channel.options['bot_endpoint']
# bot_token intentionally excluded - bridge-worker should look it up from cdr database
}
end
render json: channels
end
end

View file

@ -115,13 +115,22 @@ class ChannelsCdrSignalController < ApplicationController
channel = channel_for_token(token) channel = channel_for_token(token)
return render json: {}, status: 401 if !channel || !channel.active return render json: {}, status: 401 if !channel || !channel.active
return render json: {}, status: 401 if channel.options[:token] != token # Use constant-time comparison to prevent timing attacks
return render json: {}, status: 401 unless ActiveSupport::SecurityUtils.secure_compare(
channel.options[:token].to_s,
token.to_s
)
# Handle group creation events # Handle group creation events
if params[:event] == 'group_created' if params[:event] == 'group_created'
return update_group return update_group
end end
# Handle group member joined events
if params[:event] == 'group_member_joined'
return handle_group_member_joined
end
channel_id = channel.id channel_id = channel.id
# validate input # validate input
@ -213,38 +222,13 @@ class ChannelsCdrSignalController < ApplicationController
Rails.logger.info "Channel ID: #{channel.id}" Rails.logger.info "Channel ID: #{channel.id}"
begin begin
# For group messages, search all tickets regardless of customer # Use text search on preferences YAML to efficiently find tickets without loading all into memory
# since users may have duplicate phone numbers # This prevents DoS attacks from memory exhaustion
all_tickets = Ticket.where.not(state_id: state_ids) ticket = Ticket.where.not(state_id: state_ids)
.order(updated_at: :desc) .where("preferences LIKE ?", "%channel_id: #{channel.id}%")
.where("preferences LIKE ?", "%chat_id: #{receiver_phone_number}%")
Rails.logger.info "Found #{all_tickets.count} active tickets (searching all customers for group match)" .order(updated_at: :desc)
.first
ticket = all_tickets.find do |t|
begin
has_preferences = t.preferences.is_a?(Hash)
has_cdr_signal = has_preferences && t.preferences['cdr_signal'].is_a?(Hash)
has_channel_id = has_preferences && t.preferences['channel_id'] == channel.id
if has_cdr_signal && has_channel_id
stored_chat_id = t.preferences['cdr_signal']['chat_id']
Rails.logger.info " - stored_chat_id: #{stored_chat_id}"
Rails.logger.info " - incoming_group_id: #{receiver_phone_number}"
matches = receiver_phone_number == stored_chat_id
Rails.logger.info " - MATCH: #{matches}"
matches
else
Rails.logger.info "Ticket ##{t.number} has no cdr_signal preferences or wrong channel"
false
end
rescue => e
Rails.logger.error "Error checking ticket #{t.id}: #{e.message}"
false
end
end
if ticket if ticket
Rails.logger.info "=== FOUND MATCHING TICKET BY GROUP ID: ##{ticket.number} ===" Rails.logger.info "=== FOUND MATCHING TICKET BY GROUP ID: ##{ticket.number} ==="
@ -397,6 +381,10 @@ class ChannelsCdrSignalController < ApplicationController
ticket.preferences[:cdr_signal][:original_recipient] = params[:original_recipient] if params[:original_recipient].present? ticket.preferences[:cdr_signal][:original_recipient] = params[:original_recipient] if params[:original_recipient].present?
ticket.preferences[:cdr_signal][:group_created_at] = params[:timestamp] if params[:timestamp].present? ticket.preferences[:cdr_signal][:group_created_at] = params[:timestamp] if params[:timestamp].present?
# Track whether user has joined the group (initially false)
# This will be updated to true when we receive a group join event from Signal
ticket.preferences[:cdr_signal][:group_joined] = params[:group_joined] if params.key?(:group_joined)
ticket.save! ticket.save!
Rails.logger.info "Signal group #{params[:group_id]} associated with ticket #{ticket.id}" Rails.logger.info "Signal group #{params[:group_id]} associated with ticket #{ticket.id}"
@ -407,4 +395,74 @@ class ChannelsCdrSignalController < ApplicationController
ticket_number: ticket.number ticket_number: ticket.number
}, status: :ok }, status: :ok
end end
# Webhook endpoint for receiving group member joined notifications from bridge-worker
# This is called when a user accepts the Signal group invitation
# Expected payload:
# {
# "event": "group_member_joined",
# "group_id": "group.base64encodedid",
# "member_phone": "+1234567890",
# "timestamp": "ISO8601 timestamp"
# }
def handle_group_member_joined
# Validate required parameters
errors = {}
errors['event'] = 'required' unless params[:event].present?
errors['group_id'] = 'required' unless params[:group_id].present?
errors['member_phone'] = 'required' unless params[:member_phone].present?
if errors.present?
render json: {
errors: errors
}, status: :bad_request
return
end
# Find ticket(s) with this group_id in preferences
# Use text search on preferences YAML for efficient lookup (prevents DoS from loading all tickets)
state_ids = Ticket::State.where(name: %w[closed merged removed]).pluck(:id)
ticket = Ticket.where.not(state_id: state_ids)
.where("preferences LIKE ?", "%chat_id: #{params[:group_id]}%")
.order(updated_at: :desc)
.first
unless ticket
Rails.logger.warn "Signal group member joined: Ticket not found for group_id #{params[:group_id]}"
render json: { error: 'Ticket not found for this group' }, status: :not_found
return
end
# Idempotency check: if already marked as joined, skip update and return success
# This prevents unnecessary database writes when the cron job sends duplicate notifications
if ticket.preferences.dig('cdr_signal', 'group_joined') == true
Rails.logger.debug "Signal group member #{params[:member_phone]} already marked as joined for group #{params[:group_id]} ticket #{ticket.id}, skipping update"
render json: {
success: true,
ticket_id: ticket.id,
ticket_number: ticket.number,
group_joined: true,
already_joined: true
}, status: :ok
return
end
# Update group_joined flag
member_phone = params[:member_phone]
ticket.preferences[:cdr_signal][:group_joined] = true
ticket.preferences[:cdr_signal][:group_joined_at] = params[:timestamp] if params[:timestamp].present?
ticket.preferences[:cdr_signal][:group_joined_by] = member_phone
ticket.save!
Rails.logger.info "Signal group member #{member_phone} joined group #{params[:group_id]} for ticket #{ticket.id}"
render json: {
success: true,
ticket_id: ticket.id,
ticket_number: ticket.number,
group_joined: true
}, status: :ok
end
end end
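From the bridge-worker side, the webhook documented above can be exercised with a payload matching the handler's required params. A sketch follows; the payload keys and the idempotent already_joined behaviour come from the controller code above, while the webhook URL shape, token placement, and helper name are assumptions.

```typescript
// Sketch only: notifying Zammad that a member accepted the Signal group
// invitation. Payload keys match handle_group_member_joined above; the
// webhookUrl argument (including any channel token) is an assumption.
export async function notifyGroupMemberJoined(
  webhookUrl: string,
  groupId: string,
  memberPhone: string,
): Promise<void> {
  const response = await fetch(webhookUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      event: "group_member_joined",
      group_id: groupId,
      member_phone: memberPhone,
      timestamp: new Date().toISOString(),
    }),
  });
  if (!response.ok) {
    throw new Error(`group_member_joined webhook failed: ${response.status}`);
  }
  // The handler is idempotent: repeating the call for an already-joined
  // group returns success with already_joined: true instead of rewriting
  // the ticket preferences.
}
```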

View file

@ -30,6 +30,25 @@ class CommunicateCdrSignalJob < ApplicationJob
log_error(article, log_error(article,
"Can't find ticket.preferences['cdr_signal']['chat_id'] for Ticket.find(#{article.ticket_id})") "Can't find ticket.preferences['cdr_signal']['chat_id'] for Ticket.find(#{article.ticket_id})")
end end
# Check if this is a group chat and if the user has joined
chat_id = ticket.preferences['cdr_signal']['chat_id']
is_group_chat = chat_id&.start_with?('group.')
group_joined = ticket.preferences.dig('cdr_signal', 'group_joined')
# If this is a group chat and user hasn't joined yet, don't send the message
if is_group_chat && group_joined == false
Rails.logger.info "Ticket ##{ticket.number}: User hasn't joined Signal group yet, skipping message delivery"
# Mark article as pending delivery
article.preferences['delivery_status'] = 'pending'
article.preferences['delivery_status_message'] = 'Waiting for user to join Signal group'
article.preferences['delivery_status_date'] = Time.zone.now
article.save!
# Retry later when user might have joined
raise 'User has not joined Signal group yet'
end
channel = ::CdrSignal.bot_by_bot_token(ticket.preferences['cdr_signal']['bot_token']) channel = ::CdrSignal.bot_by_bot_token(ticket.preferences['cdr_signal']['bot_token'])
channel ||= ::Channel.lookup(id: ticket.preferences['channel_id']) channel ||= ::Channel.lookup(id: ticket.preferences['channel_id'])
unless channel unless channel

View file

@ -0,0 +1,9 @@
# frozen_string_literal: true
module Controllers
class CdrSignalChannelsControllerPolicy < Controllers::ApplicationControllerPolicy
def index?
user.permissions?('admin.channel')
end
end
end

View file

@ -0,0 +1,5 @@
Zammad::Application.routes.draw do
api_path = Rails.configuration.api_path
match api_path + '/cdr_signal_channels', to: 'cdr_signal_channels#index', via: :get
end

View file

@ -1,6 +1,6 @@
{ {
"name": "@link-stack/zammad-addon-common", "name": "@link-stack/zammad-addon-common",
"version": "3.2.0b3", "version": "3.3.5",
"description": "", "description": "",
"bin": { "bin": {
"zpm-build": "./dist/build.js", "zpm-build": "./dist/build.js",
@ -16,7 +16,7 @@
"author": "", "author": "",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"dependencies": { "dependencies": {
"@link-stack/logger": "*", "@link-stack/logger": "workspace:*",
"glob": "^11.0.3" "glob": "^11.0.3"
} }
} }

View file

@ -1,14 +1,14 @@
{ {
"name": "@link-stack/zammad-addon-hardening", "name": "@link-stack/zammad-addon-hardening",
"displayName": "Hardening", "displayName": "Hardening",
"version": "3.2.0b3", "version": "3.3.5",
"description": "A Zammad addon that hardens a Zammad instance according to CDR's needs.", "description": "A Zammad addon that hardens a Zammad instance according to CDR's needs.",
"scripts": { "scripts": {
"build": "node '../../node_modules/@link-stack/zammad-addon-common/dist/build.js'", "build": "node '../zammad-addon-common/dist/build.js'",
"migrate": "node '../../node_modules/@link-stack/zammad-addon-common/dist/migrate.js'" "migrate": "node '../zammad-addon-common/dist/migrate.js'"
}, },
"dependencies": { "dependencies": {
"@link-stack/zammad-addon-common": "*" "@link-stack/zammad-addon-common": "workspace:*"
}, },
"author": "", "author": "",
"license": "AGPL-3.0-or-later" "license": "AGPL-3.0-or-later"

10586
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large

3
pnpm-workspace.yaml Normal file
View file

@ -0,0 +1,3 @@
packages:
- 'apps/*'
- 'packages/*'

115
scripts/update-version.ts Normal file
View file

@ -0,0 +1,115 @@
#!/usr/bin/env node --experimental-strip-types
/**
* Updates version numbers across all package.json files in the monorepo
* Usage: node scripts/update-version.ts <version>
* Example: node scripts/update-version.ts 3.3.0-beta.1
*/
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
interface UpdateResult {
success: true;
oldVersion: string;
newVersion: string;
}
interface UpdateError {
success: false;
error: string;
}
type UpdatePackageResult = UpdateResult | UpdateError;
// Get version from command line args
const newVersion = process.argv[2];
if (!newVersion) {
console.error('Error: Version number required');
console.error('Usage: node scripts/update-version.ts <version>');
console.error('Example: node scripts/update-version.ts 3.3.0-beta.1');
process.exit(1);
}
// Validate version format (basic check)
const versionRegex = /^\d+\.\d+\.\d+(-[a-zA-Z0-9.]+)?$/;
if (!versionRegex.test(newVersion)) {
console.error(`Error: Invalid version format: ${newVersion}`);
console.error('Expected format: X.Y.Z or X.Y.Z-suffix (e.g., 3.3.0-beta.1)');
process.exit(1);
}
/**
* Recursively find all package.json files
*/
function findPackageJsonFiles(dir: string, files: string[] = []): string[] {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
// Skip node_modules, .git, and other common directories
if (entry.isDirectory()) {
if (!['node_modules', '.git', '.next', '.turbo', 'build', 'dist'].includes(entry.name)) {
findPackageJsonFiles(fullPath, files);
}
} else if (entry.name === 'package.json') {
files.push(fullPath);
}
}
return files;
}
/**
* Update version in a package.json file
*/
function updatePackageVersion(filePath: string, version: string): UpdatePackageResult {
try {
const content = fs.readFileSync(filePath, 'utf8');
const pkg = JSON.parse(content);
const oldVersion = pkg.version;
pkg.version = version;
// Write back with same formatting (2 spaces, newline at end)
fs.writeFileSync(filePath, JSON.stringify(pkg, null, 2) + '\n', 'utf8');
return { success: true, oldVersion, newVersion: version };
} catch (error) {
return { success: false, error: error instanceof Error ? error.message : String(error) };
}
}
// Main execution
console.log(`Updating all package.json files to version ${newVersion}...\n`);
const rootDir = path.join(__dirname, '..');
const packageFiles = findPackageJsonFiles(rootDir);
let successCount = 0;
let failureCount = 0;
for (const filePath of packageFiles) {
const relativePath = path.relative(rootDir, filePath);
const result = updatePackageVersion(filePath, newVersion);
if (result.success) {
console.log(`${relativePath}: ${result.oldVersion}${result.newVersion}`);
successCount++;
} else {
console.error(`${relativePath}: ${result.error}`);
failureCount++;
}
}
console.log(`\nSummary: ${successCount} updated, ${failureCount} failed`);
if (failureCount > 0) {
process.exit(1);
}
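With the root package.json's update-version script earlier in this compare, this can be invoked as, for example, pnpm run update-version 3.3.5; pnpm forwards the trailing argument to the script, which then rewrites every workspace package.json that passes the version-format check.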

View file

@ -1,9 +0,0 @@
#!/usr/bin/env ruby
require '/opt/zammad/config/boot'
require '/opt/zammad/config/application'
Rails.application.initialize!
Setting.set('cdr_link_allowed_channels', 'note,cdr_signal,email')
puts "Setting 'cdr_link_allowed_channels' has been set to: 'note,cdr_signal,email'"