Add media verification addon with C2PA/ProofMode support
Introduces a new zammad-addon-media-verify package that uses the proofmode Ruby gem (built from proofmode-rust) to verify media attachments on tickets for C2PA content credentials and ProofMode cryptographic proofs.

The addon runs as a Zammad scheduled job that:

- Scans incoming ticket articles for media attachments (images, video, audio, PDFs, ZIPs)
- Calls proofmode's check_files() to verify C2PA manifests, PGP signatures, OpenTimestamps, and EXIF metadata
- Posts a human-readable verification report as an internal note on the ticket
- Tracks checked articles via preferences to avoid duplicate processing

Also restores the zammad-addon-common package (previously removed in a repo cleanup) to share build tooling (the ZPM builder and migration generator) between addon packages, keeping things DRY. The link addon now imports these helpers from common instead of inlining them.

Docker integration:

- Dockerfile updated to install the proofmode gem from docker/zammad/gems/
- setup.rb updated to handle the MediaVerify package lifecycle

https://claude.ai/code/session_01GJYbRCFFJCJDAEcEVbD36N
parent c40d7d056e
commit 33375c9221
22 changed files with 761 additions and 2821 deletions
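
The verification flow can be exercised by hand. A minimal sketch (assuming a Rails console inside the Zammad container; Setting.set and the job class come from the code below):

    # Enable the integration and trigger one verification pass manually;
    # the scheduler added by the migration below does this every 5 minutes.
    Setting.set('media_verify_enabled', true)
    CdrMediaVerifyJob.perform_now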

packages/zammad-addon-common/package.json (new file, +19)
@@ -0,0 +1,19 @@
{
  "name": "@link-stack/zammad-addon-common",
  "version": "3.5.0-beta.1",
  "description": "Shared build tooling for Zammad addon packages.",
  "exports": {
    "./build": "./src/build.ts",
    "./migrate": "./src/migrate.ts"
  },
  "devDependencies": {
    "@types/node": "^24.7.0",
    "glob": "^11.0.3",
    "typescript": "^5"
  },
  "dependencies": {
    "glob": "^11.0.3"
  },
  "author": "",
  "license": "AGPL-3.0-or-later"
}

packages/zammad-addon-common/src/build.ts (new file, +88)
@@ -0,0 +1,88 @@
import { promises as fs } from "fs";
import { glob } from "glob";
import path from "path";
import os from "os";

const log = (msg: string, data?: Record<string, any>) => {
  console.log(JSON.stringify({ msg, ...data, timestamp: new Date().toISOString() }));
};

const packageFile = async (actualPath: string): Promise<any> => {
  log('Packaging file', { actualPath });
  const packagePath = actualPath.slice(4);
  const data = await fs.readFile(actualPath, "utf-8");
  const content = Buffer.from(data, "utf-8").toString("base64");
  const fileStats = await fs.stat(actualPath);
  const permission = parseInt(
    (fileStats.mode & 0o777).toString(8).slice(-3),
    10,
  );
  return {
    location: packagePath,
    permission,
    encode: "base64",
    content,
  };
};

const packageFiles = async () => {
  const packagedFiles: any[] = [];
  const ignoredPatterns = [
    /\.gitkeep/,
    /Gemfile/,
    /Gemfile.lock/,
    /\.ruby-version/,
  ];

  const processDir = async (dir: string) => {
    const entries = await fs.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const entryPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        await processDir(entryPath);
      } else if (entry.isFile()) {
        if (!ignoredPatterns.some((pattern) => pattern.test(entry.name))) {
          packagedFiles.push(await packageFile(entryPath));
        }
      }
    }
  };

  await processDir("./src/");
  return packagedFiles;
};

export const createZPM = async ({
  name,
  displayName,
  version,
}: Record<string, string>) => {
  const files = await packageFiles();
  const skeleton = {
    name: displayName,
    version,
    vendor: "Center for Digital Resilience",
    license: "AGPL-v3+",
    url: `https://gitlab.com/digiresilience/link/link-stack/packages/${name}`,
    buildhost: os.hostname(),
    builddate: new Date().toISOString(),
    files,
  };
  const pkg = JSON.stringify(skeleton, null, 2);

  try {
    const oldFiles = await glob(`../../docker/zammad/addons/${name}-v*.zpm`, {});

    for (const file of oldFiles) {
      await fs.unlink(file);
      log('File was deleted', { file });
    }
  } catch (err) {
    log('Error removing old addon files', { error: String(err) });
  }
  await fs.writeFile(
    `../../docker/zammad/addons/${name}-v${version}.zpm`,
    pkg,
    "utf-8",
  );
};
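
The skeleton written by createZPM is plain JSON with base64-encoded file bodies, which Zammad's package manager unpacks on install. A minimal sketch of inspecting a built artifact (the path is hypothetical; field names come from the skeleton above):

    # Ruby sketch: list the files inside a generated .zpm.
    require 'json'
    require 'base64'

    zpm = JSON.parse(File.read('docker/zammad/addons/zammad-addon-media-verify-v3.5.0-beta.1.zpm'))
    puts "#{zpm['name']} #{zpm['version']} (built on #{zpm['buildhost']})"
    zpm['files'].each do |f|
      # 'content' is base64 (encode: "base64"); 'permission' is the file mode.
      puts "#{f['location']}  mode #{f['permission']}  #{Base64.decode64(f['content']).bytesize} bytes"
    end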

packages/zammad-addon-common/src/migrate.ts (new file, +43)
@@ -0,0 +1,43 @@
import { promises as fs } from "fs";
import path from "path";

const underscore = (str: string) => {
  return str
    .replace(/([a-z\d])([A-Z])/g, "$1_$2")
    .replace(/([A-Z]+)([A-Z][a-z\d]+)/g, "$1_$2")
    .toLowerCase();
}

const camelize = (str: string): string => {
  const camelizedStr = str.replace(/_([a-z])/g, (g) => g[1].toUpperCase());

  return camelizedStr.charAt(0).toUpperCase() + camelizedStr.slice(1);
}

export const createMigration = async ({ displayName }: Record<string, string>) => {
  const rawName: string = await new Promise((resolve) => {
    process.stdin.setEncoding("utf-8");
    process.stdout.write("Enter migration name: ");
    process.stdin.once("data", (data: string) => {
      resolve(data.trim());
    });
  });

  const migrationBaseName = `${displayName}_${underscore(rawName)}`;
  const migrationName = camelize(migrationBaseName);
  const migrationTemplate = `class MIGRATION_NAME < ActiveRecord::Migration[5.2]
  def self.up
    # add your code here
  end

  def self.down
    # add your code here
  end
end`;
  const contents = migrationTemplate.replace("MIGRATION_NAME", migrationName);
  const time = new Date().toISOString().replace(/[-:.]/g, "").slice(0, 14);
  const migrationFileName = `${time}_${migrationBaseName}.rb`;
  const addonDir = path.join("src", "db", "addon", displayName);
  await fs.mkdir(addonDir, { recursive: true });
  await fs.writeFile(path.join(addonDir, migrationFileName), contents);
}
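
To make the naming pipeline concrete: for the MediaVerify addon, entering `AddMediaVerify` at the prompt underscores to `add_media_verify`, prefixes the display name, and camelizes the class name. A sketch of the resulting file (timestamp hypothetical):

    # src/db/addon/MediaVerify/20250101120000_MediaVerify_add_media_verify.rb
    class MediaVerifyAddMediaVerify < ActiveRecord::Migration[5.2]
      def self.up
        # add your code here
      end

      def self.down
        # add your code here
      end
    end

This matches the class name of the actual migration shipped with the addon below.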

packages/zammad-addon-common/tsconfig.json (new file, +13)
@@ -0,0 +1,13 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "strict": true,
    "declaration": true,
    "sourceMap": true,
    "outDir": "dist",
    "rootDir": "src"
  },
  "include": ["src"]
}

@@ -7,9 +7,11 @@
    "build": "tsx scripts/build.ts",
    "migrate": "tsx scripts/migrate.ts"
  },
  "dependencies": {
    "@link-stack/zammad-addon-common": "workspace:*"
  },
  "devDependencies": {
    "@types/node": "^24.7.0",
    "glob": "^11.0.3",
    "tsx": "^4.20.6",
    "typescript": "^5"
  },

@@ -1,94 +1,12 @@
#!/usr/bin/env node

import { promises as fs } from "fs";
import { createZPM } from "@link-stack/zammad-addon-common/build";

[removed: the inlined log/packageFile/packageFiles/createZPM definitions, identical to the new packages/zammad-addon-common/src/build.ts above]

const main = async () => {
  const packageJSON = JSON.parse(await fs.readFile("./package.json", "utf-8"));
  const { name: fullName, displayName, version } = packageJSON;

@@ -1,48 +1,7 @@
#!/usr/bin/env node

import { promises as fs } from "fs";
import { createMigration } from "@link-stack/zammad-addon-common/migrate";

[removed: the inlined underscore/camelize/createMigration definitions, identical to the new packages/zammad-addon-common/src/migrate.ts above]

const main = async () => {
  const packageJSON = JSON.parse(await fs.readFile("./package.json", "utf-8"));

packages/zammad-addon-media-verify/package.json (new file, +20)
@@ -0,0 +1,20 @@
{
  "name": "@link-stack/zammad-addon-media-verify",
  "displayName": "MediaVerify",
  "version": "3.5.0-beta.1",
  "description": "Zammad addon that verifies media attachments for C2PA and ProofMode data using the proofmode-rust library.",
  "scripts": {
    "build": "tsx scripts/build.ts",
    "migrate": "tsx scripts/migrate.ts"
  },
  "dependencies": {
    "@link-stack/zammad-addon-common": "workspace:*"
  },
  "devDependencies": {
    "@types/node": "^24.7.0",
    "tsx": "^4.20.6",
    "typescript": "^5"
  },
  "author": "",
  "license": "AGPL-3.0-or-later"
}

packages/zammad-addon-media-verify/scripts/build.ts (new file, +18)
@@ -0,0 +1,18 @@
#!/usr/bin/env node

import { promises as fs } from "fs";
import { createZPM } from "@link-stack/zammad-addon-common/build";

const log = (msg: string, data?: Record<string, any>) => {
  console.log(JSON.stringify({ msg, ...data, timestamp: new Date().toISOString() }));
};

const main = async () => {
  const packageJSON = JSON.parse(await fs.readFile("./package.json", "utf-8"));
  const { name: fullName, displayName, version } = packageJSON;
  log('Building addon', { displayName, version });
  const name = fullName.split("/").pop();
  await createZPM({ name, displayName, version });
};

main();

packages/zammad-addon-media-verify/scripts/migrate.ts (new file, +12)
@@ -0,0 +1,12 @@
#!/usr/bin/env node

import { promises as fs } from "fs";
import { createMigration } from "@link-stack/zammad-addon-common/migrate";

const main = async () => {
  const packageJSON = JSON.parse(await fs.readFile("./package.json", "utf-8"));
  const { displayName } = packageJSON;
  await createMigration({ displayName });
}

main();

packages/zammad-addon-media-verify/src/.ruby-version (new file, +1)
@@ -0,0 +1 @@
3.1.3

packages/zammad-addon-media-verify/src/Gemfile (new file, +9)
@@ -0,0 +1,9 @@
# frozen_string_literal: true

# This Gemfile documents Ruby dependencies for the media-verify addon.
# It is NOT included in the .zpm package (excluded by the build script).
# The proofmode gem must be installed at the Docker image level.

source 'https://rubygems.org'

gem 'proofmode', '~> 0.7.0'
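
Since the gem is deliberately excluded from the .zpm, a quick smoke test inside the built image confirms the Docker layer provides it (a hedged sketch; the VERSION constant is assumed, hence the guard):

    # Run via a Rails console or `rails runner` inside the Zammad container.
    require 'proofmode'
    puts defined?(Proofmode::VERSION) ? Proofmode::VERSION : 'proofmode loaded'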

@@ -0,0 +1,92 @@
# frozen_string_literal: true

class CdrMediaVerifyJob < ApplicationJob
  BATCH_SIZE = 20

  def perform
    return unless Setting.get('media_verify_enabled')

    articles_to_check.each do |article|
      verify_article(article)
    rescue StandardError => e
      Rails.logger.error "MediaVerify: Failed to check article #{article.id}: #{e.message}"
      Rails.logger.error e.backtrace&.first(5)&.join("\n")
      mark_checked(article, error: e.message)
    end
  end

  def self.perform_now
    new.perform
  end

  private

  def articles_to_check
    # Find articles with attachments that haven't been checked yet.
    # We look for articles that:
    # 1. Have at least one Store (attachment) record
    # 2. Haven't been marked as media_verify_checked in preferences
    # 3. Are from customers (incoming media) - agent articles are unlikely to need verification
    article_ids_with_attachments = Store
      .where(store_object_id: store_object_id)
      .select(:o_id)
      .distinct
      .pluck(:o_id)

    return [] if article_ids_with_attachments.empty?

    Ticket::Article
      .where(id: article_ids_with_attachments)
      .where(sender: Ticket::Article::Sender.find_by(name: 'Customer'))
      .where.not("preferences->>'media_verify_checked' = ?", 'true')
      .order(created_at: :desc)
      .limit(BATCH_SIZE)
  end

  def store_object_id
    @store_object_id ||= ObjectLookup.by_name('Ticket::Article')
  end

  def verify_article(article)
    Rails.logger.info "MediaVerify: Checking article #{article.id} on ticket #{article.ticket_id}"

    check_output = CdrMediaVerify.check_article(article)

    if check_output.nil?
      Rails.logger.debug { "MediaVerify: No verifiable attachments in article #{article.id}" }
      mark_checked(article)
      return
    end

    body = CdrMediaVerify.format_result(check_output)
    create_verification_article(article.ticket, article, body)
    mark_checked(article)

    Rails.logger.info "MediaVerify: Posted verification report for article #{article.id}"
  end

  def create_verification_article(ticket, source_article, body)
    Ticket::Article.create!(
      ticket_id: ticket.id,
      subject: 'Media Verification Report',
      content_type: 'text/plain',
      body: body,
      internal: true,
      sender: Ticket::Article::Sender.find_by(name: 'System'),
      type: Ticket::Article::Type.find_by(name: 'note'),
      preferences: {
        media_verify_report: true,
        media_verify_source_article_id: source_article.id,
      },
      updated_by_id: 1,
      created_by_id: 1,
    )
  end

  def mark_checked(article, error: nil)
    article.preferences['media_verify_checked'] = 'true'
    article.preferences['media_verify_checked_at'] = Time.current.iso8601
    article.preferences['media_verify_error'] = error if error
    article.save!
  end
end
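
The preferences markers written by mark_checked are what keep an article out of the next batch. A minimal console sketch (hypothetical article id) of what they look like after a run:

    article = Ticket::Article.find(123)                 # hypothetical id
    article.preferences['media_verify_checked']         # => "true" once processed
    article.preferences['media_verify_checked_at']      # => ISO8601 time of the check
    article.preferences['media_verify_error']           # set only when verify_article raised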

@@ -0,0 +1,7 @@
# frozen_string_literal: true

Rails.application.config.after_initialize do
  require 'cdr_media_verify'

  Rails.logger.info 'CDR MediaVerify addon loaded'
end

@@ -0,0 +1,49 @@
# frozen_string_literal: true

class MediaVerifyAddMediaVerify < ActiveRecord::Migration[5.2]
  def self.up
    # Setting to enable/disable media verification
    Setting.create_if_not_exists(
      title: 'Media Verification',
      name: 'media_verify_enabled',
      area: 'Integration::MediaVerify',
      description: 'Enable automatic verification of media attachments for C2PA and ProofMode data.',
      options: {
        form: [
          {
            display: '',
            null: true,
            name: 'media_verify_enabled',
            tag: 'boolean',
            options: {
              true => 'yes',
              false => 'no',
            },
          },
        ],
      },
      state: true,
      preferences: {
        prio: 1,
        permission: ['admin.integration'],
      },
      frontend: false,
    )

    # Scheduler to run media verification checks
    Scheduler.create_if_not_exists(
      name: 'Verify media attachments for C2PA and ProofMode data',
      method: 'CdrMediaVerifyJob.perform_now',
      period: 5.minutes,
      prio: 3,
      active: true,
      updated_by_id: 1,
      created_by_id: 1,
    )
  end

  def self.down
    Scheduler.find_by(name: 'Verify media attachments for C2PA and ProofMode data')&.destroy
    Setting.find_by(name: 'media_verify_enabled')&.destroy
  end
end
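
Operationally, verification can be paused without uninstalling the package. A hedged sketch using the same records this migration creates:

    # The job checks this setting first, so this alone stops new reports.
    Setting.set('media_verify_enabled', false)

    # Or deactivate the scheduler entry entirely:
    Scheduler.find_by(name: 'Verify media attachments for C2PA and ProofMode data')
             &.update!(active: false)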

packages/zammad-addon-media-verify/src/lib/cdr_media_verify.rb (new file, +333)
@@ -0,0 +1,333 @@
# frozen_string_literal: true

require 'proofmode'
require 'json'
require 'tempfile'

class CdrMediaVerify
  VERIFIABLE_CONTENT_TYPES = %w[
    image/jpeg
    image/png
    image/heic
    image/heif
    image/tiff
    image/webp
    video/mp4
    video/quicktime
    video/webm
    video/x-msvideo
    audio/mpeg
    audio/ogg
    audio/wav
    audio/mp4
    application/pdf
    application/zip
    application/x-zip-compressed
  ].freeze

  class CheckCallbacks < Proofmode::ProofModeCallbacks
    attr_reader :progress_messages

    def initialize
      super
      @progress_messages = []
    end

    def get_location
      nil
    end

    def get_device_info
      nil
    end

    def get_network_info
      nil
    end

    def save_data(_hash, _filename, _data)
      # No-op: we only check, we don't generate proofs
    end

    def save_text(_hash, _filename, _text)
      # No-op: we only check, we don't generate proofs
    end

    def sign_data(_data)
      nil
    end

    def report_progress(message)
      @progress_messages << message
      Rails.logger.debug { "ProofMode check progress: #{message}" }
    end
  end

  def self.verifiable?(attachment)
    content_type = attachment.preferences&.dig('Mime-Type') ||
                   attachment.preferences&.dig('Content-Type') ||
                   'application/octet-stream'
    VERIFIABLE_CONTENT_TYPES.include?(content_type.downcase)
  end

  def self.check_article(article)
    attachments = Store.list(object: 'Ticket::Article', o_id: article.id)
    verifiable = attachments.select { |a| verifiable?(a) }
    return nil if verifiable.empty?

    Dir.mktmpdir('proofmode-check') do |tmpdir|
      file_paths = verifiable.map do |attachment|
        path = File.join(tmpdir, sanitize_filename(attachment.filename))
        File.binwrite(path, attachment.content)
        path
      end

      callbacks = CheckCallbacks.new
      result = Proofmode.check_files(file_paths, callbacks)

      {
        result: result,
        progress: callbacks.progress_messages,
        attachments: verifiable.map(&:filename)
      }
    end
  end

  def self.format_result(check_output)
    result = check_output[:result]
    filenames = check_output[:attachments]

    lines = []
    lines << '=== Media Verification Report ==='
    lines << ''
    lines << "Files checked: #{filenames.join(', ')}"
    lines << "Check time: #{Time.current.strftime('%Y-%m-%d %H:%M:%S %Z')}"
    lines << ''

    begin
      # The result may be a ProofCheck object or JSON-serializable structure.
      # Convert to hash for uniform access.
      data = result_to_hash(result)

      lines.concat(format_metadata(data))
      lines.concat(format_integrity(data))
      lines.concat(format_consistency(data))
      lines.concat(format_synchrony(data))
      lines.concat(format_errors(data))
    rescue StandardError => e
      lines << "Raw result: #{result.inspect}"
      lines << "Format error: #{e.message}"
    end

    lines << ''
    lines << '=== End of Report ==='
    lines.join("\n")
  end

  class << self
    private

    def sanitize_filename(filename)
      # Remove path traversal attempts and null bytes
      filename.gsub(/[\/\\]/, '_').gsub("\0", '').strip
    end

    def result_to_hash(result)
      if result.is_a?(Hash)
        result
      elsif result.respond_to?(:to_json)
        JSON.parse(result.to_json)
      elsif result.respond_to?(:to_h)
        result.to_h
      else
        JSON.parse(result.to_s)
      end
    rescue JSON::ParserError
      { 'raw' => result.to_s }
    end

    def format_metadata(data)
      lines = []
      meta = data['metadata'] || data[:metadata]
      return lines unless meta

      lines << '--- Metadata ---'
      lines << "  File count: #{meta['file_count'] || meta[:file_count]}" if meta['file_count'] || meta[:file_count]
      lines << "  Platform: #{meta['platform'] || meta[:platform]}" if meta['platform'] || meta[:platform]
      lines << ''
      lines
    end

    def format_integrity(data)
      lines = []
      integrity = data['integrity'] || data[:integrity]
      return lines unless integrity

      lines << '--- Integrity Verification ---'

      # PGP verification
      pgp = integrity['pgp'] || integrity[:pgp]
      if pgp
        lines << '  PGP Signatures:'
        if pgp.is_a?(Hash)
          media_verified = pgp.dig('media', 'verified') || pgp.dig(:media, :verified)
          json_verified = pgp.dig('json', 'verified') || pgp.dig(:json, :verified)
          lines << "    Media signature: #{verification_status(media_verified)}"
          lines << "    Proof JSON signature: #{verification_status(json_verified)}"
        else
          lines << "    Status: #{pgp}"
        end
      end

      # C2PA verification
      c2pa = integrity['c2pa'] || integrity[:c2pa]
      if c2pa
        lines << '  C2PA (Content Credentials):'
        if c2pa.is_a?(Hash) && (c2pa['manifest'] || c2pa[:manifest])
          manifest = c2pa['manifest'] || c2pa[:manifest]
          lines << '    Manifest found: Yes'
          if manifest.is_a?(String)
            begin
              manifest_data = JSON.parse(manifest)
              lines << "    Title: #{manifest_data['title']}" if manifest_data['title']
              lines << "    Claim generator: #{manifest_data['claim_generator']}" if manifest_data['claim_generator']

              if manifest_data['assertions']
                lines << "    Assertions: #{manifest_data['assertions'].length}"
              end
            rescue JSON::ParserError
              lines << "    Manifest data: #{manifest[0..200]}"
            end
          else
            lines << "    Title: #{manifest['title'] || manifest[:title]}" if manifest['title'] || manifest[:title]
          end
        elsif c2pa.is_a?(Hash)
          lines << "    Manifest found: #{c2pa.empty? ? 'No' : 'Yes'}"
        else
          lines << "    Status: #{c2pa}"
        end
      end

      # OpenTimestamps
      ots = integrity['opentimestamps'] || integrity[:opentimestamps]
      if ots
        lines << '  OpenTimestamps:'
        if ots.is_a?(Hash)
          lines << "    Verified: #{verification_status(ots['verified'] || ots[:verified])}"
          lines << "    Timestamp: #{ots['timestamp'] || ots[:timestamp]}" if ots['timestamp'] || ots[:timestamp]
        else
          lines << "    Status: #{ots}"
        end
      end

      # EXIF
      exif = integrity['exif'] || integrity[:exif]
      if exif
        lines << '  EXIF Metadata:'
        lines << "    Present: #{exif.is_a?(Hash) && !exif.empty? ? 'Yes' : 'No'}"
      end

      # Summary counts
      summary = integrity['summary'] || integrity[:summary]
      if summary
        lines << '  Summary:'
        lines << "    Total files verified: #{summary['total_verified'] || summary[:total_verified] || 'N/A'}"
        lines << "    PGP verified: #{summary['pgp_verified'] || summary[:pgp_verified] || 'N/A'}"
        lines << "    C2PA verified: #{summary['c2pa_verified'] || summary[:c2pa_verified] || 'N/A'}"
      end

      lines << ''
      lines
    end

    def format_consistency(data)
      lines = []
      consistency = data['consistency'] || data[:consistency]
      return lines unless consistency

      lines << '--- Consistency Analysis ---'

      summary = consistency['summary'] || consistency[:summary]
      if summary
        total = summary['total_files'] || summary[:total_files]
        flagged = summary['flagged_files'] || summary[:flagged_files]
        flags = summary['total_flags'] || summary[:total_flags]
        lines << "  Total files: #{total || 'N/A'}"
        lines << "  Flagged files: #{flagged || 0}"
        lines << "  Total flags: #{flags || 0}"
      end

      discrepancies = consistency['discrepancies'] || consistency[:discrepancies] ||
                      consistency['devices'] || consistency[:devices]
      if discrepancies.is_a?(Array) && discrepancies.any?
        lines << '  Discrepancies:'
        discrepancies.each do |d|
          if d.is_a?(Hash)
            field = d['field'] || d[:field]
            severity = d['severity'] || d[:severity]
            message = d['message'] || d[:message]
            lines << "    [#{severity}] #{field}: #{message}"
          else
            lines << "    #{d}"
          end
        end
      end

      lines << ''
      lines
    end

    def format_synchrony(data)
      lines = []
      synchrony = data['synchrony'] || data[:synchrony]
      return lines unless synchrony

      lines << '--- Temporal Synchrony ---'

      patterns = synchrony['temporal_patterns'] || synchrony[:temporal_patterns]
      if patterns.is_a?(Hash)
        lines << "  Mean interval: #{patterns['mean_interval'] || patterns[:mean_interval] || 'N/A'}"
        lines << "  Burst count: #{patterns['burst_count'] || patterns[:burst_count] || 0}"
        lines << "  Gap count: #{patterns['gap_count'] || patterns[:gap_count] || 0}"
      end

      anomalies = synchrony['anomalies'] || synchrony[:anomalies]
      if anomalies.is_a?(Array) && anomalies.any?
        lines << '  Anomalies:'
        anomalies.each do |a|
          if a.is_a?(Hash)
            lines << "    Between #{a['prev_file'] || a[:prev_file]} and #{a['next_file'] || a[:next_file]}: " \
                     "interval #{a['interval'] || a[:interval]}s (z-score: #{a['z_score'] || a[:z_score]})"
          else
            lines << "    #{a}"
          end
        end
      end

      lines << ''
      lines
    end

    def format_errors(data)
      lines = []
      errors = data['errors'] || data[:errors]
      return lines unless errors.is_a?(Array) && errors.any?

      lines << '--- Errors ---'
      errors.each do |err|
        lines << "  #{err}"
      end
      lines << ''
      lines
    end

    def verification_status(value)
      case value
      when true then 'Verified'
      when false then 'Not verified'
      when nil then 'Not present'
      else value.to_s
      end
    end
  end
end
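
For debugging a single ticket, the class methods can be called directly, bypassing the job's batch and dedup logic. A minimal sketch (hypothetical article id):

    article = Ticket::Article.find(123)
    output  = CdrMediaVerify.check_article(article)   # nil when nothing is verifiable
    if output
      puts CdrMediaVerify.format_result(output)       # the text posted as an internal note
      puts output[:progress].join("\n")                # raw ProofMode progress messages
    end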

packages/zammad-addon-media-verify/tsconfig.json (new file, +11)
@@ -0,0 +1,11 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true
  },
  "include": ["scripts"]
}