Fix sending audio and video files

N-Pex 2025-09-10 18:06:41 +02:00
parent bdd240b4c7
commit dfa354bf74
6 changed files with 66 additions and 65 deletions

@@ -393,6 +393,7 @@ export default {
             break;
           case "image/gif":
             extension = ".gif";
+            break;
         }
         let fileName = event.getId() + extension;
@@ -407,6 +408,14 @@ export default {
         }
       } else if (mime.startsWith("audio/")) {
         var extension = ".webm";
+        switch (mime) {
+          case "audio/mpeg":
+            extension = ".mp3";
+            break;
+          case "audio/x-m4a":
+            extension = ".m4a";
+            break;
+        }
         let fileName = event.getId() + extension;
         audioFolder.file(fileName, blob); // TODO calc bytes
         let elements = comp.el.getElementsByTagName("audio");
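The two hunks above extend the export path: image attachments already picked a file extension per MIME type (the first hunk only adds a missing break), and audio attachments now get the same treatment before the blob is written out via audioFolder.file(...). A minimal sketch of that mapping as a lookup table; the helper name and the table are illustrative, not code from this commit:

    // Illustrative only: the same mapping as the switch above, as a table.
    const AUDIO_EXTENSIONS: Record<string, string> = {
      "audio/mpeg": ".mp3",
      "audio/x-m4a": ".m4a",
    };

    function audioExtensionFor(mime: string): string {
      // ".webm" mirrors the default used above for recorded voice messages.
      return AUDIO_EXTENSIONS[mime] ?? ".webm";
    }

    // audioExtensionFor("audio/mpeg") -> ".mp3"; unknown types fall back to ".webm"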

@@ -208,7 +208,7 @@ export default {
         if (isForExport) {
           return MessageIncomingVideoExport;
         }
-        return MessageVideo;
+        return MessageThread;
       } else if (event.getContent().msgtype == "m.file") {
         if (isForExport) {
           return MessageIncomingFileExport;
@@ -255,7 +255,7 @@
         if (isForExport) {
           return MessageOutgoingVideoExport;
         }
-        return MessageVideo;
+        return MessageThread;
       } else if (event.getContent().msgtype == "m.file") {
         if (isForExport) {
           return MessageOutgoingFileExport;
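Both the incoming and the outgoing branch now return MessageThread instead of MessageVideo for interactive (non-export) rendering of m.video events; the dedicated export components are untouched. A sketch of that dispatch, assuming a small wrapper function that does not exist in the file; only the component identifiers and the export/interactive split come from the diff:

    // Sketch only: pick a renderer for a video event. MessageThread,
    // MessageIncomingVideoExport and MessageOutgoingVideoExport are the
    // component imports shown in the diff, not APIs verified elsewhere.
    function videoComponentFor(direction: "incoming" | "outgoing", isForExport: boolean) {
      if (isForExport) {
        return direction === "incoming" ? MessageIncomingVideoExport : MessageOutgoingVideoExport;
      }
      return MessageThread; // interactive rendering now goes through MessageThread
    }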

@@ -64,7 +64,7 @@ const poster: Ref<string | undefined> = ref(undefined);
 const updateSource = () => {
   if (isEventAttachment(props.item)) {
     const eventAttachment = props.item;
-    if (isVideo.value || eventAttachment.src) {
+    if (eventAttachment.src) {
       source.value = eventAttachment.src;
     } else if (previewOnly) {
       eventAttachment.loadThumbnail().then((url) => {
@@ -74,6 +74,10 @@ const updateSource = () => {
       eventAttachment.loadSrc().then((url) => {
         source.value = url.data;
       })
+    } else if (isVideo.value) {
+      eventAttachment.loadSrc().then((url) => {
+        source.value = url.data;
+      })
     }
   } else if (isAttachment(props.item)) {
     const attachment = props.item;
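The source selection no longer short-circuits on isVideo: previously a video with no resolved src still took the first branch and ended up with an empty source, while now it falls through to a new branch that loads the full file. A simplified, self-contained sketch of the resulting order; the EventAttachment shape and the url.data return value are assumed from the diff, and otherFullLoadCase stands in for the branch hidden by the hunk boundary:

    // Sketch under stated assumptions, not the component's actual code.
    interface EventAttachmentLike {
      src?: string;
      loadThumbnail(): Promise<{ data: string }>;
      loadSrc(): Promise<{ data: string }>;
    }

    async function resolveSource(
      att: EventAttachmentLike,
      previewOnly: boolean,
      isVideo: boolean,
      otherFullLoadCase: boolean
    ): Promise<string | undefined> {
      if (att.src) return att.src;                              // URL already known
      if (previewOnly) return (await att.loadThumbnail()).data; // lightweight preview
      if (otherFullLoadCase) return (await att.loadSrc()).data;
      if (isVideo) return (await att.loadSrc()).data;           // new: videos load the real file
      return undefined;
    }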

@@ -128,7 +128,7 @@ watch(
   event,
   () => {
     if (event.value) {
-      if (event.value.getContent().msgtype == "m.image") {
+      if (["m.image", "m.video"].includes(event.value.getContent().msgtype ?? "")) {
         // Single image mode
         items.value = [event.value].map((e: MatrixEvent) => {
           let ea = $matrix.attachmentManager.getEventAttachment(e);
@@ -157,14 +157,14 @@ onBeforeUnmount(() => {
 });
 const showMessageText = computed((): boolean => {
-  if (event.value?.getContent().msgtype == "m.image") {
+  if (["m.image", "m.video"].includes(event.value?.getContent().msgtype ?? "")) {
     return false;
   }
   return true;
 });
 const showMultiview = computed((): boolean => {
-  if (event.value?.getContent().msgtype == "m.image") {
+  if (["m.image", "m.video"].includes(event.value?.getContent().msgtype ?? "")) {
     return true;
   }
   return (
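m.video events are now treated exactly like m.image here: they open in single-attachment mode, suppress the message text, and enable the multiview. The same membership test appears in the watcher and in both computed properties; a tiny shared predicate would capture it (purely illustrative, not part of the commit):

    // Illustrative helper, not in the commit: events rendered as lightbox media.
    // MatrixEvent is the matrix-js-sdk type already used in this file.
    const LIGHTBOX_MSGTYPES = ["m.image", "m.video"];

    function isLightboxMedia(event?: MatrixEvent): boolean {
      return LIGHTBOX_MSGTYPES.includes(event?.getContent().msgtype ?? "");
    }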

@@ -47,21 +47,6 @@ class Util {
     return Thread.hasServerSideSupport ? "m.thread" : "io.element.thread";
   }
-  getAttachmentUrlAndDuration(event) {
-    return new Promise((resolve, reject) => {
-      const content = event.getContent();
-      if (content.url != null) {
-        resolve([content.url, content.info.duration]);
-        return;
-      }
-      if (content.file && content.file.url) {
-        resolve([content.file.url, content.info.duration]);
-      } else {
-        reject("No url found!");
-      }
-    });
-  }
   getAttachment(matrixClient, useAuthedMedia, event, progressCallback, asBlob = false, abortController = undefined) {
     return new Promise((resolve, reject) => {
       const content = event.getContent();
@@ -577,7 +562,7 @@ class Util {
       // Generate audio waveforms
       if (msgtype == "m.audio") {
-        this.generateWaveform(fileContents, messageContent);
+        await this.generateWaveform(fileContents, messageContent);
       }
       const result = await this.sendMessage(matrixClient, roomId, "m.room.message", messageContent);
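generateWaveform is now awaited before sendMessage, so the fields it writes into messageContent are present in the event that actually goes out; without the await the send could race ahead of the waveform work. Roughly, mirroring the two lines from the diff:

    // Ordering sketch: the fields below must exist before the send happens.
    await this.generateWaveform(fileContents, messageContent); // sets format, formatted_body, info.duration
    const result = await this.sendMessage(matrixClient, roomId, "m.room.message", messageContent);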
@@ -594,46 +579,56 @@ class Util {
     return uploadPromise;
   }
-  generateWaveform(data, messageContent) {
+  async generateWaveform(data, messageContent) {
     if (!(window.AudioContext || window.webkitAudioContext)) {
       return; // No support
     }
-    const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
-    if (audioCtx) {
-      return audioCtx.decodeAudioData(data).then((audioBuffer) => {
-        const rawData = audioBuffer.getChannelData(0); // TODO - currently using only 1 channel
-        const samples = 1000; // Number of samples
-        const blockSize = Math.floor(rawData.length / samples);
-        let filteredData = [];
-        for (let i = 0; i < samples; i++) {
-          let blockStart = blockSize * i; // the location of the first sample in the block
-          let sum = 0;
-          for (let j = 0; j < blockSize; j++) {
-            sum = sum + Math.abs(rawData[blockStart + j]); // find the sum of all the samples in the block
+    try {
+      const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
+      if (audioCtx) {
+        const audioBuffer = await audioCtx.decodeAudioData(data);
+        if (audioBuffer) {
+          const rawData = audioBuffer.getChannelData(0); // TODO - currently using only 1 channel
+          const samples = 1000; // Number of samples
+          const blockSize = Math.floor(rawData.length / samples);
+          let filteredData = [];
+          for (let i = 0; i < samples; i++) {
+            let blockStart = blockSize * i; // the location of the first sample in the block
+            let sum = 0;
+            for (let j = 0; j < blockSize; j++) {
+              sum = sum + Math.abs(rawData[blockStart + j]); // find the sum of all the samples in the block
+            }
+            filteredData.push(sum / blockSize); // divide the sum by the block size to get the average
+          }
+          // Normalize
+          const multiplier = Math.pow(Math.max(...filteredData), -1);
+          filteredData = filteredData.map((n) => n * multiplier);
+          // Integerize
+          filteredData = filteredData.map((n) => parseInt((n * 255).toFixed()));
+          // Generate SVG of waveform
+          let svg = `<svg viewBox="0 0 ${samples} 255" fill="none" preserveAspectRatio="none" xmlns="http://www.w3.org/2000/svg">`;
+          svg += `<path d="`;
+          filteredData.forEach((d, i) => {
+            const delta = d / 2;
+            svg += `M${i} ${128 - delta}V${128 + delta}`;
+          });
+          svg += `" style="fill:none;stroke:green;stroke-width:1" />`;
+          svg += "</svg>";
+          messageContent.format = "org.matrix.custom.html";
+          messageContent.formatted_body = svg;
+          // if duration is not set, do that here, since we have it
+          if (!messageContent.info.duration) {
+            messageContent.info.duration = parseInt((1000 * audioBuffer.duration).toFixed());
+          }
-          filteredData.push(sum / blockSize); // divide the sum by the block size to get the average
-        }
-        // Normalize
-        const multiplier = Math.pow(Math.max(...filteredData), -1);
-        filteredData = filteredData.map((n) => n * multiplier);
-        // Integerize
-        filteredData = filteredData.map((n) => parseInt((n * 255).toFixed()));
-        // Generate SVG of waveform
-        let svg = `<svg viewBox="0 0 ${samples} 255" fill="none" preserveAspectRatio="none" xmlns="http://www.w3.org/2000/svg">`;
-        svg += `<path d="`;
-        filteredData.forEach((d, i) => {
-          const delta = d / 2;
-          svg += `M${i} ${128 - delta}V${128 + delta}`;
-        });
-        svg += `" style="fill:none;stroke:green;stroke-width:1" />`;
-        svg += "</svg>";
-        messageContent.format = "org.matrix.custom.html";
-        messageContent.formatted_body = svg;
-      });
     }
+    } catch (error) {
+      return;
+    }
   }
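The refactored generateWaveform awaits decodeAudioData instead of returning a promise chain, wraps the work in try/catch so an undecodable file no longer breaks sending, and backfills info.duration from the decoded buffer when the recorder did not supply it. The core step is the downsampling: average the absolute sample values of channel 0 over 1000 fixed-size blocks, scale so the loudest block becomes 255, then emit one vertical SVG path segment per block. A standalone sketch of that step, assuming a Float32Array of PCM samples as input (function name is illustrative, not the class's API):

    // Standalone sketch of the bucketing used above.
    function waveformBuckets(raw: Float32Array, samples = 1000): number[] {
      const blockSize = Math.floor(raw.length / samples); // assumes raw.length >= samples
      const averages: number[] = [];
      for (let i = 0; i < samples; i++) {
        let sum = 0;
        for (let j = 0; j < blockSize; j++) {
          sum += Math.abs(raw[i * blockSize + j]); // mean magnitude of the block
        }
        averages.push(sum / blockSize);
      }
      const peak = Math.max(...averages) || 1;     // guard against all-silent input
      return averages.map((n) => Math.round((n / peak) * 255));
    }

    // Each bucket d then becomes the path segment `M${i} ${128 - d / 2}V${128 + d / 2}`,
    // i.e. a bar centred in the 255-unit-tall viewBox, as in the SVG code above.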

@@ -46,14 +46,7 @@ export default {
       this.infoMap.set(eventId, entry);
       // Get duration information
-      utils
-        .getAttachmentUrlAndDuration(event)
-        .then(([ignoredurl, duration]) => {
-          entry.duration = duration;
-        })
-        .catch((err) => {
-          console.error("Failed to fetch attachment duration: ", err);
-        });
+      entry.duration = event.getContent()?.info?.duration ?? 0;
     }
     entry.listeners.add(uid);
     return entry;
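The duration is now read straight from the event body instead of resolving the attachment URL first, which is why the getAttachmentUrlAndDuration helper could be deleted above. Since generateWaveform backfills info.duration for audio sent from this client, the field is usually present; the `?? 0` covers senders that omit it. For reference, a minimal m.audio content shape this relies on (field names per the Matrix spec, values illustrative):

    // Illustrative m.audio event content; info.duration is in milliseconds.
    const content = {
      msgtype: "m.audio",
      body: "voice-message.webm",
      url: "mxc://example.org/abc123",            // content.file instead, in encrypted rooms
      info: { mimetype: "audio/webm", size: 13480, duration: 4320 },
    };
    const durationMs = content.info?.duration ?? 0; // 0 when the sender omitted it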