keanu-weblite/src/components/AudioLayout.vue
2023-03-16 15:23:26 +01:00

466 lines
No EOL
14 KiB
Vue

<template>
  <!-- Root inherits all props/attrs/listeners so the layout can be dropped in transparently -->
  <div v-bind="{ ...$props, ...$attrs }" v-on="$listeners" class="messageIn">
    <!-- Step back to the previous audio message (or request older history) -->
    <div class="load-earlier clickable" @click="loadPrevious">
      <v-icon color="white" size="28">expand_less</v-icon>
    </div>
    <!-- Currently recording users -->
    <div class="typing-users">
      <transition-group name="list" tag="div">
        <v-avatar v-for="(member) in recordingMembersExceptMe" :key="member.userId" class="typing-user" size="32" color="grey">
          <img v-if="memberAvatar(member)" :src="memberAvatar(member)" />
          <span v-else class="white--text headline">{{
            member.name.substring(0, 1).toUpperCase()
          }}</span>
        </v-avatar>
      </transition-group>
    </div>
    <!-- Pulsing volume circle (driven by updateVisualization) behind the sender avatar -->
    <div class="sound-wave-view">
      <div class="volume-container">
        <div ref="volume"></div>
      </div>
      <v-avatar v-if="currentAudioEvent" class="avatar" ref="avatar" size="32" color="#ededed"
        @click.stop="otherAvatarClicked($refs.avatar.$el)">
        <img v-if="messageEventAvatar(currentAudioEvent)" :src="messageEventAvatar(currentAudioEvent)" />
        <span v-else class="white--text headline">{{
          eventSenderDisplayName(currentAudioEvent).substring(0, 1).toUpperCase()
        }}</span>
      </v-avatar>
    </div>
    <!-- Sender name and timestamp of the message being played -->
    <div v-if="currentAudioEvent" class="senderAndTime">
      <div class="sender">{{ eventSenderDisplayName(currentAudioEvent) }}</div>
      <div class="time">
        {{ formatTime(currentAudioEvent.event.origin_server_ts) }}
      </div>
    </div>
    <!-- Elapsed / total playback time -->
    <div class="play-time">
      {{ currentTime }} / {{ totalTime }}
    </div>
    <!-- Hidden audio element; all transport controls below operate on this ref -->
    <audio ref="player" :src="src" @durationchange="updateDuration">
      {{ $t('fallbacks.audio_file') }}
    </audio>
    <!-- Transport: rewind 15s, play/pause toggle, forward 15s -->
    <div v-if="currentAudioEvent" class="auto-audio-player">
      <v-btn id="btn-rewind" @click.stop="rewind" icon>
        <v-icon size="28">$vuetify.icons.rewind</v-icon>
      </v-btn>
      <v-btn v-if="playing" id="btn-pause" @click.stop="pause" icon>
        <v-icon size="56">$vuetify.icons.pause_circle</v-icon>
      </v-btn>
      <v-btn v-else id="btn-play" @click.stop="play" icon>
        <v-icon size="56">$vuetify.icons.play_circle</v-icon>
      </v-btn>
      <v-btn id="btn-forward" @click.stop="forward" icon>
        <v-icon size="28">$vuetify.icons.forward</v-icon>
      </v-btn>
    </div>
    <!-- Record button (dimmed when recording is not possible) and skip-to-next control -->
    <div class="load-later">
      <v-btn :class="{'mic-button': true, 'dimmed': !canRecordAudio}" ref="mic_button" fab small elevation="0" v-blur
        @click.stop="micButtonClicked()">
        <v-icon color="white">mic</v-icon>
      </v-btn>
      <v-icon class="clickable" @click="loadNext" color="white" size="28">expand_more</v-icon>
    </div>
    <!-- Transient toast shown when the user cannot send to this room -->
    <div v-if="showReadOnlyToast" class="toast-read-only">{{ $t("message.not_allowed_to_send") }}</div>
  </div>
</template>
<script>
import messageMixin from "./messages/messageMixin";
import util from "../plugins/utils";
export default {
  mixins: [messageMixin],
  components: {},
  props: {
    // Whether playback may start automatically when a new source is loaded.
    autoplay: {
      type: Boolean,
      default: function () {
        return true
      }
    },
    // Room timeline events; audio messages (msgtype "m.audio") are filtered out of these.
    events: {
      type: Array,
      default: function () {
        return []
      }
    },
    // Event id of the user's read marker; used by loadNext() to pick a starting message.
    readMarker: {
      type: String,
      default: function () {
        return null;
      }
    },
    // Members currently recording audio; rendered as avatars (excluding ourselves).
    recordingMembers: {
      type: Array,
      default: function () {
        return []
      }
    },
  },
data() {
return {
src: null,
currentAudioEvent: null,
autoPlayNextEvent: false,
currentAudioSource: null,
player: null,
duration: 0,
playPercent: 0,
playTime: 0,
playing: false,
analyzer: null,
analyzerDataArray: null,
showReadOnlyToast: false,
};
},
mounted() {
  // The audio view uses the dark theme while visible (removed in beforeDestroy).
  document.body.classList.add("dark");
  // Global bus: when any player starts, others pause themselves (see onPlaybackStart).
  this.$root.$on('playback-start', this.onPlaybackStart);
  this.player = this.$refs.player;
  this.player.autoplay = false;
  this.player.addEventListener("timeupdate", this.updateProgressBar);
  this.player.addEventListener("play", () => {
    // Lazily build the Web Audio graph on first playback; browsers commonly
    // only allow an AudioContext to start from a user gesture.
    if (!this.analyser) {
      const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
      let audioSource = null;
      if (audioCtx) {
        // Graph: player -> analyser -> speakers
        audioSource = audioCtx.createMediaElementSource(this.player);
        this.analyser = audioCtx.createAnalyser();
        audioSource.connect(this.analyser);
        this.analyser.connect(audioCtx.destination);
        this.analyser.fftSize = 128;
        const bufferLength = this.analyser.frequencyBinCount;
        this.analyzerDataArray = new Uint8Array(bufferLength);
      }
    }
    this.playing = true;
    this.updateVisualization();
    // Mark the message as read as soon as it starts playing.
    if (this.currentAudioEvent) {
      this.$emit("mark-read", this.currentAudioEvent.getId(), this.currentAudioEvent.getId());
    }
  });
  this.player.addEventListener("pause", () => {
    this.playing = false;
    this.clearVisualization();
  });
  this.player.addEventListener("ended", () => {
    this.pause();
    this.playing = false;
    this.clearVisualization();
    // Advance to (and possibly autoplay) the next audio message.
    this.onPlaybackEnd();
  });
},
beforeDestroy() {
document.body.classList.remove("dark");
this.currentAudioEvent = null;
this.loadAudioAttachmentSource(); // Release
this.$root.$off('playback-start', this.onPlaybackStart);
},
computed: {
canRecordAudio() {
return !this.$matrix.currentRoomIsReadOnlyForUser && util.browserCanRecordAudio();
},
currentTime() {
return util.formatDuration(this.playTime);
},
totalTime() {
return util.formatDuration(this.duration);
},
playheadPercent: {
get: function () {
return this.playPercent;
},
set: function (percent) {
if (this.player.src) {
this.playPercent = percent;
this.player.currentTime = (percent / 100) * this.player.duration;
}
},
},
recordingMembersExceptMe() {
return this.recordingMembers.filter((member) => {
return member.userId !== this.$matrix.currentUserId;
});
},
},
watch: {
  autoplay: {
    immediate: true,
    handler(autoplay, ignoredOldValue) {
      // Turning autoplay off stops any current playback immediately.
      if (!autoplay) {
        this.pause();
      }
    }
  },
  events: {
    immediate: true,
    handler(events, ignoredOldValue) {
      console.log("Events changed", this.currentAudioEvent, this.autoPlayNextEvent);
      // Pick a message when none is selected yet, or advance when the previous
      // one finished and we were waiting for newer events (autoPlayNextEvent).
      if (!this.currentAudioEvent || this.autoPlayNextEvent) {
        // Make sure all events are decrypted!
        const eventsBeingDecrypted = events.filter((e) => e.isBeingDecrypted());
        if (eventsBeingDecrypted.length > 0) {
          console.log("All not decrypted, wait");
          Promise.allSettled(eventsBeingDecrypted.map((e) => e.getDecryptionPromise())).then(() => {
            console.log("DONE DECRYPTING!")
            this.loadNext(this.autoPlayNextEvent && this.autoplay);
          });
        } else {
          console.log("All decrypted, load next");
          this.loadNext(this.autoPlayNextEvent && this.autoplay);
        }
      }
    }
  },
  currentAudioEvent: {
    immediate: true,
    handler(value, oldValue) {
      console.log("Current audio derom", value, oldValue);
      // Re-assignment of the same event: leave playback state untouched.
      if (value && oldValue && value.getId && oldValue.getId && value.getId() === oldValue.getId()) {
        console.log("Ignoring change!!!");
        return;
      }
      if (!value || !value.getId) {
        return;
      }
      this.src = null;
      // Loading a new source normally consumes the pending-autoplay flag...
      const autoPlayWasSet = this.autoPlayNextEvent;
      this.autoPlayNextEvent = false;
      if (value.getSender() == this.$matrix.currentUserId) {
        // Sent by us. Don't autoplay if we just sent this (i.e. it is ahead of our read marker)
        if (this.room && !this.room.getReceiptsForEvent(value).includes(value.getSender())) {
          this.player.autoplay = false;
          // ...but keep it pending here so the next incoming message still autoplays.
          this.autoPlayNextEvent = autoPlayWasSet;
        }
      }
      this.loadAudioAttachmentSource();
    }
  },
  src: {
    immediate: true,
    handler(value, ignoredOldValue) {
      // Debug only; the template's :src binding does the actual work.
      console.log("Source changed to", value, ignoredOldValue);
    }
  }
},
methods: {
play() {
if (this.player.src) {
this.$root.$emit("playback-start", this);
if (this.player.paused) {
this.player.play();
} else if (this.player.ended) {
// restart
this.player.currentTime = 0;
this.player.play();
}
}
},
pause() {
this.player.autoplay = false;
if (this.player.src) {
this.player.pause();
}
},
rewind() {
if (this.player.src) {
this.player.currentTime = Math.max(0, this.player.currentTime - 15);
}
},
forward() {
if (this.player.src) {
this.player.currentTime = Math.min(this.player.duration, this.player.currentTime + 15);
}
},
updateProgressBar() {
if (this.player.duration > 0) {
this.playPercent = Math.floor(
(100 / this.player.duration) * this.player.currentTime
);
} else {
this.playPercent = 0;
}
this.playTime = 1000 * this.player.currentTime;
},
updateDuration() {
this.duration = 1000 * this.player.duration;
},
onPlaybackStart(item) {
this.player.autoplay = false;
if (item != this && this.playing) {
this.pause();
}
},
onPlaybackEnd() {
this.loadNext(true && this.autoplay);
},
loadPrevious() {
const audioMessages = this.events.filter((e) => e.getContent().msgtype === "m.audio");
for (let i = 0; i < audioMessages.length; i++) {
const e = audioMessages[i];
if (this.currentAudioEvent && e.getId() === this.currentAudioEvent.getId()) {
if (i > 0) {
this.pause();
this.currentAudioEvent = audioMessages[i - 1];
return;
}
break;
}
}
this.$emit("loadprevious");
},
loadNext(autoplay = false) {
  // Advance to the audio message after the current one. With no current
  // event, pick a starting point from the read marker (or the first message).
  const audioMessages = this.events.filter((e) => e.getContent().msgtype === "m.audio");
  if (audioMessages.length == 0) {
    // Try to load earlier
    this.$emit("loadprevious");
    return;
  }
  if (!this.currentAudioEvent) {
    // Figure out which audio event to start with, i.e. our "read marker"
    for (let i = 0; i < audioMessages.length; i++) {
      const e = audioMessages[i];
      if (e.getId() === this.readMarker) {
        if (i < (audioMessages.length - 1)) {
          // Start with the first unread message (the one after the marker).
          this.pause();
          this.player.autoplay = autoplay;
          this.currentAudioEvent = audioMessages[i + 1];
        } else {
          // Marker is the newest loaded message: show it, ask the parent for
          // newer events, and autoplay when they arrive (autoPlayNextEvent).
          this.autoPlayNextEvent = true;
          this.player.autoplay = autoplay;
          this.currentAudioEvent = e;
          this.$emit("loadnext");
        }
        return;
      }
    }
    // No read marker found. Just use the first event here...
    if (audioMessages.length > 0) {
      this.pause();
      this.player.autoplay = autoplay;
      this.currentAudioEvent = audioMessages[0];
    }
    return;
  }
  // Normal case: step from the current event to its successor.
  for (let i = 0; i < audioMessages.length; i++) {
    const e = audioMessages[i];
    if (e.getId() === this.currentAudioEvent.getId()) {
      if (i < (audioMessages.length - 1)) {
        this.pause();
        this.player.autoplay = autoplay;
        this.currentAudioEvent = audioMessages[i + 1];
      } else {
        // Already at the newest: request more and autoplay when they arrive.
        this.autoPlayNextEvent = true;
        this.player.autoplay = autoplay;
        this.$emit("loadnext");
      }
      break;
    }
  }
},
updateVisualization() {
const volume = this.$refs.volume;
if (volume && this.analyser) {
const volumeContainer = volume.parentElement;
const bufferLength = this.analyser.frequencyBinCount;
this.analyser.getByteFrequencyData(this.analyzerDataArray);
var value = 0;
for (let i = 0; i < bufferLength; i++) {
value += this.analyzerDataArray[i];
}
value = value / bufferLength;
const avatarWidth = 1.1 * this.$refs.avatar ? this.$refs.avatar.clientWidth : 104;
const range = Math.max(0, (volumeContainer.clientWidth - avatarWidth));
const w = avatarWidth + (value * range) / 256;
volume.style.width = "" + w + "px";
volume.style.height = "" + w + "px";
const color = 80 + (value * (256 - 80)) / 256;
volume.style.backgroundColor = `rgb(${color},${color},${color})`;
if (this.playing) {
requestAnimationFrame(this.updateVisualization);
} else {
this.clearVisualization();
}
}
},
clearVisualization() {
const volume = this.$refs.volume;
volume.style.width = "0px";
volume.style.height = "0px";
volume.style.backgroundColor = "transparent";
},
loadAudioAttachmentSource() {
console.log("loadAUto");
if (this.src) {
const objectUrl = this.src;
this.src = null;
URL.revokeObjectURL(objectUrl);
}
if (this.currentAudioEvent) {
console.log("Will load");
if (this.currentAudioSource) {
this.currentAudioSource.reject("Aborted");
}
this.currentAudioSource =
util
.getAttachment(this.$matrix.matrixClient, this.currentAudioEvent, (progress) => {
this.downloadProgress = progress;
})
.then((url) => {
console.log("Loaded", url);
this.src = url;
this.currentAudioSource = null;
this.$nextTick(() => {
this.player.load();
});
})
.catch((err) => {
console.log("Failed to fetch attachment: ", err);
});
}
},
memberAvatar(member) {
if (member) {
return member.getAvatarUrl(
this.$matrix.matrixClient.getHomeserverUrl(),
40,
40,
"scale",
true
);
}
return null;
},
micButtonClicked() {
if (this.$matrix.currentRoomIsReadOnlyForUser) {
this.showReadOnlyToast = true;
setTimeout(() => {
this.showReadOnlyToast = false;
}, 3000);
} else {
this.$emit('start-recording');
}
}
}
};
</script>
<style lang="scss">
@import "@/assets/css/chat.scss";
</style>