Support sending clap events at a specific time in audio mode

N-Pex 2023-06-07 09:39:05 +02:00
parent 5f3f5d0afd
commit d5182395af
9 changed files with 149 additions and 21 deletions
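
The clap button forwards the player's current position along with the reaction, so the 👏 can later be tied to a specific moment in the audio clip. A minimal sketch of the reaction content this could produce, assuming util.sendQuickReaction merges the extra fields into a standard m.reaction annotation (the timeOffset name comes from this diff; its exact placement in the event content is an assumption):

// Hypothetical clap reaction content; "$audioEventId" stands in for the
// event id of the audio message being reacted to.
const clapReaction = {
  "m.relates_to": {
    rel_type: "m.annotation",
    event_id: "$audioEventId",
    key: "👏",
  },
  // Offset into the clip in milliseconds, stringified via toFixed(0).
  timeOffset: "12345",
};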

View file

@@ -28,6 +28,20 @@
}}</span>
</v-avatar>
</div>
<!-- Current emoji reactions -->
<div class="typing-users">
<transition-group name="list" tag="div">
<v-avatar v-for="reaction in reactions" :key="reaction.member.userId" class="typing-user" size="32" color="grey">
<img v-if="memberAvatar(reaction.member)" :src="memberAvatar(reaction.member)" />
<span v-else class="white--text headline">{{
reaction.member.name.substring(0, 1).toUpperCase()
}}</span>
<div class="reaction-emoji">{{ reaction.emoji }}</div>
</v-avatar>
</transition-group>
</div>
<div v-if="currentAudioEvent" class="senderAndTime">
<div class="sender">{{ eventSenderDisplayName(currentAudioEvent) }}</div>
<div class="time">
@@ -54,10 +68,13 @@
</div>
<div class="load-later">
<v-btn :class="{'mic-button': true, 'dimmed': !canRecordAudio}" ref="mic_button" fab small elevation="0" v-blur
@click.stop="micButtonClicked()">
<v-icon color="white">mic</v-icon>
</v-btn>
<div style="align-self: flex-end;">
<v-btn class="clap-button" text elevation="0" v-blur @click.stop="clapButtonClicked()">👏</v-btn>
<v-btn :class="{'mic-button': true, 'dimmed': !canRecordAudio}" ref="mic_button" fab small elevation="0" v-blur
@click.stop="micButtonClicked()">
<v-icon color="white">mic</v-icon>
</v-btn>
</div>
<v-icon class="clickable" @click="loadNext" color="white" size="28">expand_more</v-icon>
</div>
@@ -100,18 +117,22 @@ export default {
},
data() {
return {
REACTION_ANIMATION_TIME: 2500,
info: null,
currentAudioEvent: null,
autoPlayNextEvent: false,
analyzer: null,
analyzerDataArray: null,
showReadOnlyToast: false,
reactions: [],
updateReactionsTimer: null,
};
},
mounted() {
this.$root.$on('audio-playback-started', this.audioPlaybackStarted);
this.$root.$on('audio-playback-paused', this.audioPlaybackPaused);
this.$root.$on('audio-playback-ended', this.audioPlaybackEnded);
this.$root.$on('audio-playback-reaction', this.audioPlaybackReaction);
document.body.classList.add("dark");
this.$audioPlayer.setAutoplay(false);
},
@@ -119,6 +140,7 @@ export default {
this.$root.$off('audio-playback-started', this.audioPlaybackStarted);
this.$root.$off('audio-playback-paused', this.audioPlaybackPaused);
this.$root.$off('audio-playback-ended', this.audioPlaybackEnded);
this.$root.$off('audio-playback-reaction', this.audioPlaybackReaction);
document.body.classList.remove("dark");
this.$audioPlayer.removeListener(this._uid);
this.currentAudioEvent = null;
@@ -130,6 +152,9 @@
currentTime() {
return util.formatDuration(this.info ? this.info.currentTime : 0);
},
currentTimeMs() {
return this.info ? this.info.currentTime : 0;
},
totalTime() {
return util.formatDuration(this.info ? this.info.duration : 0);
},
@@ -174,6 +199,8 @@
return;
}
this.clearReactions();
this.info = this.$audioPlayer.addListener(this._uid, value);
const autoPlayWasSet = this.autoPlayNextEvent;
@@ -187,7 +214,7 @@
}
}
this.$audioPlayer.load(value);
this.$audioPlayer.load(value, this.timelineSet);
}
},
},
@@ -195,7 +222,7 @@
play() {
if (this.currentAudioEvent) {
this.$audioPlayer.setAutoplay(false);
this.$audioPlayer.play(this.currentAudioEvent);
this.$audioPlayer.play(this.currentAudioEvent, this.timelineSet);
}
},
pause() {
@@ -241,6 +268,20 @@
this.clearVisualization();
this.loadNext(true && this.autoplay);
},
audioPlaybackReaction(reaction) {
// Play sound!
const audio = new Audio(require("@/assets/sounds/clapping.mp3"));
audio.volume = 0.6;
audio.play();
const member = this.room.getMember(reaction.sender);
if (member) {
this.reactions.push(Object.assign({ addedAt: Date.now(), member: member}, reaction));
if (!this.updateReactionsTimer) {
this.updateReactionsTimer = setInterval(this.updateReactions, 300);
}
}
},
loadPrevious() {
const audioMessages = this.events.filter((e) => e.getContent().msgtype === "m.audio");
for (let i = 0; i < audioMessages.length; i++) {
@@ -328,7 +369,6 @@ export default {
volume.style.height = "" + w + "px";
const color = 80 + (value * (256 - 80)) / 256;
volume.style.backgroundColor = `rgb(${color},${color},${color})`;
if (this.info && this.info.playing) {
requestAnimationFrame(this.updateVisualization);
} else {
@@ -342,6 +382,24 @@
volume.style.height = "0px";
volume.style.backgroundColor = "transparent";
},
updateReactions() {
const now = Date.now();
this.reactions = this.reactions.filter(r => {
return (r.addedAt + this.REACTION_ANIMATION_TIME > now);
});
if (this.reactions.length == 0) {
this.clearReactions();
}
},
clearReactions() {
if (this.updateReactionsTimer) {
clearInterval(this.updateReactionsTimer);
this.updateReactionsTimer = null;
}
this.reactions = [];
},
memberAvatar(member) {
if (member) {
return member.getAvatarUrl(
@@ -364,6 +422,12 @@ export default {
} else {
this.$emit('start-recording');
}
},
clapButtonClicked() {
if (this.currentAudioEvent) {
this.$emit("sendclap", { event: this.currentAudioEvent, timeOffset: this.currentTimeMs })
}
}
}
};

View file

@@ -10,6 +10,7 @@
v-on:loadnext="handleScrolledToBottom(false)"
v-on:loadprevious="handleScrolledToTop()"
v-on:mark-read="sendRR"
v-on:sendclap="sendClapReactionAtTime"
/>
<VoiceRecorder class="audio-layout" v-if="useVoiceMode" :micButtonRef="$refs.mic_button" :ptt="showRecorderPTT" :show="showRecorder"
v-on:close="showRecorder = false" v-on:file="onVoiceRecording" :sendTypingIndicators="useVoiceMode" />
@@ -1301,6 +1302,17 @@ export default {
this.$refs.messageOperationsSheet.close();
},
sendClapReactionAtTime(e) {
util
.sendQuickReaction(this.$matrix.matrixClient, this.roomId, "👏", e.event, { timeOffset: e.timeOffset.toFixed(0)})
.then(() => {
console.log("Send clap reaction at time", e.timeOffset);
})
.catch((err) => {
console.log("Failed to send clap reaction:", err);
});
},
sendQuickReaction(e) {
let previousReaction = null;
@@ -1577,7 +1589,7 @@ export default {
const nextEvent = filteredEvents[index + 1];
if (nextEvent.getContent().msgtype === "m.audio") {
// Yes, audio event!
this.$audioPlayer.play(nextEvent);
this.$audioPlayer.play(nextEvent, this.timelineSet);
}
}
}

View file

@@ -22,6 +22,12 @@ export default {
return null;
},
},
timelineSet: {
type: Object,
default: function () {
return null;
},
},
},
data() {
return {
@@ -44,7 +50,7 @@
return this.$audioPlayer.addListener(this._uid, this.event);
},
play() {
this.$audioPlayer.play(this.event);
this.$audioPlayer.play(this.event, this.timelineSet);
},
pause() {
this.$audioPlayer.pause(this.event);

View file

@@ -1,7 +1,7 @@
<template>
<message-incoming v-bind="{...$props, ...$attrs}" v-on="$listeners">
<div class="bubble audio-bubble">
<audio-player :event="event">{{ $t('fallbacks.audio_file')}}</audio-player>
<audio-player :event="event" :timelineSet="timelineSet">{{ $t('fallbacks.audio_file')}}</audio-player>
</div>
</message-incoming>
</template>

View file

@@ -1,7 +1,7 @@
<template>
<message-outgoing v-bind="{ ...$props, ...$attrs }" v-on="$listeners">
<div class="audio-bubble">
<audio-player :event="event">{{ $t('fallbacks.audio_file')}}</audio-player>
<audio-player :event="event" :timelineSet="timelineSet">{{ $t('fallbacks.audio_file')}}</audio-player>
</div>
</message-outgoing>
</template>
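
Not shown in these hunks is the player-side change that emits 'audio-playback-reaction' while a clip is playing. A rough sketch of how the $audioPlayer plugin could surface a stored clap once the playhead passes its offset, assuming the plugin tracks pending reactions and holds a reference to the root Vue instance (the names below are illustrative, not taken from the commit):

// Illustrative only: fire each stored reaction once playback reaches its
// timeOffset, emitting the payload shape AudioMode expects (sender + emoji,
// see audioPlaybackReaction above).
onTimeUpdate(currentTimeMs) {
  for (const reaction of this.pendingReactions) {
    if (!reaction.fired && currentTimeMs >= Number(reaction.timeOffset)) {
      reaction.fired = true;
      this.vueRoot.$emit("audio-playback-reaction", {
        sender: reaction.sender,
        emoji: reaction.key,
      });
    }
  }
}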