Support sending clap events at a specific time in audio mode

N-Pex 2023-06-07 09:39:05 +02:00
parent 5f3f5d0afd
commit d5182395af
9 changed files with 149 additions and 21 deletions
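
The change rides on ordinary Matrix reactions: the clap button emits the current playback position, and util.sendQuickReaction merges that offset into the m.relates_to relation of a 👏 m.reaction event. A rough sketch of the content such an event would carry (the event id and offset value below are illustrative, not taken from the commit; timeOffset is a string because it goes through toFixed(0) on send and is parsed back with parseInt on playback):

const content = {
  "m.relates_to": {
    key: "👏",
    rel_type: "m.annotation",
    event_id: "$someAudioEventId",  // hypothetical id of the m.audio event being reacted to
    timeOffset: "4200"              // playback position in ms; toFixed(0) makes it a string
  }
};

On playback, SharedAudioPlayer.initializeClapEvents collects these annotations from the timeline set, and maybePlayClapEvent re-emits each one as an "audio-playback-reaction" event when the audio crosses its offset.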

View file

@@ -1431,6 +1431,14 @@ body {
width: 32px !important;
height: 32px !important;
margin-left: -8px !important;
position: relative;
overflow: visible;
}
.reaction-emoji {
position: absolute;
top: -6px;
right: -6px;
font-size: 17px;
}
.list-enter-active,
.list-leave-active {
@@ -1450,6 +1458,9 @@ body {
justify-content: flex-end;
width: 100%;
}
.clap-button {
font-size: 24px;
}
.mic-button {
align-self: flex-end;
}

Binary file not shown.

View file

@@ -28,6 +28,20 @@
}}</span>
</v-avatar>
</div>
<!-- Current emoji reactions -->
<div class="typing-users">
<transition-group name="list" tag="div">
<v-avatar v-for="reaction in reactions" :key="reaction.member.userId" class="typing-user" size="32" color="grey">
<img v-if="memberAvatar(reaction.member)" :src="memberAvatar(reaction.member)" />
<span v-else class="white--text headline">{{
reaction.member.name.substring(0, 1).toUpperCase()
}}</span>
<div class="reaction-emoji">{{ reaction.emoji }}</div>
</v-avatar>
</transition-group>
</div>
<div v-if="currentAudioEvent" class="senderAndTime">
<div class="sender">{{ eventSenderDisplayName(currentAudioEvent) }}</div>
<div class="time">
@@ -54,10 +68,13 @@
</div>
<div class="load-later">
<v-btn :class="{'mic-button': true, 'dimmed': !canRecordAudio}" ref="mic_button" fab small elevation="0" v-blur
@click.stop="micButtonClicked()">
<v-icon color="white">mic</v-icon>
</v-btn>
<div style="align-self: flex-end;">
<v-btn class="clap-button" text elevation="0" v-blur @click.stop="clapButtonClicked()">👏</v-btn>
<v-btn :class="{'mic-button': true, 'dimmed': !canRecordAudio}" ref="mic_button" fab small elevation="0" v-blur
@click.stop="micButtonClicked()">
<v-icon color="white">mic</v-icon>
</v-btn>
</div>
<v-icon class="clickable" @click="loadNext" color="white" size="28">expand_more</v-icon>
</div>
@@ -100,18 +117,22 @@ export default {
},
data() {
return {
REACTION_ANIMATION_TIME: 2500,
info: null,
currentAudioEvent: null,
autoPlayNextEvent: false,
analyzer: null,
analyzerDataArray: null,
showReadOnlyToast: false,
reactions: [],
updateReactionsTimer: null,
};
},
mounted() {
this.$root.$on('audio-playback-started', this.audioPlaybackStarted);
this.$root.$on('audio-playback-paused', this.audioPlaybackPaused);
this.$root.$on('audio-playback-ended', this.audioPlaybackEnded);
this.$root.$on('audio-playback-reaction', this.audioPlaybackReaction);
document.body.classList.add("dark");
this.$audioPlayer.setAutoplay(false);
},
@@ -119,6 +140,7 @@ export default {
this.$root.$off('audio-playback-started', this.audioPlaybackStarted);
this.$root.$off('audio-playback-paused', this.audioPlaybackPaused);
this.$root.$off('audio-playback-ended', this.audioPlaybackEnded);
this.$root.$off('audio-playback-reaction', this.audioPlaybackReaction);
document.body.classList.remove("dark");
this.$audioPlayer.removeListener(this._uid);
this.currentAudioEvent = null;
@@ -130,6 +152,9 @@ export default {
currentTime() {
return util.formatDuration(this.info ? this.info.currentTime : 0);
},
currentTimeMs() {
return this.info ? this.info.currentTime : 0;
},
totalTime() {
return util.formatDuration(this.info ? this.info.duration : 0);
},
@@ -174,6 +199,8 @@
return;
}
this.clearReactions();
this.info = this.$audioPlayer.addListener(this._uid, value);
const autoPlayWasSet = this.autoPlayNextEvent;
@@ -187,7 +214,7 @@
}
}
this.$audioPlayer.load(value);
this.$audioPlayer.load(value, this.timelineSet);
}
},
},
@@ -195,7 +222,7 @@
play() {
if (this.currentAudioEvent) {
this.$audioPlayer.setAutoplay(false);
this.$audioPlayer.play(this.currentAudioEvent);
this.$audioPlayer.play(this.currentAudioEvent, this.timelineSet);
}
},
pause() {
@@ -241,6 +268,20 @@
this.clearVisualization();
this.loadNext(true && this.autoplay);
},
audioPlaybackReaction(reaction) {
// Play sound!
const audio = new Audio(require("@/assets/sounds/clapping.mp3"));
audio.volume = 0.6;
audio.play();
const member = this.room.getMember(reaction.sender);
if (member) {
this.reactions.push(Object.assign({ addedAt: Date.now(), member: member}, reaction));
if (!this.updateReactionsTimer) {
this.updateReactionsTimer = setInterval(this.updateReactions, 300);
}
}
},
loadPrevious() {
const audioMessages = this.events.filter((e) => e.getContent().msgtype === "m.audio");
for (let i = 0; i < audioMessages.length; i++) {
@@ -328,7 +369,6 @@ export default {
volume.style.height = "" + w + "px";
const color = 80 + (value * (256 - 80)) / 256;
volume.style.backgroundColor = `rgb(${color},${color},${color})`;
if (this.info && this.info.playing) {
requestAnimationFrame(this.updateVisualization);
} else {
@@ -342,6 +382,24 @@
volume.style.height = "0px";
volume.style.backgroundColor = "transparent";
},
updateReactions() {
const now = Date.now();
this.reactions = this.reactions.filter(r => {
return (r.addedAt + this.REACTION_ANIMATION_TIME > now);
});
if (this.reactions.length == 0) {
this.clearReactions();
}
},
clearReactions() {
if (this.updateReactionsTimer) {
clearInterval(this.updateReactionsTimer);
this.updateReactionsTimer = null;
}
this.reactions = [];
},
memberAvatar(member) {
if (member) {
return member.getAvatarUrl(
@@ -364,6 +422,12 @@ export default {
} else {
this.$emit('start-recording');
}
},
clapButtonClicked() {
if (this.currentAudioEvent) {
this.$emit("sendclap", { event: this.currentAudioEvent, timeOffset: this.currentTimeMs })
}
}
}
};

View file

@@ -10,6 +10,7 @@
v-on:loadnext="handleScrolledToBottom(false)"
v-on:loadprevious="handleScrolledToTop()"
v-on:mark-read="sendRR"
v-on:sendclap="sendClapReactionAtTime"
/>
<VoiceRecorder class="audio-layout" v-if="useVoiceMode" :micButtonRef="$refs.mic_button" :ptt="showRecorderPTT" :show="showRecorder"
v-on:close="showRecorder = false" v-on:file="onVoiceRecording" :sendTypingIndicators="useVoiceMode" />
@@ -1301,6 +1302,17 @@ export default {
this.$refs.messageOperationsSheet.close();
},
sendClapReactionAtTime(e) {
util
.sendQuickReaction(this.$matrix.matrixClient, this.roomId, "👏", e.event, { timeOffset: e.timeOffset.toFixed(0)})
.then(() => {
console.log("Send clap reaction at time", e.timeOffset);
})
.catch((err) => {
console.log("Failed to send clap reaction:", err);
});
},
sendQuickReaction(e) {
let previousReaction = null;
@@ -1577,7 +1589,7 @@ export default {
const nextEvent = filteredEvents[index + 1];
if (nextEvent.getContent().msgtype === "m.audio") {
// Yes, audio event!
this.$audioPlayer.play(nextEvent);
this.$audioPlayer.play(nextEvent, this.timelineSet);
}
}
}

View file

@@ -22,6 +22,12 @@ export default {
return null;
},
},
timelineSet: {
type: Object,
default: function () {
return null;
},
},
},
data() {
return {
@@ -44,7 +50,7 @@ export default {
return this.$audioPlayer.addListener(this._uid, this.event);
},
play() {
this.$audioPlayer.play(this.event);
this.$audioPlayer.play(this.event, this.timelineSet);
},
pause() {
this.$audioPlayer.pause(this.event);

View file

@@ -1,7 +1,7 @@
<template>
<message-incoming v-bind="{...$props, ...$attrs}" v-on="$listeners">
<div class="bubble audio-bubble">
<audio-player :event="event">{{ $t('fallbacks.audio_file')}}</audio-player>
<audio-player :event="event" :timelineSet="timelineSet">{{ $t('fallbacks.audio_file')}}</audio-player>
</div>
</message-incoming>
</template>

View file

@@ -1,7 +1,7 @@
<template>
<message-outgoing v-bind="{ ...$props, ...$attrs }" v-on="$listeners">
<div class="audio-bubble">
<audio-player :event="event">{{ $t('fallbacks.audio_file')}}</audio-player>
<audio-player :event="event" :timelineSet="timelineSet">{{ $t('fallbacks.audio_file')}}</audio-player>
</div>
</message-outgoing>
</template>

View file

@@ -222,13 +222,13 @@ class Util {
return this.sendMessage(matrixClient, roomId, "m.room.message", content);
}
sendQuickReaction(matrixClient, roomId, emoji, event) {
sendQuickReaction(matrixClient, roomId, emoji, event, extraData = {}) {
const content = {
'm.relates_to': {
'm.relates_to': Object.assign(extraData, {
key: emoji,
rel_type: 'm.annotation',
event_id: event.getId()
}
})
};
return this.sendMessage(matrixClient, roomId, "m.reaction", content);
}

View file

@@ -14,6 +14,7 @@ export default {
constructor() {
this.player = new Audio();
this.currentEvent = null;
this.currentClapReactions = [];
this.infoMap = new Map();
this.player.addEventListener("durationchange", this.onDurationChange.bind(this));
this.player.addEventListener("timeupdate", this.onTimeUpdate.bind(this));
@@ -70,15 +71,15 @@
);
}
play(event) {
this.play_(event, false);
play(event, timelineSet) {
this.play_(event, timelineSet, false);
}
load(event) {
this.play_(event, true);
load(event, timelineSet) {
this.play_(event, timelineSet, true);
}
play_(event, onlyLoad) {
play_(event, timelineSet, onlyLoad) {
const eventId = event.getId();
if (this.currentEvent != eventId) {
// Media change, pause the one currently playing.
@@ -91,6 +92,10 @@
this.currentEvent = eventId;
const info = this.infoMap.get(eventId);
if (info) {
// Get all clap reactions
this.initializeClapEvents(event, timelineSet);
if (info.url) {
// Restart from beginning?
if (info.currentTime == info.duration) {
@@ -121,9 +126,9 @@
// Still on this item? Call ourselves recursively.
if (this.currentEvent == eventId) {
if (onlyLoad) {
this.load(event);
this.load(event, timelineSet);
} else {
this.play(event);
this.play(event, timelineSet);
}
}
})
@@ -204,8 +209,10 @@
onTimeUpdate() {
var entry = this.infoMap.get(this.currentEvent);
if (entry) {
const oldTime = entry.currentTime;
entry.currentTime = 1000 * this.player.currentTime;
this.updatePlayPercent(entry);
this.maybePlayClapEvent(oldTime, entry.currentTime);
}
}
onDurationChange() {
@@ -226,6 +233,34 @@
entry.playPercent = 0;
}
}
initializeClapEvents(event, timelineSet) {
if (event) {
const reactions = timelineSet.relations.getChildEventsForEvent(event.getId(), 'm.annotation', 'm.reaction');
if (reactions) {
this.currentClapReactions = reactions.getRelations()
.filter(r => r.getRelation().key == "👏" && r.getRelation().timeOffset && parseInt(r.getRelation().timeOffset) > 0)
.map(r => {
return {
sender: r.getSender(),
emoji: r.getRelation().key,
timeOffset: parseInt(r.getRelation().timeOffset)
}
})
.sort((a,b) => a.timeOffset - b.timeOffset);
}
} else {
this.currentClapReactions = [];
}
}
maybePlayClapEvent(previousTimeMs, timeNowMs) {
(this.currentClapReactions || []).forEach(reaction => {
if (previousTimeMs < reaction.timeOffset && timeNowMs >= reaction.timeOffset) {
this.$root.$emit("audio-playback-reaction", reaction);
}
});
}
}
Vue.prototype.$audioPlayer = new SharedAudioPlayer();
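
The playback-time trigger itself is small: on every timeupdate the player keeps the previous position and re-emits any clap whose offset falls between the previous and the current position, so each stored reaction fires exactly once even though timeupdate ticks at an arbitrary rate. A standalone restatement of that crossing check, with an illustrative function name that is not part of the codebase:

// Reactions whose offsets were crossed between two playback positions.
// Offsets are assumed already parsed to numbers, as initializeClapEvents does with parseInt.
function crossedReactions(reactions, previousMs, nowMs) {
  return reactions.filter(r => previousMs < r.timeOffset && nowMs >= r.timeOffset);
}

// A clap stored at 4200 ms fires on the tick that moves playback from 4100 ms to 4300 ms:
// crossedReactions([{ timeOffset: 4200 }], 4100, 4300).length === 1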