Merge branch '472-audio-mode-add-clapping-interaction' into 'dev'

Support sending clap events at a specific time in audio mode

See merge request keanuapp/keanuapp-weblite!196
Author: N Pex
Date: 2023-06-07 07:42:35 +00:00
Commit: 1f00c6f311

9 changed files with 149 additions and 21 deletions


@@ -1431,6 +1431,14 @@ body {
   width: 32px !important;
   height: 32px !important;
   margin-left: -8px !important;
+  position: relative;
+  overflow: visible;
+}
+.reaction-emoji {
+  position: absolute;
+  top: -6px;
+  right: -6px;
+  font-size: 17px;
 }
 .list-enter-active,
 .list-leave-active {
@@ -1450,6 +1458,9 @@ body {
   justify-content: flex-end;
   width: 100%;
 }
+.clap-button {
+  font-size: 24px;
+}
 .mic-button {
   align-self: flex-end;
 }

Binary file not shown.


@@ -28,6 +28,20 @@
           }}</span>
         </v-avatar>
       </div>
+      <!-- Current emoji reactions -->
+      <div class="typing-users">
+        <transition-group name="list" tag="div">
+          <v-avatar v-for="reaction in reactions" :key="reaction.member.userId" class="typing-user" size="32" color="grey">
+            <img v-if="memberAvatar(reaction.member)" :src="memberAvatar(reaction.member)" />
+            <span v-else class="white--text headline">{{
+              reaction.member.name.substring(0, 1).toUpperCase()
+            }}</span>
+            <div class="reaction-emoji">{{ reaction.emoji }}</div>
+          </v-avatar>
+        </transition-group>
+      </div>
       <div v-if="currentAudioEvent" class="senderAndTime">
         <div class="sender">{{ eventSenderDisplayName(currentAudioEvent) }}</div>
         <div class="time">
@@ -54,10 +68,13 @@
       </div>
       <div class="load-later">
-        <v-btn :class="{'mic-button': true, 'dimmed': !canRecordAudio}" ref="mic_button" fab small elevation="0" v-blur
-          @click.stop="micButtonClicked()">
-          <v-icon color="white">mic</v-icon>
-        </v-btn>
+        <div style="align-self: flex-end;">
+          <v-btn class="clap-button" text elevation="0" v-blur @click.stop="clapButtonClicked()">👏</v-btn>
+          <v-btn :class="{'mic-button': true, 'dimmed': !canRecordAudio}" ref="mic_button" fab small elevation="0" v-blur
+            @click.stop="micButtonClicked()">
+            <v-icon color="white">mic</v-icon>
+          </v-btn>
+        </div>
         <v-icon class="clickable" @click="loadNext" color="white" size="28">expand_more</v-icon>
       </div>
@@ -100,18 +117,22 @@ export default {
   },
   data() {
     return {
+      REACTION_ANIMATION_TIME: 2500,
       info: null,
       currentAudioEvent: null,
       autoPlayNextEvent: false,
       analyzer: null,
       analyzerDataArray: null,
       showReadOnlyToast: false,
+      reactions: [],
+      updateReactionsTimer: null,
     };
   },
   mounted() {
     this.$root.$on('audio-playback-started', this.audioPlaybackStarted);
     this.$root.$on('audio-playback-paused', this.audioPlaybackPaused);
     this.$root.$on('audio-playback-ended', this.audioPlaybackEnded);
+    this.$root.$on('audio-playback-reaction', this.audioPlaybackReaction);
     document.body.classList.add("dark");
     this.$audioPlayer.setAutoplay(false);
   },
@@ -119,6 +140,7 @@ export default {
     this.$root.$off('audio-playback-started', this.audioPlaybackStarted);
     this.$root.$off('audio-playback-paused', this.audioPlaybackPaused);
     this.$root.$off('audio-playback-ended', this.audioPlaybackEnded);
+    this.$root.$off('audio-playback-reaction', this.audioPlaybackReaction);
     document.body.classList.remove("dark");
     this.$audioPlayer.removeListener(this._uid);
     this.currentAudioEvent = null;
@@ -130,6 +152,9 @@
     currentTime() {
       return util.formatDuration(this.info ? this.info.currentTime : 0);
     },
+    currentTimeMs() {
+      return this.info ? this.info.currentTime : 0;
+    },
     totalTime() {
       return util.formatDuration(this.info ? this.info.duration : 0);
     },
@@ -174,6 +199,8 @@
         return;
       }

+      this.clearReactions();
+
       this.info = this.$audioPlayer.addListener(this._uid, value);
       const autoPlayWasSet = this.autoPlayNextEvent;
@@ -187,7 +214,7 @@
         }
       }

-      this.$audioPlayer.load(value);
+      this.$audioPlayer.load(value, this.timelineSet);
     }
   },
 },
@@ -195,7 +222,7 @@
     play() {
       if (this.currentAudioEvent) {
         this.$audioPlayer.setAutoplay(false);
-        this.$audioPlayer.play(this.currentAudioEvent);
+        this.$audioPlayer.play(this.currentAudioEvent, this.timelineSet);
       }
     },
     pause() {
@@ -241,6 +268,20 @@
       this.clearVisualization();
       this.loadNext(true && this.autoplay);
     },
+    audioPlaybackReaction(reaction) {
+      // Play sound!
+      const audio = new Audio(require("@/assets/sounds/clapping.mp3"));
+      audio.volume = 0.6;
+      audio.play();
+
+      const member = this.room.getMember(reaction.sender);
+      if (member) {
+        this.reactions.push(Object.assign({ addedAt: Date.now(), member: member}, reaction));
+        if (!this.updateReactionsTimer) {
+          this.updateReactionsTimer = setInterval(this.updateReactions, 300);
+        }
+      }
+    },
     loadPrevious() {
       const audioMessages = this.events.filter((e) => e.getContent().msgtype === "m.audio");
       for (let i = 0; i < audioMessages.length; i++) {
@@ -328,7 +369,6 @@
       volume.style.height = "" + w + "px";
       const color = 80 + (value * (256 - 80)) / 256;
       volume.style.backgroundColor = `rgb(${color},${color},${color})`;
-
       if (this.info && this.info.playing) {
         requestAnimationFrame(this.updateVisualization);
       } else {
@@ -342,6 +382,24 @@
       volume.style.height = "0px";
       volume.style.backgroundColor = "transparent";
     },
+    updateReactions() {
+      const now = Date.now();
+      this.reactions = this.reactions.filter(r => {
+        return (r.addedAt + this.REACTION_ANIMATION_TIME > now);
+      });
+      if (this.reactions.length == 0) {
+        this.clearReactions();
+      }
+    },
+
+    clearReactions() {
+      if (this.updateReactionsTimer) {
+        clearInterval(this.updateReactionsTimer);
+        this.updateReactionsTimer = null;
+      }
+      this.reactions = [];
+    },
+
     memberAvatar(member) {
       if (member) {
         return member.getAvatarUrl(
@@ -364,6 +422,12 @@
       } else {
         this.$emit('start-recording');
       }
+    },
+
+    clapButtonClicked() {
+      if (this.currentAudioEvent) {
+        this.$emit("sendclap", { event: this.currentAudioEvent, timeOffset: this.currentTimeMs })
+      }
     }
   }
 };
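For reference, the reactions rendered in the avatar strip above only live briefly in the component's reactions array. A minimal sketch of the pruning rule used by updateReactions(), assuming the 2500 ms REACTION_ANIMATION_TIME and the 300 ms timer introduced in this diff:

    // Sketch only: the filter applied by updateReactions(), pulled out as a pure function.
    // The component runs this every 300 ms via setInterval and stops the timer once the list is empty.
    function pruneReactions(reactions, animationTime = 2500, now = Date.now()) {
      return reactions.filter((r) => r.addedAt + animationTime > now);
    }

    // Example: a clap pushed 3 seconds ago is dropped, a fresh one is kept.
    const kept = pruneReactions([
      { addedAt: Date.now() - 3000, emoji: "👏" },
      { addedAt: Date.now(), emoji: "👏" },
    ]);
    console.log(kept.length); // 1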


@@ -10,6 +10,7 @@
       v-on:loadnext="handleScrolledToBottom(false)"
       v-on:loadprevious="handleScrolledToTop()"
       v-on:mark-read="sendRR"
+      v-on:sendclap="sendClapReactionAtTime"
     />
     <VoiceRecorder class="audio-layout" v-if="useVoiceMode" :micButtonRef="$refs.mic_button" :ptt="showRecorderPTT" :show="showRecorder"
       v-on:close="showRecorder = false" v-on:file="onVoiceRecording" :sendTypingIndicators="useVoiceMode" />
@@ -1301,6 +1302,17 @@
       this.$refs.messageOperationsSheet.close();
     },

+    sendClapReactionAtTime(e) {
+      util
+        .sendQuickReaction(this.$matrix.matrixClient, this.roomId, "👏", e.event, { timeOffset: e.timeOffset.toFixed(0)})
+        .then(() => {
+          console.log("Send clap reaction at time", e.timeOffset);
+        })
+        .catch((err) => {
+          console.log("Failed to send clap reaction:", err);
+        });
+    },
+
     sendQuickReaction(e) {
       let previousReaction = null;
@@ -1577,7 +1589,7 @@
         const nextEvent = filteredEvents[index + 1];
         if (nextEvent.getContent().msgtype === "m.audio") {
           // Yes, audio event!
-          this.$audioPlayer.play(nextEvent);
+          this.$audioPlayer.play(nextEvent, this.timelineSet);
         }
       }
     }


@@ -22,6 +22,12 @@ export default {
         return null;
       },
     },
+    timelineSet: {
+      type: Object,
+      default: function () {
+        return null;
+      },
+    },
   },
   data() {
     return {
@@ -44,7 +50,7 @@
       return this.$audioPlayer.addListener(this._uid, this.event);
     },
     play() {
-      this.$audioPlayer.play(this.event);
+      this.$audioPlayer.play(this.event, this.timelineSet);
     },
     pause() {
       this.$audioPlayer.pause(this.event);


@@ -1,7 +1,7 @@
 <template>
   <message-incoming v-bind="{...$props, ...$attrs}" v-on="$listeners">
     <div class="bubble audio-bubble">
-      <audio-player :event="event">{{ $t('fallbacks.audio_file')}}</audio-player>
+      <audio-player :event="event" :timelineSet="timelineSet">{{ $t('fallbacks.audio_file')}}</audio-player>
     </div>
   </message-incoming>
 </template>


@@ -1,7 +1,7 @@
 <template>
   <message-outgoing v-bind="{ ...$props, ...$attrs }" v-on="$listeners">
     <div class="audio-bubble">
-      <audio-player :event="event">{{ $t('fallbacks.audio_file')}}</audio-player>
+      <audio-player :event="event" :timelineSet="timelineSet">{{ $t('fallbacks.audio_file')}}</audio-player>
     </div>
   </message-outgoing>
 </template>


@@ -222,13 +222,13 @@ class Util {
     return this.sendMessage(matrixClient, roomId, "m.room.message", content);
   }

-  sendQuickReaction(matrixClient, roomId, emoji, event) {
+  sendQuickReaction(matrixClient, roomId, emoji, event, extraData = {}) {
     const content = {
-      'm.relates_to': {
+      'm.relates_to': Object.assign(extraData, {
         key: emoji,
         rel_type: 'm.annotation',
         event_id: event.getId()
-      }
+      })
     };
     return this.sendMessage(matrixClient, roomId, "m.reaction", content);
   }
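For illustration, a clap sent through sendClapReactionAtTime above ends up as an m.reaction event whose content looks roughly like the following. The event_id and timeOffset values here are hypothetical; timeOffset is a string because the caller passes e.timeOffset.toFixed(0), and the player later reads it back with parseInt:

    // Sketch of the content built by sendQuickReaction(client, roomId, "👏", event, { timeOffset: "12345" }).
    // Object.assign(extraData, { ... }) folds the extra field into m.relates_to.
    const content = {
      "m.relates_to": {
        timeOffset: "12345",           // ms into the audio message, as a string from toFixed(0)
        key: "👏",
        rel_type: "m.annotation",
        event_id: "$someAudioEventId"  // hypothetical id of the audio message being annotated
      }
    };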


@@ -14,6 +14,7 @@ export default {
   constructor() {
     this.player = new Audio();
     this.currentEvent = null;
+    this.currentClapReactions = [];
     this.infoMap = new Map();
     this.player.addEventListener("durationchange", this.onDurationChange.bind(this));
     this.player.addEventListener("timeupdate", this.onTimeUpdate.bind(this));
@@ -70,15 +71,15 @@
     );
   }

-  play(event) {
-    this.play_(event, false);
+  play(event, timelineSet) {
+    this.play_(event, timelineSet, false);
   }

-  load(event) {
-    this.play_(event, true);
+  load(event, timelineSet) {
+    this.play_(event, timelineSet, true);
   }

-  play_(event, onlyLoad) {
+  play_(event, timelineSet, onlyLoad) {
     const eventId = event.getId();
     if (this.currentEvent != eventId) {
       // Media change, pause the one currently playing.
@@ -91,6 +92,10 @@
     this.currentEvent = eventId;
     const info = this.infoMap.get(eventId);
     if (info) {
+      // Get all clap reactions
+      this.initializeClapEvents(event, timelineSet);
+
       if (info.url) {
         // Restart from beginning?
         if (info.currentTime == info.duration) {
@@ -121,9 +126,9 @@
           // Still on this item? Call ourselves recursively.
           if (this.currentEvent == eventId) {
             if (onlyLoad) {
-              this.load(event);
+              this.load(event, timelineSet);
             } else {
-              this.play(event);
+              this.play(event, timelineSet);
             }
           }
         })
@@ -204,8 +209,10 @@
   onTimeUpdate() {
     var entry = this.infoMap.get(this.currentEvent);
     if (entry) {
+      const oldTime = entry.currentTime;
       entry.currentTime = 1000 * this.player.currentTime;
       this.updatePlayPercent(entry);
+      this.maybePlayClapEvent(oldTime, entry.currentTime);
     }
   }
   onDurationChange() {
@@ -226,6 +233,34 @@
       entry.playPercent = 0;
     }
   }
+
+  initializeClapEvents(event, timelineSet) {
+    if (event) {
+      const reactions = timelineSet.relations.getChildEventsForEvent(event.getId(), 'm.annotation', 'm.reaction');
+      if (reactions) {
+        this.currentClapReactions = reactions.getRelations()
+          .filter(r => r.getRelation().key == "👏" && r.getRelation().timeOffset && parseInt(r.getRelation().timeOffset) > 0)
+          .map(r => {
+            return {
+              sender: r.getSender(),
+              emoji: r.getRelation().key,
+              timeOffset: parseInt(r.getRelation().timeOffset)
+            }
+          })
+          .sort((a,b) => a.timeOffset - b.timeOffset);
+      }
+    } else {
+      this.currentClapReactions = [];
+    }
+  }
+
+  maybePlayClapEvent(previousTimeMs, timeNowMs) {
+    (this.currentClapReactions || []).forEach(reaction => {
+      if (previousTimeMs < reaction.timeOffset && timeNowMs >= reaction.timeOffset) {
+        this.$root.$emit("audio-playback-reaction", reaction);
+      }
+    });
+  }
 }

 Vue.prototype.$audioPlayer = new SharedAudioPlayer();
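As a quick illustration of the threshold check in maybePlayClapEvent(): a stored clap fires on the first timeupdate whose interval crosses its timeOffset, and only once. A small sketch with made-up tick times and a hypothetical sender id:

    // Mirrors the condition `previousTimeMs < timeOffset && timeNowMs >= timeOffset`.
    function reactionsToFire(reactions, previousTimeMs, timeNowMs) {
      return reactions.filter(
        (r) => previousTimeMs < r.timeOffset && timeNowMs >= r.timeOffset
      );
    }

    const claps = [{ sender: "@bob:example.org", emoji: "👏", timeOffset: 12000 }];
    console.log(reactionsToFire(claps, 11750, 12010).length); // 1: the playhead just crossed 12 s
    console.log(reactionsToFire(claps, 12010, 12260).length); // 0: already fired, not repeated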