        {/* NOTE: the JSX elements inside these guards were lost in extraction;
            only the visibility conditions survive. */}
        {actionStopRecording && ( )}
        {actionPauseAudio && ( )}
        {actionPlayAudio && ( )}
        {actionDefault && ( )}

        {/* Assumption: these two guards mounted the canvases referenced by
            the drawing code (playbackCanvas / visualisationCanvas). */}
        {!isRecording && <canvas ref={this.playbackCanvas} />}
        {isRecording && <canvas ref={this.visualisationCanvas} />}

        {displayTimeString}
        {isRecording && }

        {!isRecording && ( )}

        {isRecording ? ( ) : ( )}
    );
  }
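  // The guards used in render() above (actionStopRecording, actionPauseAudio,
  // actionPlayAudio, actionDefault) are not defined in the surviving code.
  // A minimal sketch of how they could be derived from state, assuming the
  // hover flag swaps the default icon for a stop control while recording:
  //
  //   const actionStopRecording = actionHover && isRecording;
  //   const actionPlayAudio = !isRecording && !isPlaying;
  //   const actionPauseAudio = !isRecording && isPlaying && !isPaused;
  //   const actionDefault =
  //     !actionStopRecording && !actionPlayAudio && !actionPauseAudio;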
  private handleHoverActions() {
    if (this.state.isRecording && !this.state.actionHover) {
      this.setState({
        actionHover: true,
      });
    }
  }

  private async timerUpdate() {
    const { nowTimestamp, startTimestamp } = this.state;
    const elapsedTime = nowTimestamp - startTimestamp;

    // Prevent voice messages exceeding max length.
    if (elapsedTime >= Constants.CONVERSATION.MAX_VOICE_MESSAGE_DURATION) {
      await this.stopRecordingStream();
    }

    this.setState({
      nowTimestamp: getTimestamp(),
    });
  }

  private handleUnhoverActions() {
    if (this.state.isRecording && this.state.actionHover) {
      this.setState({
        actionHover: false,
      });
    }
  }

  private async stopRecording() {
    this.setState({
      isRecording: false,
      isPaused: true,
    });
  }

  private async playAudio() {
    // Generate an audio element from the recorded blob if one doesn't exist yet.
    const generateAudioElement = () => {
      const { mediaBlob, recordDuration } = this.state;

      if (!mediaBlob) {
        return undefined;
      }

      const audioURL = window.URL.createObjectURL(mediaBlob.data);
      const audioElementN = new Audio(audioURL);
      audioElementN.loop = false;

      audioElementN.oncanplaythrough = async () => {
        const duration = recordDuration;
        if (duration && audioElementN.currentTime < duration) {
          await audioElementN.play();
        }
      };

      return audioElementN;
    };

    const audioElement = this.state.audioElement || generateAudioElement();
    if (!audioElement) {
      return;
    }

    // Draw the sweeping playback-progress overlay.
    const drawSweepingTimeline = () => {
      const { isPaused } = this.state;
      const { width, height, barColorPlay } = this.state.canvasParams;
      const canvas = this.playbackCanvas.current;

      if (!canvas || isPaused) {
        return;
      }

      // Once audioElement is fully buffered, we get the true duration.
      let audioDuration = this.state.recordDuration;
      if (audioElement.duration !== Infinity) {
        audioDuration = audioElement.duration;
      }

      const progress = width * (audioElement.currentTime / audioDuration);

      const canvasContext = canvas.getContext('2d');
      if (!canvasContext) {
        return;
      }

      canvasContext.beginPath();
      canvasContext.fillStyle = barColorPlay;
      canvasContext.globalCompositeOperation = 'source-atop';
      canvasContext.fillRect(0, 0, progress, height);

      // Pause audio when it reaches the end of the blob.
      if (
        audioElement.duration &&
        audioElement.currentTime === audioElement.duration
      ) {
        this.pauseAudio();
        return;
      }

      requestAnimationFrame(drawSweepingTimeline);
    };

    this.setState({
      audioElement,
      isRecording: false,
      isPaused: false,
      isPlaying: true,
    });

    // If the end of the audio was reached, reset the sweeping timeline.
    if (
      audioElement.duration &&
      audioElement.currentTime === audioElement.duration
    ) {
      await this.initPlaybackView();
    }

    await audioElement.play();
    requestAnimationFrame(drawSweepingTimeline);
  }

  private pauseAudio() {
    this.state.audioElement?.pause();
    this.setState({
      isPlaying: false,
      isPaused: true,
    });
  }

  private async onDeleteVoiceMessage() {
    this.pauseAudio();
    await this.stopRecordingStream();
    this.props.onExitVoiceNoteView();
  }

  private onSendVoiceMessage() {
    const audioBlob = this.state.mediaBlob?.data;
    if (!audioBlob) {
      return;
    }

    // Reject the recording if it exceeds the attachment filesize limit.
    if (audioBlob.size > Constants.CONVERSATION.MAX_ATTACHMENT_FILESIZE) {
      // TODO VINCE: warn the user that it's too big
      return;
    }

    this.props.sendVoiceMessage(audioBlob);
  }

  private async initiateRecordingStream() {
    // NOTE: navigator.getUserMedia is the deprecated callback-style API;
    // a promise-based sketch follows this method.
    navigator.getUserMedia(
      { audio: true },
      this.onRecordingStream,
      this.onStreamError
    );
  }
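  // Sketch, not part of the original component: navigator.getUserMedia is
  // deprecated, and the promise-based MediaDevices equivalent of
  // initiateRecordingStream would look like this (method name hypothetical).
  private async initiateRecordingStreamModern() {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: true,
      });
      await this.onRecordingStream(stream);
    } catch (error) {
      this.onStreamError(error);
    }
  }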
  private async stopRecordingStream() {
    const { streamParams } = this.state;

    // Exit if stream parameters aren't set yet.
    if (!streamParams) {
      return;
    }

    // Stop the stream.
    if (streamParams.media.state !== 'inactive') {
      streamParams.media.stop();
    }
    streamParams.input.disconnect();
    streamParams.processor.disconnect();
    streamParams.stream.getTracks().forEach((track: any) => track.stop());

    // Stop recording.
    await this.stopRecording();
  }

  private async onRecordingStream(stream: any) {
    // If not recording, stop the stream.
    if (!this.state.isRecording) {
      await this.stopRecordingStream();
      return;
    }

    // Start recording the stream.
    const media = new window.MediaRecorder(stream, { mimeType: 'audio/webm' });

    // ondataavailable receives a BlobEvent; the recorded Blob is on its
    // `data` property.
    media.ondataavailable = (mediaBlob: any) => {
      this.setState({ mediaBlob }, async () => {
        // Generate PCM waveform for playback.
        await this.initPlaybackView();
      });
    };
    media.start();

    // Audio context and analyser for the live visualisation.
    const audioContext = new window.AudioContext();
    const input = audioContext.createMediaStreamSource(stream);

    const bufferSize = 1024;
    const analyser = audioContext.createAnalyser();
    analyser.smoothingTimeConstant = 0.3;
    analyser.fftSize = 512;

    const processor = audioContext.createScriptProcessor(bufferSize, 1, 1);

    processor.onaudioprocess = () => {
      const streamParams = { stream, media, input, processor };
      this.setState({ streamParams });

      const {
        width,
        height,
        barWidth,
        barPadding,
        barColorInit,
        maxBarHeight,
        minBarHeight,
      } = this.state.canvasParams;

      // Array of volumes by frequency (not in Hz, arbitrary unit).
      const freqTypedArray = new Uint8Array(analyser.frequencyBinCount);
      analyser.getByteFrequencyData(freqTypedArray);
      const freqArray = Array.from(freqTypedArray);

      // Draw the live bars onto the visualisation canvas.
      const drawRecordingCanvas = () => {
        const canvas = this.visualisationCanvas.current;
        const numBars = width / (barPadding + barWidth);

        const maxVal = Math.max(...freqArray);
        let volumeArray = freqArray.map(n => {
          const initialHeight = maxBarHeight * (n / maxVal);
          return initialHeight > minBarHeight ? initialHeight : minBarHeight;
        });

        // Create initial fake bars to improve appearance: a gradually
        // increasing wave rather than a wall at the beginning.
        const frontLoadLen = Math.ceil(volumeArray.length / 10);
        const frontLoad = volumeArray
          .slice(0, frontLoadLen - 1)
          .reverse()
          .map(n => n * 0.8);
        volumeArray = [...frontLoad, ...volumeArray];

        // Chop off values which exceed the bounds of the container.
        volumeArray = volumeArray.slice(0, numBars);

        if (canvas) {
          canvas.width = width;
          canvas.height = height;
        }
        const canvasContext = canvas && canvas.getContext('2d');

        for (let i = 0; i < volumeArray.length; i++) {
          const barHeight = Math.ceil(volumeArray[i]);
          const offsetX = Math.ceil(i * (barWidth + barPadding));
          const offsetY = Math.ceil(height / 2 - barHeight / 2);

          if (canvasContext) {
            canvasContext.fillStyle = barColorInit;
            this.drawRoundedRect(canvasContext, offsetX, offsetY, barHeight);
          }
        }
      };

      if (this.state.isRecording) {
        requestAnimationFrame(drawRecordingCanvas);
      }
    };

    // Init listeners for visualisation.
    input.connect(analyser);
    processor.connect(audioContext.destination);
  }

  private onStreamError(error: any) {
    return error;
  }
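  // Sketch, not part of the original component: ScriptProcessorNode is
  // deprecated, and onaudioprocess above is used only as a tick source for
  // drawing (the audio samples themselves are never read). Polling the
  // AnalyserNode from a requestAnimationFrame loop achieves the same effect
  // without the deprecated node; the helper name is hypothetical.
  private startVisualiserLoop(analyser: AnalyserNode) {
    const freqTypedArray = new Uint8Array(analyser.frequencyBinCount);

    const tick = () => {
      if (!this.state.isRecording) {
        return;
      }
      analyser.getByteFrequencyData(freqTypedArray);
      // ...map frequencies to bar heights and draw, as drawRecordingCanvas
      // does above...
      requestAnimationFrame(tick);
    };

    requestAnimationFrame(tick);
  }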
  private compactPCM(array: Float32Array, numGroups: number) {
    // Takes an array of arbitrary size and compresses it down into a smaller
    // array by grouping elements into bundles of groupSize and taking their
    // average.
    // E.g. [73, 6, 1, 9, 5, 11, 2, 19, 35] with groupSize 3 becomes
    //   [(73 + 6 + 1) / 3, (9 + 5 + 11) / 3, (2 + 19 + 35) / 3]
    //   ≈ [26.7, 8.3, 18.7]
    // It's used to get a fixed number of freqBars or volumeBars out of
    // a huge sample array.
    const groupSize = Math.floor(array.length / numGroups);

    let sum = 0;
    const compacted = new Float32Array(numGroups);

    for (let i = 0; i < array.length; i++) {
      sum += array[i];
      if ((i + 1) % groupSize === 0) {
        // Write the group average into its zero-based slot.
        const compactedIndex = (i + 1) / groupSize - 1;
        compacted[compactedIndex] = sum / groupSize;
        sum = 0;
      }
    }

    return compacted;
  }

  private async initPlaybackView() {
    const {
      width,
      height,
      barWidth,
      barPadding,
      barColorInit,
      maxBarHeight,
      minBarHeight,
    } = this.state.canvasParams;
    const numBars = width / (barPadding + barWidth);

    // Scan through the audio file, getting the average volume per bar,
    // to display amplitude over time as a static image.
    const blob = this.state.mediaBlob.data;
    const arrayBuffer = await new Response(blob).arrayBuffer();
    const audioContext = new window.AudioContext();

    await audioContext.decodeAudioData(arrayBuffer, (buffer: AudioBuffer) => {
      this.setState({
        recordDuration: buffer.duration,
      });

      // Get audio amplitude from the PCM data in Float32.
      // Grab a single channel only, to save computation.
      const channelData = buffer.getChannelData(0);
      const pcmData = this.compactPCM(channelData, numBars);
      const pcmDataArray = Array.from(pcmData);
      const pcmDataArrayNormalised = pcmDataArray.map(v => Math.abs(v));

      // Prepare values for drawing to canvas.
      const maxAmplitude = Math.max(...pcmDataArrayNormalised);
      const barSizeArray = pcmDataArrayNormalised.map(amplitude => {
        let barSize = maxBarHeight * (amplitude / maxAmplitude);

        // Prevent values that are too small.
        if (barSize < minBarHeight) {
          barSize = minBarHeight;
        }

        return barSize;
      });

      // Draw the static waveform onto the playback canvas.
      const drawPlaybackCanvas = () => {
        const canvas = this.playbackCanvas.current;
        if (!canvas) {
          return;
        }
        canvas.height = height;
        canvas.width = width;

        const canvasContext = canvas.getContext('2d');
        if (!canvasContext) {
          return;
        }

        for (let i = 0; i < barSizeArray.length; i++) {
          const barHeight = Math.ceil(barSizeArray[i]);
          const offsetX = Math.ceil(i * (barWidth + barPadding));
          const offsetY = Math.ceil(height / 2 - barHeight / 2);

          canvasContext.fillStyle = barColorInit;
          this.drawRoundedRect(canvasContext, offsetX, offsetY, barHeight);
        }
      };

      drawPlaybackCanvas();
    });
  }

  private drawRoundedRect(
    ctx: CanvasRenderingContext2D,
    x: number,
    y: number,
    h: number
  ) {
    let r = this.state.canvasParams.barRadius;
    const w = this.state.canvasParams.barWidth;

    // Clamp the corner radius to half the bar's width and height.
    if (w < r * 2) {
      r = w / 2;
    }
    if (h < r * 2) {
      r = h / 2;
    }

    ctx.beginPath();
    ctx.moveTo(x + r, y);
    ctx.arcTo(x + w, y, x + w, y + h, r);
    ctx.arcTo(x + w, y + h, x, y + h, r);
    ctx.arcTo(x, y + h, x, y, r);
    ctx.arcTo(x, y, x + w, y, r);
    ctx.closePath();
    ctx.fill();
  }

  private updateCanvasDimensions() {
    const canvas =
      this.visualisationCanvas.current || this.playbackCanvas.current;
    const width = canvas?.clientWidth || 0;

    this.setState({
      canvasParams: { ...this.state.canvasParams, width },
    });
  }

  private async onKeyDown(event: any) {
    if (event.key === 'Escape') {
      await this.onDeleteVoiceMessage();
    }
  }
}
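// Worked example for compactPCM (sketch; it is a private method, so this
// would run inside the class): nine samples in three groups of three, each
// output value being one group's average.
//
//   const input = new Float32Array([73, 6, 1, 9, 5, 11, 2, 19, 35]);
//   this.compactPCM(input, 3);
//   // -> Float32Array [ 26.666..., 8.333..., 18.666... ]
//   //    i.e. (73 + 6 + 1) / 3, (9 + 5 + 11) / 3, (2 + 19 + 35) / 3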