-
Notifications
You must be signed in to change notification settings - Fork 46
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Browse files
Browse the repository at this point in the history
* wip * recording wip * recording wip * added audio component with canvas * save audio recording * audio file upload * recording pause and resume working * cleanup * cleanup * save edits * audio recording wip * audio wip * recording wip - still send msg bug * recording wip * added polyfill, refactored audioClip and updated svgs * refactoring and polyfill fix * saving fixes * finalizing working version * typing and code clean-up * refactoring and lint * refactoring and type fix * fix bazel dependency * fix responsive waveform * fix window type * fixing mediaRecorder ts error * fix mediarecorder error * last refactoring fix * refactoring: small typing and svg fix * small tooltip fix * audioClip broken down * refactoring audioclip component * refactoring and removed logs
- Loading branch information
Showing
37 changed files
with
1,240 additions
and
288 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
60 changes: 60 additions & 0 deletions
60
frontend/ui/src/pages/Inbox/MessageInput/AudioRecording/AudioStream.tsx
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,60 @@ | ||
import React, {useState, useEffect} from 'react'; | ||
import {WaveformAudio} from './WaveformAudio'; | ||
import {ReactComponent as Pause} from 'assets/images/icons/stopMedia.svg'; | ||
import styles from './index.module.scss'; | ||
|
||
declare global {
  interface Window {
    // Safari exposes the prefixed constructor only.
    webkitAudioContext: typeof AudioContext;
  }
}

type AudioStreamProps = {
  audioStream: MediaStream;
  pauseRecording: () => void;
};

/**
 * Live view of an in-progress recording: analyses `audioStream` with the
 * Web Audio API on every animation frame and feeds the frequency data to
 * the waveform renderer, alongside a pause button.
 *
 * All Web Audio objects are created and torn down inside the effect so
 * nothing leaks across renders or stream changes.
 */
export function AudioStream({audioStream, pauseRecording}: AudioStreamProps) {
  const [dataArr, setDataArr] = useState<number[]>([0]);

  useEffect(() => {
    const audioContext = new (window.AudioContext || window.webkitAudioContext)();
    const audioAnalyser = audioContext.createAnalyser();
    audioAnalyser.minDecibels = -90;
    audioAnalyser.maxDecibels = -10;
    audioAnalyser.smoothingTimeConstant = 0.85;
    const audioArr = new Uint8Array(audioAnalyser.frequencyBinCount);

    const source = audioContext.createMediaStreamSource(audioStream);
    source.connect(audioAnalyser);

    let animationFrameId = 0;
    const updateAudio = () => {
      audioAnalyser.getByteFrequencyData(audioArr);
      setDataArr([...audioArr]);
      animationFrameId = requestAnimationFrame(updateAudio);
    };
    animationFrameId = requestAnimationFrame(updateAudio);

    return () => {
      window.cancelAnimationFrame(animationFrameId);
      audioAnalyser.disconnect();
      source.disconnect();
      // Release the audio hardware/context; it is re-created if the stream changes.
      audioContext.close();
    };
  }, [audioStream]);

  return (
    <div className={styles.container}>
      <div className={styles.waveformContainer}>
        <WaveformAudio audioData={dataArr} />
      </div>

      <button type="button" className={`${styles.audioButtons} ${styles.pauseButton}`} onClick={pauseRecording}>
        <Pause />
      </button>
    </div>
  );
}
66 changes: 66 additions & 0 deletions
66
frontend/ui/src/pages/Inbox/MessageInput/AudioRecording/WaveformAudio.tsx
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,66 @@ | ||
import React, {useState, useEffect, useRef} from 'react'; | ||
|
||
type WaveformAudioProps = { | ||
audioData: number[]; | ||
}; | ||
|
||
export function WaveformAudio({audioData}: WaveformAudioProps) { | ||
const canvas = useRef(null); | ||
const [context, setContext] = useState(null); | ||
const [barWidth, setBarWidth] = useState(3); | ||
const [barTotalCount, setBarTotalCount] = useState(57); | ||
const maxFrequencyValue = 255; | ||
const canvasHeight = 40; | ||
|
||
useEffect(() => { | ||
if (canvas && canvas.current) { | ||
setResponsiveCanvas(); | ||
setContext(canvas.current.getContext('2d')); | ||
canvas.current.style.width = '100%'; | ||
canvas.current.style.height = canvasHeight + 'px'; | ||
canvas.current.width = canvas.current.offsetWidth; | ||
canvas.current.height = canvas.current.offsetHeight; | ||
} | ||
}, []); | ||
|
||
useEffect(() => { | ||
if (audioData && context) { | ||
context.clearRect(0, 0, canvas.current.width, canvas.current.height); | ||
visualizeAudioRecording(); | ||
} | ||
}, [context, audioData]); | ||
|
||
const setResponsiveCanvas = () => { | ||
if (window.innerWidth >= 1800 && window.innerWidth < 2000) { | ||
setBarTotalCount(72); | ||
} else if (window.innerWidth >= 2000) { | ||
setBarTotalCount(90); | ||
setBarWidth(4); | ||
} | ||
}; | ||
|
||
const visualizeAudioRecording = () => { | ||
const canvasHeight = canvas.current.height; | ||
const singleBarSize = canvas.current.width / barTotalCount; | ||
|
||
context.lineWidth = barWidth; | ||
context.strokeStyle = '#1578D4'; //Airy blue | ||
context.lineCap = 'round'; | ||
|
||
let x = barWidth * 2; | ||
for (let i = 0; i < barTotalCount; i++) { | ||
const freqHeight = (audioData[i] / maxFrequencyValue) * canvasHeight; | ||
const baseHeight = canvasHeight / 8; | ||
const yStartingPoint = canvasHeight / 2 - freqHeight / 2 - baseHeight / 2; | ||
const yEndPoint = yStartingPoint + freqHeight + baseHeight; | ||
|
||
context.beginPath(); | ||
context.moveTo(x, yStartingPoint); | ||
context.lineTo(x, yEndPoint); | ||
context.stroke(); | ||
x += singleBarSize; | ||
} | ||
}; | ||
|
||
return <canvas ref={canvas}></canvas>; | ||
} |
63 changes: 63 additions & 0 deletions
63
frontend/ui/src/pages/Inbox/MessageInput/AudioRecording/index.module.scss
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,63 @@ | ||
@import 'assets/scss/colors.scss';
@import 'assets/scss/fonts.scss';

// Root of the audio-recording UI; right-aligns its children so the
// pause/cancel buttons sit at the end of the message input row.
.container {
  width: 100%;
  height: 100%;
  position: relative;
  display: flex;
  align-items: center;
  justify-content: flex-end;
}

// Wrapper that lets the waveform canvas stretch to the available width.
.waveformContainer {
  width: 100%;
  display: flex;
  align-items: center;
}

// Applied to .container while the recording is uploading.
.loading {
  margin: 4px 0;
}

// Shared circular icon-button style for the pause and cancel controls.
.audioButtons {
  width: 28px;
  height: 24px;
  display: flex;
  justify-content: center;
  align-items: center;
  background-color: var(--color-airy-blue);
  border-radius: 50%;
  border: none;
  cursor: pointer;

  // Force the embedded SVG icon to render white on the blue background.
  svg {
    path {
      fill: white;
    }
  }
}

.cancelButton {
  margin-left: 18px;
  margin-right: 6px;
}

.pauseButton {
  margin-right: 10px;
  margin-left: 6px;
}

@keyframes fadeIn {
  0% {
    opacity: 0;
  }
  100% {
    opacity: 1;
  }
}

// Uploaded-clip preview; fades in once the recording upload completes.
.audioComponent {
  margin: 4px 6px 4px 0;
  animation: fadeIn 2.5s linear forwards;
}
190 changes: 190 additions & 0 deletions
190
frontend/ui/src/pages/Inbox/MessageInput/AudioRecording/index.tsx
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,190 @@ | ||
import React, {useState, useEffect} from 'react'; | ||
import {AudioStream} from './AudioStream'; | ||
import {AudioClip, SimpleLoader} from 'components'; | ||
import {uploadMedia} from '../../../../services/mediaUploader'; | ||
import {ReactComponent as Cancel} from 'assets/images/icons/cancelCross.svg'; | ||
import AudioRecorder from 'audio-recorder-polyfill'; | ||
import mpegEncoder from 'audio-recorder-polyfill/mpeg-encoder'; | ||
import styles from './index.module.scss'; | ||
|
||
declare global { | ||
interface Window { | ||
webkitAudioContext: typeof AudioContext; | ||
MediaRecorder: typeof MediaRecorder; | ||
} | ||
} | ||
|
||
AudioRecorder.encoder = mpegEncoder; | ||
AudioRecorder.prototype.mimeType = 'audio/mpeg'; | ||
window.MediaRecorder = AudioRecorder; | ||
|
||
type AudioRecordingProps = { | ||
fetchMediaRecorder: (mediaRecorder: MediaRecorder) => void; | ||
isAudioRecordingPaused: (isPaused: boolean) => void; | ||
setAudioRecordingPreviewLoading: React.Dispatch<React.SetStateAction<boolean>>; | ||
getUploadedAudioRecordingFile: (fileUrl: string) => void; | ||
audioRecordingResumed: boolean; | ||
setAudioRecordingResumed: React.Dispatch<React.SetStateAction<boolean>>; | ||
audioRecordingSent: boolean; | ||
audioRecordingCanceledUpdate: (isCanceled: boolean) => void; | ||
setErrorPopUp: React.Dispatch<React.SetStateAction<string>>; | ||
}; | ||
|
||
export function AudioRecording({ | ||
fetchMediaRecorder, | ||
isAudioRecordingPaused, | ||
setAudioRecordingPreviewLoading, | ||
getUploadedAudioRecordingFile, | ||
audioRecordingResumed, | ||
setAudioRecordingResumed, | ||
audioRecordingSent, | ||
audioRecordingCanceledUpdate, | ||
setErrorPopUp, | ||
}: AudioRecordingProps) { | ||
const [audioStream, setAudioStream] = useState<MediaStream | null>(null); | ||
const [mediaRecorder, setMediaRecorder] = useState<MediaRecorder | null>(null); | ||
const [savedAudioRecording, setSavedAudioRecording] = useState<File | null>(null); | ||
const [audioRecordingFileUploaded, setAudioRecordingFileUploaded] = useState<string | null>(null); | ||
const [loading, setLoading] = useState(false); | ||
|
||
useEffect(() => { | ||
let abort = false; | ||
|
||
const startVoiceRecording = async () => { | ||
try { | ||
const stream = await navigator.mediaDevices.getUserMedia({ | ||
audio: true, | ||
}); | ||
setAudioStream(stream); | ||
} catch { | ||
audioRecordingCanceledUpdate(true); | ||
setErrorPopUp( | ||
'Microphone access denied. Check your browser settings to make sure Airy has permission to access your microphone, and try again.' | ||
); | ||
} | ||
}; | ||
|
||
if (!abort) { | ||
startVoiceRecording(); | ||
} | ||
|
||
return () => { | ||
abort = true; | ||
}; | ||
}, []); | ||
|
||
useEffect(() => { | ||
if (audioStream && !audioRecordingSent) { | ||
const mediaRecorder = new MediaRecorder(audioStream); | ||
setMediaRecorder(mediaRecorder); | ||
fetchMediaRecorder(mediaRecorder); | ||
|
||
mediaRecorder.start(); | ||
|
||
const audioChunks = []; | ||
|
||
const getAudioFile = event => { | ||
audioChunks.push(event.data); | ||
|
||
const audioBlob = new Blob(audioChunks); | ||
|
||
const file = new File(audioChunks, 'recording.mp3', { | ||
type: audioBlob.type, | ||
lastModified: Date.now(), | ||
}); | ||
|
||
setSavedAudioRecording(file); | ||
}; | ||
|
||
mediaRecorder.addEventListener('dataavailable', getAudioFile); | ||
|
||
return () => { | ||
mediaRecorder.removeEventListener('dataavailable', getAudioFile); | ||
}; | ||
} | ||
}, [audioStream]); | ||
|
||
useEffect(() => { | ||
if (savedAudioRecording && !audioRecordingSent) { | ||
let isRequestAborted = false; | ||
|
||
if (!isRequestAborted) { | ||
setLoading(true); | ||
uploadMedia(savedAudioRecording) | ||
.then((response: {mediaUrl: string}) => { | ||
setAudioRecordingFileUploaded(response.mediaUrl); | ||
getUploadedAudioRecordingFile(response.mediaUrl); | ||
setLoading(false); | ||
}) | ||
.catch(() => { | ||
setLoading(false); | ||
cancelRecording(); | ||
setErrorPopUp('Failed to upload the audio recording. Please try again later.'); | ||
}); | ||
} | ||
return () => { | ||
isRequestAborted = true; | ||
}; | ||
} | ||
}, [savedAudioRecording, audioRecordingSent]); | ||
|
||
useEffect(() => { | ||
if (loading) { | ||
setAudioRecordingPreviewLoading(true); | ||
} else { | ||
setAudioRecordingPreviewLoading(false); | ||
} | ||
}, [loading]); | ||
|
||
useEffect(() => { | ||
if (audioRecordingResumed && mediaRecorder) { | ||
setAudioRecordingFileUploaded(null); | ||
mediaRecorder.resume(); | ||
} | ||
}, [audioRecordingResumed, mediaRecorder]); | ||
|
||
useEffect(() => { | ||
if (audioRecordingSent) { | ||
cancelRecording(); | ||
} | ||
}, [audioRecordingSent]); | ||
|
||
const pauseRecording = () => { | ||
mediaRecorder.requestData(); | ||
mediaRecorder.pause(); | ||
isAudioRecordingPaused(true); | ||
setAudioRecordingResumed(false); | ||
}; | ||
|
||
const cancelRecording = () => { | ||
setAudioRecordingFileUploaded(null); | ||
|
||
mediaRecorder.stop(); | ||
mediaRecorder.stream.getTracks()[0].stop(); | ||
|
||
setAudioStream(null); | ||
audioRecordingCanceledUpdate(true); | ||
}; | ||
|
||
return ( | ||
<div className={`${styles.container} ${loading ? styles.loading : ''}`}> | ||
{!loading && ( | ||
<button type="button" className={`${styles.audioButtons} ${styles.cancelButton}`} onClick={cancelRecording}> | ||
<Cancel /> | ||
</button> | ||
)} | ||
|
||
{!audioRecordingFileUploaded && !loading && audioStream && ( | ||
<AudioStream pauseRecording={pauseRecording} audioStream={audioStream} /> | ||
)} | ||
|
||
{loading && <SimpleLoader />} | ||
|
||
{audioRecordingFileUploaded && ( | ||
<div className={styles.audioComponent}> | ||
<AudioClip audioUrl={audioRecordingFileUploaded} /> | ||
</div> | ||
)} | ||
</div> | ||
); | ||
} |
Oops, something went wrong.