diff --git a/components/ui/Global/Global.vue b/components/ui/Global/Global.vue
index 023552376e..e441b26c1c 100644
--- a/components/ui/Global/Global.vue
+++ b/components/ui/Global/Global.vue
@@ -6,6 +6,7 @@ import { TrackKind } from '~/libraries/WebRTC/types'
 import { ModalWindows } from '~/store/ui/types'
 import iridium from '~/libraries/Iridium/IridiumManager'
 import { useWebRTC } from '~/libraries/Iridium/webrtc/hooks'
+import { PropCommonEnum } from '~/libraries/Enums/enums'

 declare module 'vue/types/vue' {
   interface Vue {
@@ -26,7 +27,7 @@ export default Vue.extend({
     }
   },
   computed: {
-    ...mapState(['ui', 'media', 'conversation', 'files']),
+    ...mapState(['ui', 'media', 'conversation', 'files', 'settings']),
     ModalWindows: () => ModalWindows,
     showBackgroundCall(): boolean {
       if (!this.$device.isMobile) {
@@ -35,6 +36,14 @@ export default Vue.extend({
       return this.isBackgroundCall || (this.isActiveCall && this.ui.showSidebar)
     },
   },
+  watch: {
+    'settings.audioInput'(audioInput: string) {
+      this.updateWebRTCState({ audioInput })
+    },
+    'settings.videoInput'(videoInput: string) {
+      this.updateWebRTCState({ videoInput })
+    },
+  },
   mounted() {
     // This determines if we should show the
     let lsVersion = localStorage.getItem('local-version')
@@ -65,6 +74,9 @@ export default Vue.extend({
       this.toggleModal('changelog')
       localStorage.setItem('local-version', this.$config.clientVersion)
     }
+
+    const { audioInput, videoInput } = this.settings
+    this.updateWebRTCState({ audioInput, videoInput })
   },
   methods: {
     /**
@@ -110,6 +122,29 @@ export default Vue.extend({
     denyCall() {
       iridium.webRTC.denyCall()
     },
+    /**
+     * @method updateWebRTCState
+     * @description Updates the WebRTC state with the given settings.
+     * @example this.updateWebRTCState({ audioInput: 'default', videoInput: 'default' })
+     */
+    updateWebRTCState({
+      audioInput,
+      videoInput,
+    }: {
+      audioInput?: string
+      videoInput?: string
+    }) {
+      const streamConstraints = {} as MediaStreamConstraints
+
+      if (audioInput && audioInput !== PropCommonEnum.DEFAULT) {
+        streamConstraints.audio = { deviceId: audioInput }
+      }
+      if (videoInput && videoInput !== PropCommonEnum.DEFAULT) {
+        streamConstraints.video = { deviceId: videoInput }
+      }
+
+      iridium.webRTC.streamConstraints = streamConstraints
+    },
   },
 })
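The new `updateWebRTCState` helper above only turns a stored device id into a `deviceId` constraint when it differs from the default entry, so an untouched setting keeps the browser-chosen track. A minimal standalone sketch of that mapping, assuming `PropCommonEnum.DEFAULT` resolves to the string `'default'`:

```ts
// Standalone sketch of the mapping performed by updateWebRTCState.
// Assumption: PropCommonEnum.DEFAULT === 'default'.
const DEFAULT_DEVICE = 'default'

function toStreamConstraints(
  audioInput?: string,
  videoInput?: string,
): MediaStreamConstraints {
  const constraints: MediaStreamConstraints = {}
  // Only explicit (non-default) device ids become per-track constraints.
  if (audioInput && audioInput !== DEFAULT_DEVICE) {
    constraints.audio = { deviceId: audioInput }
  }
  if (videoInput && videoInput !== DEFAULT_DEVICE) {
    constraints.video = { deviceId: videoInput }
  }
  return constraints
}

// toStreamConstraints('mic-123', 'default') -> { audio: { deviceId: 'mic-123' } }
```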
diff --git a/libraries/Iridium/webrtc/WebRTCManager.ts b/libraries/Iridium/webrtc/WebRTCManager.ts
index 54a8a73a28..8bc94dfeb5 100644
--- a/libraries/Iridium/webrtc/WebRTCManager.ts
+++ b/libraries/Iridium/webrtc/WebRTCManager.ts
@@ -21,6 +21,10 @@ const initialState: WebRTCState = {
   activeCall: null,
   streamMuted: {},
   createdAt: 0,
+  streamConstraints: {
+    audio: true,
+    video: true,
+  },
 }

 export default class WebRTCManager extends Emitter {
@@ -39,6 +43,17 @@ export default class WebRTCManager extends Emitter {
     this.state = initialState
   }

+  set streamConstraints(constraints: MediaStreamConstraints) {
+    this.state.streamConstraints = {
+      ...this.state.streamConstraints,
+      ...constraints,
+    }
+  }
+
+  get streamConstraints() {
+    return this.state.streamConstraints
+  }
+
   async init() {
     await this.fetch()
     // this.setupListeners()
@@ -497,7 +512,8 @@ export default class WebRTCManager extends Emitter {
       root: true,
     })

-    await call.createLocalTracks(kinds)
+    const constraints = this.streamConstraints
+    await call.createLocalTracks(kinds, constraints)

     this.state.incomingCall = null
     this.state.activeCall = {
@@ -820,7 +836,8 @@ export default class WebRTCManager extends Emitter {
       return
     }

-    await call.createLocalTracks(kinds)
+    const constraints = this.streamConstraints
+    await call.createLocalTracks(kinds, constraints)

     await call.answer(did, data)
   }
@@ -856,7 +873,8 @@ export default class WebRTCManager extends Emitter {
     }
     const isMuted = this.state.streamMuted[did]?.[kind]
     if (isMuted) {
-      await call.unmute({ did, kind })
+      const constraints = this.streamConstraints
+      await call.unmute({ did, kind, constraints })
       $Sounds.playSound(Sounds.UNMUTE)
       return
     }
@@ -895,4 +913,30 @@ export default class WebRTCManager extends Emitter {
       encrypt: { recipients: [did] },
     })
   }
+
+  public async mute({
+    kind = 'audio',
+    did = iridium.connector?.id,
+  }: {
+    kind: string
+    did?: string
+  }) {
+    if (!this.state.activeCall) return
+    const call = $WebRTC.getCall(this.state.activeCall.callId)
+    if (!call) return
+    await call.mute({ kind, did })
+  }
+
+  public async unmute({
+    kind,
+    did = iridium.connector?.id,
+  }: {
+    kind: string
+    did?: string
+  }) {
+    if (!this.state.activeCall) return
+    const call = $WebRTC.getCall(this.state.activeCall.callId)
+    if (!call) return
+    await call.unmute({ did, kind, constraints: this.streamConstraints })
+  }
 }
diff --git a/libraries/Iridium/webrtc/types.ts b/libraries/Iridium/webrtc/types.ts
index 726249b109..fe22f6c55e 100644
--- a/libraries/Iridium/webrtc/types.ts
+++ b/libraries/Iridium/webrtc/types.ts
@@ -21,6 +21,7 @@ export interface WebRTCState {
     data: SignalData
   } | null
   createdAt: number
+  streamConstraints: MediaStreamConstraints
 }

 export enum WebRTCError {
diff --git a/libraries/WebRTC/Call.ts b/libraries/WebRTC/Call.ts
index 53d4286753..afc17615d0 100644
--- a/libraries/WebRTC/Call.ts
+++ b/libraries/WebRTC/Call.ts
@@ -281,11 +281,11 @@ export class Call extends Emitter {
     }

     if (kinds.includes('audio')) {
-      await this.createAudioStream(constraints?.audio || true)
+      await this.createAudioStream(constraints?.audio)
     }

     if (kinds.includes('video')) {
-      await this.createVideoStream(constraints?.video || true)
+      await this.createVideoStream(constraints?.video)
     }

     return this.streams
@@ -299,9 +299,7 @@ export class Call extends Emitter {
    * @example
    * await call.createAudioStream()
    */
-  async createAudioStream(
-    constraints: MediaTrackConstraints | boolean | undefined,
-  ) {
+  async createAudioStream(constraints?: MediaStreamConstraints['audio']) {
     if (!iridium.connector?.id) return

     const audioStream = await navigator.mediaDevices.getUserMedia({
@@ -342,9 +340,7 @@ export class Call extends Emitter {
    * @example
    * await call.createVideoStream()
    */
-  async createVideoStream(
-    constraints: MediaTrackConstraints | boolean | undefined,
-  ) {
+  async createVideoStream(constraints?: MediaStreamConstraints['video']) {
     if (!iridium.connector?.id) return
     const videoStream = await navigator.mediaDevices.getUserMedia({
       video: constraints || true,
@@ -771,20 +767,22 @@ export class Call extends Emitter {
   async unmute({
     kind,
     did = iridium.connector?.id,
+    constraints,
   }: {
     kind: string
     did?: string
+    constraints: MediaStreamConstraints
   }) {
     if (!did) return

     if (did === iridium.connector?.id) {
       if (kind === 'audio' && !this.streams[did]?.audio) {
-        await this.createAudioStream(true)
+        await this.createAudioStream(constraints?.audio)
       } else if (
         kind === 'video' &&
         !this.streams[did]?.video?.getVideoTracks()?.length
       ) {
-        await this.createVideoStream(true)
+        await this.createVideoStream(constraints?.video)
       } else if (kind === 'screen' && !this.streams[did]?.screen) {
         await this.createDisplayStream()
       }
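Taken together, the manager's `streamConstraints` setter merges partial updates into the previous value, and `Call.createAudioStream` / `createVideoStream` fall back to `true` whenever no per-track constraint is stored (the `constraints || true` calls above). A small sketch of that merge-and-fallback behaviour, using plain objects rather than the real manager and call classes:

```ts
// Sketch only: mimics the merge done by WebRTCManager's streamConstraints
// setter and the `constraints || true` fallback used in Call's getUserMedia calls.
let streamConstraints: MediaStreamConstraints = { audio: true, video: true }

function setStreamConstraints(update: MediaStreamConstraints) {
  // Later updates overwrite matching keys, untouched keys are preserved.
  streamConstraints = { ...streamConstraints, ...update }
}

async function createAudioStream(): Promise<MediaStream> {
  // Falls back to `true` (any audio device) when no constraint is stored.
  return navigator.mediaDevices.getUserMedia({
    audio: streamConstraints.audio || true,
  })
}

setStreamConstraints({ audio: { deviceId: 'mic-123' } })
// streamConstraints is now { audio: { deviceId: 'mic-123' }, video: true }
```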
diff --git a/store/audio/actions.ts b/store/audio/actions.ts
index 5026487705..f410a30017 100644
--- a/store/audio/actions.ts
+++ b/store/audio/actions.ts
@@ -29,11 +29,11 @@ export default {
     }

     if (!state.muted) {
-      await call.mute({ kind: 'audio' })
+      await iridium.webRTC.mute({ kind: 'audio' })
       commit('setMute', true)
       return
     }
-    await call.unmute({ kind: 'audio' })
+    await iridium.webRTC.unmute({ kind: 'audio' })
     commit('setMute', false)
   },
   /**
diff --git a/store/video/actions.ts b/store/video/actions.ts
index fb6922a676..db9a13ae99 100644
--- a/store/video/actions.ts
+++ b/store/video/actions.ts
@@ -12,11 +12,11 @@ const videoActions = {
     }

     if (!state.disabled) {
-      await call.mute({ kind: 'video' })
+      await iridium.webRTC.mute({ kind: 'video' })
       commit('setDisabled', true)
       return
     }
-    await call.unmute({ kind: 'video' })
+    await iridium.webRTC.unmute({ kind: 'video' })
     commit('setDisabled', false)
   },
 }
diff --git a/store/webrtc/actions.ts b/store/webrtc/actions.ts
index 5bd3b76f02..3cf69cfd4b 100644
--- a/store/webrtc/actions.ts
+++ b/store/webrtc/actions.ts
@@ -374,11 +374,11 @@ const webRTCActions = {
     }
     const isMuted = state.streamMuted[peerId]?.[kind]
     if (isMuted) {
-      await call.unmute({ peerId, kind })
+      await iridium.webRTC.unmute({ kind })
       dispatch('sounds/playSound', Sounds.UNMUTE, { root: true })
       return
     }
-    await call.mute({ peerId, kind })
+    await iridium.webRTC.mute({ kind })
     dispatch('sounds/playSound', Sounds.MUTE, { root: true })
   },