feat(webrtc): initialize webrtc streams with user selected devices #4457

Merged · 1 commit · Aug 25, 2022
37 changes: 36 additions & 1 deletion components/ui/Global/Global.vue
@@ -6,6 +6,7 @@ import { TrackKind } from '~/libraries/WebRTC/types'
import { ModalWindows } from '~/store/ui/types'
import iridium from '~/libraries/Iridium/IridiumManager'
import { useWebRTC } from '~/libraries/Iridium/webrtc/hooks'
import { PropCommonEnum } from '~/libraries/Enums/enums'

declare module 'vue/types/vue' {
interface Vue {
@@ -26,7 +27,7 @@ export default Vue.extend({
}
},
computed: {
...mapState(['ui', 'media', 'conversation', 'files']),
...mapState(['ui', 'media', 'conversation', 'files', 'settings']),
ModalWindows: () => ModalWindows,
showBackgroundCall(): boolean {
if (!this.$device.isMobile) {
@@ -35,6 +36,14 @@ export default Vue.extend({
return this.isBackgroundCall || (this.isActiveCall && this.ui.showSidebar)
},
},
watch: {
'settings.audioInput'(audioInput: string) {
this.updateWebRTCState({ audioInput })
},
'settings.videoInput'(videoInput: string) {
this.updateWebRTCState({ videoInput })
},
},
mounted() {
// This determines if we should show the
let lsVersion = localStorage.getItem('local-version')
@@ -65,6 +74,9 @@ export default Vue.extend({
this.toggleModal('changelog')
localStorage.setItem('local-version', this.$config.clientVersion)
}

const { audioInput, videoInput } = this.settings
this.updateWebRTCState({ audioInput, videoInput })
},
methods: {
/**
@@ -110,6 +122,29 @@ export default Vue.extend({
denyCall() {
iridium.webRTC.denyCall()
},
/**
* @method updateWebRTCState
* @description Updates the WebRTC state with the given settings.
* @example this.updateWebRTCState({ audioInput: 'default', videoInput: 'default' })
*/
updateWebRTCState({
audioInput,
videoInput,
}: {
audioInput?: string
videoInput?: string
}) {
const streamConstraints = {} as MediaStreamConstraints

if (audioInput && audioInput !== PropCommonEnum.DEFAULT) {
streamConstraints.audio = { deviceId: audioInput }
}
if (videoInput && videoInput !== PropCommonEnum.DEFAULT) {
streamConstraints.video = { deviceId: videoInput }
}

iridium.webRTC.streamConstraints = streamConstraints
},
},
})
</script>
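The constraints object assembled in `updateWebRTCState` is what eventually reaches `getUserMedia` through the call layer. The following standalone sketch (not part of the diff; device ids are hypothetical placeholders) shows how those deviceId constraints behave:

```ts
// Minimal sketch of how deviceId constraints map onto getUserMedia.
async function openStream(audioInput?: string, videoInput?: string) {
  const constraints: MediaStreamConstraints = {
    // A bare deviceId is treated as a preference; wrap it as
    // { deviceId: { exact: id } } if falling back to another device
    // should be an error instead.
    audio: audioInput ? { deviceId: audioInput } : true,
    video: videoInput ? { deviceId: videoInput } : true,
  }
  return navigator.mediaDevices.getUserMedia(constraints)
}

// openStream('saved-mic-id', 'saved-cam-id') // hypothetical saved ids
```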
50 changes: 47 additions & 3 deletions libraries/Iridium/webrtc/WebRTCManager.ts
@@ -21,6 +21,10 @@ const initialState: WebRTCState = {
activeCall: null,
streamMuted: {},
createdAt: 0,
streamConstraints: {
audio: true,
video: true,
},
}

export default class WebRTCManager extends Emitter {
@@ -39,6 +43,17 @@ export default class WebRTCManager extends Emitter {
this.state = initialState
}

set streamConstraints(constraints: MediaStreamConstraints) {
this.state.streamConstraints = {
...this.state.streamConstraints,
...constraints,
}
}

get streamConstraints() {
return this.state.streamConstraints
}

async init() {
await this.fetch()
// this.setupListeners()
@@ -497,7 +512,8 @@ export default class WebRTCManager extends Emitter {
root: true,
})

await call.createLocalTracks(kinds)
const constraints = this.streamConstraints
await call.createLocalTracks(kinds, constraints)

this.state.incomingCall = null
this.state.activeCall = {
@@ -820,7 +836,8 @@ export default class WebRTCManager extends Emitter {
return
}

await call.createLocalTracks(kinds)
const constraints = this.streamConstraints
await call.createLocalTracks(kinds, constraints)
await call.answer(did, data)
}

@@ -856,7 +873,8 @@ export default class WebRTCManager extends Emitter {
}
const isMuted = this.state.streamMuted[did]?.[kind]
if (isMuted) {
await call.unmute({ did, kind })
const constraints = this.streamConstraints
await call.unmute({ did, kind, constraints })
$Sounds.playSound(Sounds.UNMUTE)
return
}
@@ -895,4 +913,30 @@ export default class WebRTCManager extends Emitter {
encrypt: { recipients: [did] },
})
}

public async mute({
kind = 'audio',
did = iridium.connector?.id,
}: {
kind: string
did?: string
}) {
if (!this.state.activeCall) return
const call = $WebRTC.getCall(this.state.activeCall.callId)
if (!call) return
await call.mute({ kind, did })
}

public async unmute({
kind,
did = iridium.connector?.id,
}: {
kind: string
did?: string
}) {
if (!this.state.activeCall) return
const call = $WebRTC.getCall(this.state.activeCall.callId)
if (!call) return
await call.unmute({ did, kind, constraints: this.streamConstraints })
}
}
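Because the `streamConstraints` setter spreads the incoming object over the stored one, audio and video selections can be updated independently without clobbering each other. A standalone sketch of that merge behaviour, assuming the `{ audio: true, video: true }` default from `initialState`:

```ts
// Standalone illustration of the setter's merge semantics (not the
// manager itself): later assignments only override the keys they carry.
let streamConstraints: MediaStreamConstraints = { audio: true, video: true }

function setStreamConstraints(constraints: MediaStreamConstraints) {
  streamConstraints = { ...streamConstraints, ...constraints }
}

setStreamConstraints({ audio: { deviceId: 'mic-1' } }) // hypothetical id
// streamConstraints is now { audio: { deviceId: 'mic-1' }, video: true },
// so the next createLocalTracks call opens the selected mic and the
// default camera.
```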
1 change: 1 addition & 0 deletions libraries/Iridium/webrtc/types.ts
@@ -21,6 +21,7 @@ export interface WebRTCState {
data: SignalData
} | null
createdAt: number
streamConstraints: MediaStreamConstraints
}

export enum WebRTCError {
18 changes: 8 additions & 10 deletions libraries/WebRTC/Call.ts
@@ -281,11 +281,11 @@ export class Call extends Emitter<CallEventListeners> {
}

if (kinds.includes('audio')) {
await this.createAudioStream(constraints?.audio || true)
await this.createAudioStream(constraints?.audio)
}

if (kinds.includes('video')) {
await this.createVideoStream(constraints?.video || true)
await this.createVideoStream(constraints?.video)
}

return this.streams
@@ -299,9 +299,7 @@
* @example
* await call.createAudioStream()
*/
async createAudioStream(
constraints: MediaTrackConstraints | boolean | undefined,
) {
async createAudioStream(constraints?: MediaStreamConstraints['audio']) {
if (!iridium.connector?.id) return

const audioStream = await navigator.mediaDevices.getUserMedia({
@@ -342,9 +340,7 @@
* @example
* await call.createVideoStream()
*/
async createVideoStream(
constraints: MediaTrackConstraints | boolean | undefined,
) {
async createVideoStream(constraints?: MediaStreamConstraints['video']) {
if (!iridium.connector?.id) return
const videoStream = await navigator.mediaDevices.getUserMedia({
video: constraints || true,
@@ -771,20 +767,22 @@
async unmute({
kind,
did = iridium.connector?.id,
constraints,
}: {
kind: string
did?: string
constraints: MediaStreamConstraints
}) {
if (!did) return

if (did === iridium.connector?.id) {
if (kind === 'audio' && !this.streams[did]?.audio) {
await this.createAudioStream(true)
await this.createAudioStream(constraints?.audio)
} else if (
kind === 'video' &&
!this.streams[did]?.video?.getVideoTracks()?.length
) {
await this.createVideoStream(true)
await this.createVideoStream(constraints?.video)
} else if (kind === 'screen' && !this.streams[did]?.screen) {
await this.createDisplayStream()
}
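Keeping the `constraints || true` fallback inside the stream helpers means an unset per-kind constraint still opens the default device, while a saved selection targets a specific input. A hedged, standalone sketch of that behaviour (the device id is a placeholder):

```ts
// Sketch of the fallback used by createAudioStream/createVideoStream:
// undefined falls back to `true` (default device), an object with a
// deviceId asks for a specific input.
async function openAudio(constraints?: MediaStreamConstraints['audio']) {
  return navigator.mediaDevices.getUserMedia({ audio: constraints || true })
}

// openAudio()                             -> default microphone
// openAudio({ deviceId: 'saved-mic-id' }) -> hypothetical saved device
```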
4 changes: 2 additions & 2 deletions store/audio/actions.ts
@@ -29,11 +29,11 @@ export default {
}

if (!state.muted) {
await call.mute({ kind: 'audio' })
await iridium.webRTC.mute({ kind: 'audio' })
commit('setMute', true)
return
}
await call.unmute({ kind: 'audio' })
await iridium.webRTC.unmute({ kind: 'audio' })
commit('setMute', false)
},
/**
4 changes: 2 additions & 2 deletions store/video/actions.ts
@@ -12,11 +12,11 @@ const videoActions = {
}

if (!state.disabled) {
await call.mute({ kind: 'video' })
await iridium.webRTC.mute({ kind: 'video' })
commit('setDisabled', true)
return
}
await call.unmute({ kind: 'video' })
await iridium.webRTC.unmute({ kind: 'video' })
commit('setDisabled', false)
},
}
4 changes: 2 additions & 2 deletions store/webrtc/actions.ts
@@ -374,11 +374,11 @@ const webRTCActions = {
}
const isMuted = state.streamMuted[peerId]?.[kind]
if (isMuted) {
await call.unmute({ peerId, kind })
await iridium.webRTC.unmute({ kind })
dispatch('sounds/playSound', Sounds.UNMUTE, { root: true })
return
}
await call.mute({ peerId, kind })
await iridium.webRTC.mute({ kind })
dispatch('sounds/playSound', Sounds.MUTE, { root: true })
},

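The store actions above now go through `iridium.webRTC` rather than calling the `Call` instance directly, so every unmute picks up the manager's saved `streamConstraints`. A sketch of the toggle pattern those actions follow (simplified; the real actions also commit to the store and play sounds):

```ts
import iridium from '~/libraries/Iridium/IridiumManager'

// Simplified toggle, returning the new muted state for the caller to
// commit; mirrors the pattern in store/audio and store/video actions.
async function toggleAudioMute(currentlyMuted: boolean): Promise<boolean> {
  if (!currentlyMuted) {
    await iridium.webRTC.mute({ kind: 'audio' })
    return true
  }
  // unmute() recreates the local track using the saved streamConstraints
  await iridium.webRTC.unmute({ kind: 'audio' })
  return false
}
```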