Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Apple Vision Pro Media Playback Fix #9837

Draft
wants to merge 4 commits into
base: dev
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,7 @@ export const PositionalAudioComponent = defineComponent({
audioNodes.panner.coneOuterAngle = audio.coneOuterAngle.value
audioNodes.panner.coneOuterGain = audio.coneOuterGain.value
}, [
mediaElement?.element,
audio.refDistance,
audio.rolloffFactor,
audio.maxDistance,
Expand Down
34 changes: 6 additions & 28 deletions packages/engine/src/audio/systems/MediaSystem.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,13 +44,6 @@ import { PositionalAudioComponent } from '../components/PositionalAudioComponent
export class AudioEffectPlayer {
static instance = new AudioEffectPlayer()

constructor() {
// only init when running in client
if (isClient) {
this.#init()
}
}

static SOUNDS = {
notification: '/sfx/notification.mp3',
message: '/sfx/message.mp3',
Expand All @@ -65,37 +58,22 @@ export class AudioEffectPlayer {
return buffer
}

// pool of elements
#els: HTMLAudioElement[] = []

#init() {
if (this.#els.length) return
for (let i = 0; i < 20; i++) {
const audioElement = document.createElement('audio')
audioElement.crossOrigin = 'anonymous'
audioElement.loop = false
this.#els.push(audioElement)
}
}

play = async (sound: string, volumeMultiplier = getState(AudioState).notificationVolume) => {
await Promise.resolve()

if (!this.#els.length) return

if (!this.bufferMap[sound]) {
// create buffer if doesn't exist
this.bufferMap[sound] = await AudioEffectPlayer?.instance?.loadBuffer(sound)
}

const source = getState(AudioState).audioContext.createBufferSource()
const audioContext = getState(AudioState).audioContext
const source = audioContext.createBufferSource()
const gain = audioContext.createGain()
gain.gain.value = getState(AudioState).masterVolume * volumeMultiplier
source.buffer = this.bufferMap[sound]
const el = this.#els.find((el) => el.paused) ?? this.#els[0]
el.volume = getState(AudioState).masterVolume * volumeMultiplier
if (el.src !== sound) el.src = sound
el.currentTime = 0
source.connect(gain)
gain.connect(audioContext.destination)
source.start()
source.connect(getState(AudioState).audioContext.destination)
}
}

Expand Down
100 changes: 85 additions & 15 deletions packages/engine/src/scene/components/MediaComponent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,18 @@ import Hls from 'hls.js'
import { startTransition, useEffect } from 'react'
import { DoubleSide, Mesh, MeshBasicMaterial, PlaneGeometry } from 'three'

import { NO_PROXY, State, getMutableState, getState, none, useHookstate } from '@etherealengine/hyperflux'
import {
NO_PROXY,
State,
getMutableState,
getState,
none,
useHookstate,
useMutableState
} from '@etherealengine/hyperflux'

import { isClient } from '@etherealengine/common/src/utils/getEnvironment'
import { defineQuery } from '@etherealengine/ecs'
import {
defineComponent,
getComponent,
Expand All @@ -51,6 +60,8 @@ import { setObjectLayers } from '@etherealengine/spatial/src/renderer/components
import { setVisibleComponent } from '@etherealengine/spatial/src/renderer/components/VisibleComponent'
import { ObjectLayers } from '@etherealengine/spatial/src/renderer/constants/ObjectLayers'
import { EntityTreeComponent } from '@etherealengine/spatial/src/transform/components/EntityTree'
import { requestXRSession } from '@etherealengine/spatial/src/xr/XRSessionFunctions'
import { XRState } from '@etherealengine/spatial/src/xr/XRState'
import { AssetLoader } from '../../assets/classes/AssetLoader'
import { useTexture } from '../../assets/functions/resourceHooks'
import { AudioState } from '../../audio/AudioState'
Expand Down Expand Up @@ -88,6 +99,11 @@ export const createAudioNodeGroup = (
export const MediaElementComponent = defineComponent({
name: 'MediaElement',

// elementPool: {
// video: [] as HTMLVideoElement[],
// audio: [] as HTMLAudioElement[]
// },

onInit: (entity) => {
return {
element: undefined! as HTMLMediaElement,
Expand Down Expand Up @@ -124,6 +140,51 @@ export const MediaElementComponent = defineComponent({
errors: ['MEDIA_ERROR', 'HLS_ERROR']
})

// if ('HTMLMediaElement' in globalThis) {
// for (let i = 0; i < 20; i++) {
// MediaElementComponent.elementPool.video.push(document.createElement('video'))
// MediaElementComponent.elementPool.audio.push(document.createElement('audio'))
// }
// }

// In Safari on Apple Vision Pro, all media loses autoplay permissions after entering XR, so
// we need to trigger the play() method on the media element (and resume the audio context)
// during a user activation event, which necessarily happens when starting an XR session.
// Play-pausing all media elements inside the activation handler satisfies Safari's
// autoplay policy so playback can be resumed once the XR session is live.

const elementsQuery = defineQuery([MediaElementComponent])
let playPausePromises = [] as Promise<HTMLMediaElement>[]

// Before the XR session request (still inside the user activation): start playback on
// every media element and immediately pause it again, recording a promise per element
// so the after-hook can resume the ones that were successfully unlocked.
requestXRSession.beforeHooks.push(() => {
  playPausePromises = elementsQuery().map((eid) => {
    const el = getComponent(eid, MediaElementComponent).element
    return el
      .play()
      .then(() => el.pause())
      .then(() => el)
  })

  // Resuming the audio context also requires user activation, so do it here too.
  getState(AudioState).audioContext.resume()
})

// After the XR session has been granted, resume playback on every element that was
// play-paused in the before-hook.
requestXRSession.afterHooks.push((ctx) => {
  ctx.result.then(() => {
    for (const p of playPausePromises) {
      p.then((mediaElement) => {
        mediaElement.play()
      }).catch((error) => {
        // play() rejects when the browser still denies playback (e.g. the activation
        // was not honored); log instead of leaving an unhandled rejection.
        console.error('Failed to resume media playback after XR session start', error)
      })
    }
    playPausePromises.length = 0
    getState(AudioState).audioContext.resume()
  })
})

export const MediaComponent = defineComponent({
name: 'MediaComponent',
jsonID: 'EE_media',
Expand Down Expand Up @@ -240,19 +301,23 @@ export const MediaComponent = defineComponent({
export function MediaReactor() {
const entity = useEntityContext()
const media = useComponent(entity, MediaComponent)
const mediaElement = useOptionalComponent(entity, MediaElementComponent)
const mediaElementComponent = useOptionalComponent(entity, MediaElementComponent)
const audioContext = getState(AudioState).audioContext
const gainNodeMixBuses = getState(AudioState).gainNodeMixBuses
const xrSession = useMutableState(XRState).session

if (!isClient) return null

useEffect(() => {
// This must be outside of the normal ECS flow by necessity, since we have to respond to user-input synchronously
// in order to ensure media will play programmatically
const handleAutoplay = () => {
console.log('handleAutoplay')
const mediaComponent = getComponent(entity, MediaElementComponent)
// handle when we dont have autoplay enabled but have programatically started playback
if (!media.autoplay.value && !media.paused.value) mediaComponent?.element.play()
mediaComponent?.element.play().then(() => {
if (!media.autoplay.value && !media.paused.value) mediaComponent?.element.pause()
})
// handle when we have autoplay enabled but have paused playback
if (media.autoplay.value && media.paused.value) media.paused.set(false)
// handle when we have autoplay and mediaComponent is paused
Expand All @@ -272,6 +337,9 @@ export function MediaReactor() {
document.body.addEventListener('touchend', handleAutoplay)
EngineRenderer.instance.renderer.domElement.addEventListener('pointerup', handleAutoplay)
EngineRenderer.instance.renderer.domElement.addEventListener('touchend', handleAutoplay)
const mediaElement = mediaElementComponent?.element.value
mediaElement?.addEventListener('pause', handleAutoplay)
xrSession.value?.addEventListener('squeeze', handleAutoplay)

return () => {
window.removeEventListener('pointerup', handleAutoplay)
Expand All @@ -281,33 +349,35 @@ export function MediaReactor() {
document.body.removeEventListener('touchend', handleAutoplay)
EngineRenderer.instance.renderer.domElement.removeEventListener('pointerup', handleAutoplay)
EngineRenderer.instance.renderer.domElement.removeEventListener('touchend', handleAutoplay)
mediaElement?.removeEventListener('pause', handleAutoplay)
xrSession.value?.removeEventListener('squeeze', handleAutoplay)
}
}, [])
}, [mediaElementComponent, xrSession])

useEffect(
function updatePlay() {
if (!mediaElement) return
if (!mediaElementComponent) return
if (media.paused.value) {
mediaElement.element.value.pause()
mediaElementComponent.element.value.pause()
} else {
const promise = mediaElement.element.value.play()
const promise = mediaElementComponent.element.value.play()
if (promise) {
promise.catch((error) => {
console.error(error)
})
}
}
},
[media.paused, mediaElement]
[media.paused, mediaElementComponent]
)

useEffect(
function updateSeekTime() {
if (!mediaElement) return
setTime(mediaElement.element, media.seekTime.value)
if (!mediaElement.element.paused.value) mediaElement.element.value.play() // if not paused, start play again
if (!mediaElementComponent) return
setTime(mediaElementComponent.element, media.seekTime.value)
if (!mediaElementComponent.element.paused.value) mediaElementComponent.element.value.play() // if not paused, start play again
},
[media.seekTime, mediaElement]
[media.seekTime, mediaElementComponent]
)

useEffect(
Expand Down Expand Up @@ -469,16 +539,16 @@ export function MediaReactor() {

useEffect(
function updateMixbus() {
if (!mediaElement?.value) return
const element = mediaElement.element.get({ noproxy: true })
if (!mediaElementComponent?.value) return
const element = mediaElementComponent.element.get({ noproxy: true })
const audioNodes = AudioNodeGroups.get(element)
if (audioNodes) {
audioNodes.gain.disconnect(audioNodes.mixbus)
audioNodes.mixbus = media.isMusic.value ? gainNodeMixBuses.music : gainNodeMixBuses.soundEffects
audioNodes.gain.connect(audioNodes.mixbus)
}
},
[mediaElement, media.isMusic]
[mediaElementComponent, media.isMusic]
)

const debugEnabled = useHookstate(getMutableState(RendererState).nodeHelperVisibility)
Expand Down
5 changes: 4 additions & 1 deletion packages/engine/src/scene/components/UVOL1Component.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,9 @@ import {
PlaneGeometry,
SRGBColorSpace,
ShaderMaterial,
Texture
Sphere,
Texture,
Vector3
} from 'three'
import { CORTOLoader } from '../../assets/loaders/corto/CORTOLoader'
import { AssetLoaderState } from '../../assets/state/AssetLoaderState'
Expand Down Expand Up @@ -319,6 +321,7 @@ function UVOL1Reactor() {
throw new Error('VDEBUG Entity ${entity} Invalid geometry frame: ' + i.toString())
}

geometry.boundingSphere = new Sphere().set(new Vector3(), Infinity)
meshBuffer.set(i, geometry)
pendingRequests.current -= 1

Expand Down
28 changes: 0 additions & 28 deletions packages/server/key.pem

This file was deleted.