Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add AudioListener/AudioPanner DOM interfaces #21502

Merged
merged 12 commits into from Aug 25, 2018

Some generated files are not rendered by default. Learn more.

@@ -10,7 +10,6 @@ use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use servo_media::audio::node::AudioNodeInit;

#[dom_struct]
pub struct AudioDestinationNode {
@@ -23,9 +22,8 @@ impl AudioDestinationNode {
options: &AudioNodeOptions,
) -> AudioDestinationNode {
AudioDestinationNode {
node: AudioNode::new_inherited(
AudioNodeInit::DestinationNode,
Some(context.destination_node()),
node: AudioNode::new_inherited_for_id(
context.destination_node(),
context,
options,
1,
@@ -0,0 +1,190 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use dom::audioparam::AudioParam;
use dom::baseaudiocontext::BaseAudioContext;
use dom::bindings::codegen::Bindings::AudioListenerBinding::{self, AudioListenerMethods};
use dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::bindings::root::{Dom, DomRoot};
use dom::window::Window;
use dom_struct::dom_struct;
use servo_media::audio::param::{ParamType, ParamDir};
use std::f32;

#[dom_struct]
// DOM wrapper for the listener of a BaseAudioContext, exposing the nine
// AudioParams that describe the listener's position and orientation.
// https://webaudio.github.io/web-audio-api/#audiolistener
pub struct AudioListener {
    reflector_: Reflector,
    // Listener position in 3D cartesian space.
    position_x: Dom<AudioParam>,
    position_y: Dom<AudioParam>,
    position_z: Dom<AudioParam>,
    // Direction the listener is facing (forward vector).
    forward_x: Dom<AudioParam>,
    forward_y: Dom<AudioParam>,
    forward_z: Dom<AudioParam>,
    // "Up" direction of the listener's head; together with the forward
    // vector this fixes the listener's orientation.
    up_x: Dom<AudioParam>,
    up_y: Dom<AudioParam>,
    up_z: Dom<AudioParam>,
}

impl AudioListener {
fn new_inherited(
window: &Window,
context: &BaseAudioContext,
) -> AudioListener {
let node = context.listener();

let position_x = AudioParam::new(
window,
context,
node,
ParamType::Position(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let position_y = AudioParam::new(
window,
context,
node,
ParamType::Position(ParamDir::Y),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let position_z = AudioParam::new(
window,
context,
node,
ParamType::Position(ParamDir::Z),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let forward_x = AudioParam::new(
window,
context,
node,
ParamType::Forward(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let forward_y = AudioParam::new(
window,
context,
node,
ParamType::Forward(ParamDir::Y),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let forward_z = AudioParam::new(
window,
context,
node,
ParamType::Forward(ParamDir::Z),
AutomationRate::A_rate,
-1., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let up_x = AudioParam::new(
window,
context,
node,
ParamType::Up(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let up_y = AudioParam::new(
window,
context,
node,
ParamType::Up(ParamDir::Y),
AutomationRate::A_rate,
1., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let up_z = AudioParam::new(
window,
context,
node,
ParamType::Up(ParamDir::Z),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
AudioListener {
reflector_: Reflector::new(),
position_x: Dom::from_ref(&position_x),
position_y: Dom::from_ref(&position_y),
position_z: Dom::from_ref(&position_z),
forward_x: Dom::from_ref(&forward_x),
forward_y: Dom::from_ref(&forward_y),
forward_z: Dom::from_ref(&forward_z),
up_x: Dom::from_ref(&up_x),
up_y: Dom::from_ref(&up_y),
up_z: Dom::from_ref(&up_z),
}
}

#[allow(unrooted_must_root)]
pub fn new(
window: &Window,
context: &BaseAudioContext,
) -> DomRoot<AudioListener> {
let node = AudioListener::new_inherited(window, context);
reflect_dom_object(Box::new(node), window, AudioListenerBinding::Wrap)
}
}

// Generated-binding trait: each getter simply re-roots the corresponding
// AudioParam stored on the listener.
impl AudioListenerMethods for AudioListener {
    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-positionx
    fn PositionX(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.position_x)
    }
    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-positiony
    fn PositionY(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.position_y)
    }
    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-positionz
    fn PositionZ(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.position_z)
    }

    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardx
    fn ForwardX(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.forward_x)
    }
    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardy
    fn ForwardY(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.forward_y)
    }
    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardz
    fn ForwardZ(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.forward_z)
    }

    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-upx
    fn UpX(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.up_x)
    }
    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-upy
    fn UpY(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.up_y)
    }
    // https://webaudio.github.io/web-audio-api/#dom-audiolistener-upz
    fn UpZ(&self) -> DomRoot<AudioParam> {
        DomRoot::from_ref(&self.up_z)
    }
}
@@ -36,17 +36,26 @@ pub struct AudioNode {
channel_interpretation: Cell<ChannelInterpretation>,
}


impl AudioNode {
pub fn new_inherited(
node_type: AudioNodeInit,
node_id: Option<NodeId>,
context: &BaseAudioContext,
options: &AudioNodeOptions,
number_of_inputs: u32,
number_of_outputs: u32,
) -> AudioNode {
let node_id =
node_id.unwrap_or_else(|| context.audio_context_impl().create_node(node_type));
let node_id = context.audio_context_impl().create_node(node_type);
AudioNode::new_inherited_for_id(node_id, context, options, number_of_inputs, number_of_outputs)
}

pub fn new_inherited_for_id(
node_id: NodeId,
context: &BaseAudioContext,
options: &AudioNodeOptions,
number_of_inputs: u32,
number_of_outputs: u32,
) -> AudioNode {
AudioNode {
eventtarget: EventTarget::new_inherited(),
node_id,
@@ -204,6 +213,11 @@ impl AudioNodeMethods for AudioNode {
return Err(Error::IndexSize);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::PannerNode) => {
if value > 2 {
return Err(Error::NotSupported)
}
}
// XXX We do not support any of the other AudioNodes with
// constraints yet. Add more cases here as we add support
// for new AudioNodes.
@@ -237,6 +251,11 @@ impl AudioNodeMethods for AudioNode {
return Err(Error::InvalidState);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::PannerNode) => {
if value == ChannelCountMode::Max {
return Err(Error::NotSupported)
}
}
// XXX We do not support any of the other AudioNodes with
// constraints yet. Add more cases here as we add support
// for new AudioNodes.
@@ -34,7 +34,6 @@ impl AudioScheduledSourceNode {
AudioScheduledSourceNode {
node: AudioNode::new_inherited(
node_type,
None, /* node_id */
context,
options,
number_of_inputs,
ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.