Allow consuming stream data from a client app #372

Open · wants to merge 5 commits into base: main
Changes from 1 commit
12 changes: 10 additions & 2 deletions backends/dummy/lib.rs
@@ -21,7 +21,7 @@ use servo_media_player::{audio, video, Player, PlayerError, PlayerEvent, StreamT
use servo_media_streams::capture::MediaTrackConstraintSet;
use servo_media_streams::device_monitor::{MediaDeviceInfo, MediaDeviceMonitor};
use servo_media_streams::registry::{register_stream, unregister_stream, MediaStreamId};
use servo_media_streams::{MediaOutput, MediaSocket, MediaStream, MediaStreamType};
use servo_media_streams::{MediaOutput, MediaSocket, MediaSource, MediaStream, MediaStreamType};
use servo_media_traits::{ClientContextId, MediaInstance};
use servo_media_webrtc::{
thread, BundlePolicy, DataChannelId, DataChannelInit, DataChannelMessage, IceCandidate,
@@ -74,7 +74,11 @@ impl Backend for DummyBackend {
(Box::new(DummySocket), id)
}

fn create_videoinput_stream(&self, _: MediaTrackConstraintSet) -> Option<MediaStreamId> {
fn create_videoinput_stream(
&self,
_: MediaTrackConstraintSet,
_: MediaSource,
) -> Option<MediaStreamId> {
Some(register_stream(Arc::new(Mutex::new(DummyMediaStream {
id: MediaStreamId::new(),
}))))
@@ -118,6 +122,8 @@ impl Backend for DummyBackend {
fn get_device_monitor(&self) -> Box<dyn MediaDeviceMonitor> {
Box::new(DummyMediaDeviceMonitor {})
}

fn push_stream_data(&self, _: &MediaStreamId, _: Vec<u8>) {}
}

impl AudioBackend for DummyBackend {
@@ -242,6 +248,8 @@ impl MediaStream for DummyMediaStream {
fn ty(&self) -> MediaStreamType {
MediaStreamType::Audio
}

fn push_data(&self, _: Vec<u8>) {}
}

impl Drop for DummyMediaStream {
14 changes: 11 additions & 3 deletions backends/gstreamer/lib.rs
@@ -64,7 +64,7 @@ use servo_media_player::{Player, PlayerEvent, StreamType};
use servo_media_streams::capture::MediaTrackConstraintSet;
use servo_media_streams::device_monitor::MediaDeviceMonitor;
use servo_media_streams::registry::MediaStreamId;
use servo_media_streams::{MediaOutput, MediaSocket, MediaStreamType};
use servo_media_streams::{MediaOutput, MediaSocket, MediaSource, MediaStreamType};
use servo_media_traits::{BackendMsg, ClientContextId, MediaInstance};
use servo_media_webrtc::{WebRtcBackend, WebRtcController, WebRtcSignaller};
use std::collections::HashMap;
@@ -245,12 +245,20 @@ impl Backend for GStreamerBackend {
media_capture::create_audioinput_stream(set)
}

fn create_videoinput_stream(&self, set: MediaTrackConstraintSet) -> Option<MediaStreamId> {
fn create_videoinput_stream(
Review comment (Member) — a sketch of the suggested alternative follows this file's diff:
I don't think we should be doing this in create_videoinput_stream, that's specifically for screen capture input. We should have a separate create_capture_stream IMO.

&self,
set: MediaTrackConstraintSet,
source: MediaSource,
) -> Option<MediaStreamId> {
if self.capture_mocking.load(Ordering::Acquire) {
// XXXManishearth we should caps filter this
return Some(self.create_videostream());
}
media_capture::create_videoinput_stream(set)
media_capture::create_videoinput_stream(set, source)
}

fn push_stream_data(&self, stream: &MediaStreamId, data: Vec<u8>) {
GStreamerMediaStream::push_data(stream, data);
}

fn can_play_type(&self, media_type: &str) -> SupportsMediaType {
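The alternative the reviewer suggests could look roughly like this. It is a sketch only: the method name create_capture_stream and its signature are assumptions for illustration, not code from this PR or from main. The idea is that device capture keeps its existing entry point while app-fed streams get a dedicated constructor, so create_videoinput_stream no longer needs a MediaSource parameter.

```rust
// Hypothetical excerpt of servo-media's Backend trait (names and signatures are
// assumptions, not from this PR): device capture and app-fed capture as
// separate entry points.
use servo_media_streams::capture::MediaTrackConstraintSet;
use servo_media_streams::registry::MediaStreamId;
use servo_media_streams::MediaStreamType;

pub trait Backend: Send + Sync {
    /// Device capture (the getUserMedia path), unchanged from main.
    fn create_videoinput_stream(&self, set: MediaTrackConstraintSet) -> Option<MediaStreamId>;
    /// A stream whose data is supplied by the client application
    /// (the captureStream path); no device constraints apply.
    fn create_capture_stream(&self, ty: MediaStreamType) -> Option<MediaStreamId>;
    /// Push data into a stream created with create_capture_stream.
    fn push_stream_data(&self, stream: &MediaStreamId, data: Vec<u8>);
}
```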
19 changes: 14 additions & 5 deletions backends/gstreamer/media_capture.rs
@@ -3,7 +3,7 @@ use gst;
use gst::prelude::*;
use servo_media_streams::capture::*;
use servo_media_streams::registry::MediaStreamId;
use servo_media_streams::MediaStreamType;
use servo_media_streams::{MediaSource, MediaStreamType};
use std::i32;

trait AddToCaps {
@@ -150,6 +150,7 @@ pub struct GstMediaTrack {
fn create_input_stream(
stream_type: MediaStreamType,
constraint_set: MediaTrackConstraintSet,
source: MediaSource,
) -> Option<MediaStreamId> {
let devices = GstMediaDevices::new();
devices
@@ -159,14 +160,22 @@
MediaStreamType::Audio => GStreamerMediaStream::create_audio_from,
MediaStreamType::Video => GStreamerMediaStream::create_video_from,
};
f(track.element)
f(match source {
MediaSource::Device => track.element,
MediaSource::App => {
gst::ElementFactory::make("appsrc", None).expect("appsrc creation failed")
}
})
})
}

pub fn create_audioinput_stream(constraint_set: MediaTrackConstraintSet) -> Option<MediaStreamId> {
create_input_stream(MediaStreamType::Audio, constraint_set)
create_input_stream(MediaStreamType::Audio, constraint_set, MediaSource::Device)
}

pub fn create_videoinput_stream(constraint_set: MediaTrackConstraintSet) -> Option<MediaStreamId> {
create_input_stream(MediaStreamType::Video, constraint_set)
pub fn create_videoinput_stream(
constraint_set: MediaTrackConstraintSet,
source: MediaSource,
) -> Option<MediaStreamId> {
create_input_stream(MediaStreamType::Video, constraint_set, source)
}
22 changes: 22 additions & 0 deletions backends/gstreamer/media_stream.rs
@@ -2,6 +2,7 @@ use super::BACKEND_BASE_TIME;
use glib::prelude::*;
use gst;
use gst::prelude::*;
use gst_app::AppSrc;
use servo_media_streams::registry::{
get_stream, register_stream, unregister_stream, MediaStreamId,
};
@@ -47,6 +48,17 @@ impl MediaStream for GStreamerMediaStream {
fn ty(&self) -> MediaStreamType {
self.type_
}

fn push_data(&self, data: Vec<u8>) {
if let Some(source) = self.elements.last() {
if let Some(appsrc) = source.downcast_ref::<AppSrc>() {
let buffer = gst::Buffer::from_slice(data);
if let Err(error) = appsrc.push_buffer(buffer) {
warn!("{}", error);
}
}
}
}
}

impl GStreamerMediaStream {
@@ -217,6 +229,16 @@ impl GStreamerMediaStream {

(stream, GstreamerMediaSocket { proxy_sink })
}

pub fn push_data(stream: &MediaStreamId, data: Vec<u8>) {
let stream = get_stream(stream).expect("Media streams registry does not contain such ID");
let mut stream = stream.lock().unwrap();
let stream = stream
.as_mut_any()
.downcast_mut::<GStreamerMediaStream>()
.unwrap();
stream.push_data(data);
}
}

impl Drop for GStreamerMediaStream {
2 changes: 1 addition & 1 deletion examples/simple_webrtc.rs
@@ -132,7 +132,7 @@ impl State {
let (video, audio) = if !self.peer_id.is_some() {
(
self.media
.create_videoinput_stream(Default::default())
.create_videoinput_stream(Default::default(), MediaSource::Device)
.unwrap_or_else(|| self.media.create_videostream()),
self.media
.create_audioinput_stream(Default::default())
5 changes: 4 additions & 1 deletion examples/videoinput_stream.rs
@@ -1,12 +1,15 @@
extern crate servo_media;
extern crate servo_media_auto;

use servo_media::streams::MediaSource;
use servo_media::ServoMedia;
use std::sync::Arc;
use std::{thread, time};

fn run_example(servo_media: Arc<ServoMedia>) {
if let Some(stream) = servo_media.create_videoinput_stream(Default::default()) {
if let Some(stream) =
servo_media.create_videoinput_stream(Default::default(), MediaSource::Device)
{
let mut output = servo_media.create_stream_output();
output.add_stream(&stream);
thread::sleep(time::Duration::from_millis(6000));
10 changes: 7 additions & 3 deletions servo-media/lib.rs
@@ -18,7 +18,7 @@ use player::{Player, PlayerEvent, StreamType};
use streams::capture::MediaTrackConstraintSet;
use streams::device_monitor::MediaDeviceMonitor;
use streams::registry::MediaStreamId;
use streams::{MediaOutput, MediaSocket, MediaStreamType};
use streams::{MediaOutput, MediaSocket, MediaSource, MediaStreamType};
use webrtc::{WebRtcController, WebRtcSignaller};

pub struct ServoMedia(Box<dyn Backend>);
@@ -48,7 +48,12 @@ pub trait Backend: Send + Sync {
ty: MediaStreamType,
) -> (Box<dyn MediaSocket>, MediaStreamId);
fn create_audioinput_stream(&self, set: MediaTrackConstraintSet) -> Option<MediaStreamId>;
fn create_videoinput_stream(&self, set: MediaTrackConstraintSet) -> Option<MediaStreamId>;
fn create_videoinput_stream(
&self,
set: MediaTrackConstraintSet,
source: MediaSource,
) -> Option<MediaStreamId>;
fn push_stream_data(&self, stream: &MediaStreamId, data: Vec<u8>);
fn create_audio_context(
&self,
id: &ClientContextId,
@@ -76,7 +81,6 @@ pub trait Backend: Send + Sync {
/// and the media instances created for these contexts.
/// The client context identifier is currently an abstraction of Servo's PipelineId.
fn resume(&self, _id: &ClientContextId) {}

fn get_device_monitor(&self) -> Box<dyn MediaDeviceMonitor>;
}

11 changes: 11 additions & 0 deletions streams/lib.rs
@@ -14,6 +14,7 @@ pub trait MediaStream: Any + Send {
fn as_mut_any(&mut self) -> &mut dyn Any;
fn set_id(&mut self, id: registry::MediaStreamId);
fn ty(&self) -> MediaStreamType;
fn push_data(&self, data: Vec<u8>);
}

/// A MediaSocket is a way for a backend to represent a
@@ -22,6 +23,16 @@ pub trait MediaSocket: Any + Send {
fn as_any(&self) -> &dyn Any;
}

/// Determines the source of the media stream.
pub enum MediaSource {
// The media stream source is a capture device.
// i.e. getUserMedia
Device,
// The media stream source is the client application.
// i.e. captureStream
App,
}

/// This isn't part of the webrtc spec; it's a leaky abstraction while media streams
/// are under development and example consumers need to be able to inspect them.
pub trait MediaOutput: Send {
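For context, here is a minimal end-to-end sketch of the new MediaSource::App path, modeled on examples/videoinput_stream.rs. It is not part of this diff; it assumes Backend methods remain reachable through ServoMedia as in the existing examples, and the pushed bytes are placeholders for whatever data a real capture pipeline would produce.

```rust
// Sketch only: drives an app-fed video stream through the API added in this PR.
extern crate servo_media;
extern crate servo_media_auto;

use servo_media::streams::MediaSource;
use servo_media::ServoMedia;
use std::sync::Arc;
use std::{thread, time};

fn run_example(servo_media: Arc<ServoMedia>) {
    // Ask the backend for a video stream fed by the application itself.
    if let Some(stream) =
        servo_media.create_videoinput_stream(Default::default(), MediaSource::App)
    {
        let mut output = servo_media.create_stream_output();
        output.add_stream(&stream);
        // Push placeholder buffers; a real client would push actual frame data.
        for _ in 0..100 {
            servo_media.push_stream_data(&stream, vec![0u8; 1024]);
            thread::sleep(time::Duration::from_millis(33));
        }
    }
}
```

Whether this entry point stays on create_videoinput_stream or moves to a dedicated create_capture_stream (per the review comment above) would not change the push side of this flow.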