This media server will allow you to receive and send media streams from remote WebRTC peers and manage how you want to route them.
npm i --save medooze-media-server
const MediaServer = require('medooze-media-server');
You can check the full object documentation here.
To discuss issues related to this project or ask for help please join the Google community group.
You can check a demo application here
We intend to implement support for the following features:
- MP4 multitrack recording support for all WebRTC codecs: H.264, VP8, VP9, Opus and PCMU/A.
- VP9 SVC layer selection
- Simulcast
- RTP transport wide congestion control
- Sender side bitrate estimation: algorithm not decided yet; candidates are GCC, NADA or SCREAM
- Flex FEC draft 3
- NACK and RTX support
- [RTCP reduced size](https://tools.ietf.org/html/rfc5506)
- Bundle
- ICE lite
- Frame Marking
- PERC double encryption
- Plain RTP broadcasting/streaming
- MPEG DASH
- Datachannels
//Get the Semantic SDP parsing helpers
const SemanticSDP = require("semantic-sdp");
//Destructure the classes used below.
//BUGFIX: the original referenced `SDPInfo` and `MediaInfo` as bare globals,
//which throws a ReferenceError — only `SemanticSDP.SDPInfo` was ever in scope.
const { SDPInfo, MediaInfo } = SemanticSDP;

//Process the sdp (`sdp` is the remote offer string received via your signalling channel)
const offer = SDPInfo.process(sdp);

//Get the Medooze Media Server interface
const MediaServer = require('medooze-media-server');

//Create UDP server endpoint on our public `ip`
const endpoint = MediaServer.createEndpoint(ip);

//Create an MP4 recorder for the incoming streams.
//NOTE(review): the original used `recorder` below without ever creating it
//(ReferenceError) — confirm the desired recording path.
const recorder = MediaServer.createRecorder("recording.mp4");

//Create a DTLS ICE transport in that endpoint using the remote offer info
const transport = endpoint.createTransport({
  dtls : offer.getDTLS(),
  ice  : offer.getICE()
});

//Set RTP remote properties so the transport knows how to demux incoming RTP
transport.setRemoteProperties({
  audio : offer.getMedia("audio"),
  video : offer.getMedia("video")
});

//Get local DTLS and ICE info to advertise in the answer
const dtls = transport.getLocalDTLSInfo();
const ice  = transport.getLocalICEInfo();
//Get local candidates
const candidates = endpoint.getLocalCandidates();

//Create local SDP info for the answer
const answer = new SDPInfo();
//Add ice and dtls info
answer.setDTLS(dtls);
answer.setICE(ice);
//Add every local candidate to the answer
for (const candidate of candidates)
  answer.addCandidate(candidate);

//Get remote audio m-line info
const audioOffer = offer.getMedia("audio");
//If we have audio
if (audioOffer)
{
  //Create audio media for the answer
  const audio = new MediaInfo("audio", "audio");
  //Get codec type
  const opus = audioOffer.getCodec("opus");
  //Add opus codec (guard: the offer may not include it)
  if (opus)
    audio.addCodec(opus);
  //Echo back the audio header extensions offered by the remote peer
  for (const [id, uri] of audioOffer.getExtensions().entries())
    audio.addExtension(id, uri);
  //Add it to answer
  answer.addMedia(audio);
}

//Get remote video m-line info
const videoOffer = offer.getMedia("video");
//If offer had video
if (videoOffer)
{
  //Create video media for the answer
  const video = new MediaInfo("video", "video");
  //Get codec types (guard both: the offer may not include either —
  //the original only guarded fec and would pass null to addCodec for vp9)
  const vp9 = videoOffer.getCodec("vp9");
  const fec = videoOffer.getCodec("flexfec-03");
  if (vp9)
    video.addCodec(vp9);
  if (fec != null)
    video.addCodec(fec);
  //Limit incoming bitrate
  video.setBitrate(1024);
  //Echo back the video header extensions offered by the remote peer
  for (const [id, uri] of videoOffer.getExtensions().entries())
    video.addExtension(id, uri);
  //Add it to answer
  answer.addMedia(video);
}

//Set RTP local properties from the media we negotiated in the answer
transport.setLocalProperties({
  audio : answer.getMedia("audio"),
  video : answer.getMedia("video")
});

//For each stream offered by the remote peer
for (const offered of offer.getStreams().values())
{
  //Create the remote stream into the transport
  const incomingStream = transport.createIncomingStream(offered);
  //Record it to MP4
  recorder.record(incomingStream);
  //Create new local stream to loop the media back
  const outgoingStream = transport.createOutgoingStream({
    audio: true,
    video: true
  });
  //Get local stream info
  const info = outgoingStream.getStreamInfo();
  //Copy incoming data from the remote stream to the local one
  outgoingStream.attachTo(incomingStream);
  //Add local stream info to the answer so the peer expects it
  answer.addStream(info);
}

//Get answer SDP string to send back over signalling
const str = answer.toString();
Sergio Garcia Murillo @ Medooze
MIT