ref(RTC): store remote tracks in peer TPC

In order to implement P2P <-> JVB connection switching we need to be
able to associate remote tracks with the TraceablePeerConnections.
paweldomas committed Jan 16, 2017
1 parent 12ad6dd commit 061721a12941748d572567bdfeb7deb3f0dfa922
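For orientation, here is a minimal, self-contained sketch of the ownership model this commit moves to. The objects are plain stand-ins written for this note, not the real lib-jitsi-meet classes: getRemoteTracks(endpointId, mediaType) and removeRemoteTracks(owner) mirror method names that appear in the diff below, while createPeerConnectionLike, addRemoteTrack and getAllRemoteTracks are hypothetical helpers used only to make the example runnable.

// Each peer connection keeps its own remote tracks, keyed by owner
// endpoint id and media type; the RTC layer only aggregates across
// the peer connections it knows about.
function createPeerConnectionLike() {
    const remoteTracks = {}; // owner endpoint id -> { audio: t, video: t }
    return {
        addRemoteTrack(owner, mediaType, track) {
            (remoteTracks[owner] = remoteTracks[owner] || {})[mediaType]
                = track;
        },
        getRemoteTracks(owner, mediaType) {
            const result = [];
            Object.keys(remoteTracks).forEach(endpoint => {
                if (owner && owner !== endpoint) {
                    return;
                }
                Object.keys(remoteTracks[endpoint]).forEach(type => {
                    if (!mediaType || mediaType === type) {
                        result.push(remoteTracks[endpoint][type]);
                    }
                });
            });
            return result;
        },
        removeRemoteTracks(owner) {
            const removed = this.getRemoteTracks(owner);
            delete remoteTracks[owner];
            return removed;
        }
    };
}

// RTC-like aggregator: it no longer keeps a remoteTracks map of its
// own, which is what allows switching between a P2P and a JVB peer
// connection without losing track ownership information.
const peerConnections = {
    1: createPeerConnectionLike(),
    2: createPeerConnectionLike()
};
function getAllRemoteTracks(mediaType) {
    let tracks = [];
    Object.keys(peerConnections).forEach(id => {
        tracks = tracks.concat(
            peerConnections[id].getRemoteTracks(undefined, mediaType));
    });
    return tracks;
}

peerConnections[1].addRemoteTrack('p2pPeer', 'audio', { id: 'a1' });
peerConnections[2].addRemoteTrack('jvbPeer', 'video', { id: 'v1' });
console.log(getAllRemoteTracks().length);        // 2, aggregated across PCs
console.log(getAllRemoteTracks('video').length); // 1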
@@ -320,20 +320,6 @@ JitsiConferenceEventManager.prototype.setupChatRoomListeners = function () {
}
});
chatRoom.addPresenceListener("videomuted", function (values, from) {
conference.rtc.handleRemoteTrackMute(MediaType.VIDEO,
values.value == "true", from);
});
chatRoom.addPresenceListener("audiomuted", function (values, from) {
conference.rtc.handleRemoteTrackMute(MediaType.AUDIO,
values.value == "true", from);
});
chatRoom.addPresenceListener("videoType", function(data, from) {
conference.rtc.handleRemoteTrackVideoTypeChanged(data.value, from);
});
chatRoom.addPresenceListener("devices", function (data, from) {
var isAudioAvailable = false;
var isVideoAvailable = false;
@@ -532,13 +518,10 @@ JitsiConferenceEventManager.prototype.setupStatisticsListeners = function () {
if(!conference.statistics)
return;
conference.statistics.addAudioLevelListener(function (ssrc, level) {
var resource = conference.rtc.getResourceBySSRC(ssrc);
if (!resource)
return;
conference.rtc.setAudioLevel(resource, level);
});
conference.statistics.addAudioLevelListener(
function (ssrc, level) {
conference.rtc.setAudioLevel(ssrc, level);
});
// Forward the "before stats disposed" event
conference.statistics.addBeforeDisposedListener(function () {
conference.eventEmitter.emit(
@@ -541,6 +541,15 @@ JitsiLocalTrack.prototype.getDeviceId = function () {
return this._realDeviceId || this.deviceId;
};
/**
* Returns the id of the participant that owns the track.
* @returns {string} the id of the participant. It corresponds to the Colibri
* endpoint id/MUC nickname in case of Jitsi-meet.
*/
JitsiLocalTrack.prototype.getParticipantId = function() {
return this.conference ? this.conference.myUserId() : undefined;
};
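A hedged usage sketch of the convention documented above: a local track reports the local endpoint id obtained from the conference. The conference object below is a plain stand-in, not a real JitsiConference instance.

const conference = { myUserId: () => 'localEndpointId' };
const localTrackLike = {
    conference,
    getParticipantId() {
        // same fallback as the method added above
        return this.conference ? this.conference.myUserId() : undefined;
    }
};
console.log(localTrackLike.getParticipantId()); // 'localEndpointId'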
/**
* Sets the value of bytes sent statistic.
* @param bytesSent {integer} the new value (FIXME: what is an integer in js?)
@@ -7,7 +7,6 @@ var JitsiLocalTrack = require("./JitsiLocalTrack.js");
import JitsiTrackError from "../../JitsiTrackError";
import * as JitsiTrackErrors from "../../JitsiTrackErrors";
var DataChannels = require("./DataChannels");
var JitsiRemoteTrack = require("./JitsiRemoteTrack.js");
import * as MediaType from "../../service/RTC/MediaType";
var TraceablePeerConnection = require("./TraceablePeerConnection");
var VideoType = require("../../service/RTC/VideoType");
@@ -53,8 +52,6 @@ export default class RTC extends Listenable {
this.peerConnIdCounter = 1;
this.localTracks = [];
//FIXME: We should support multiple streams per jid.
this.remoteTracks = {};
this.options = options;
// A flag whether we had received that the data channel had opened
// we can get this flag out of sync if for some reason data channel got
@@ -288,71 +285,40 @@ export default class RTC extends Listenable {
return tracks;
}
/**
* Runs a callback on each PeerConnection currently stored in the RTC
* module. If the callback returns a non-null value during execution,
* the loop is aborted and the result is returned.
* @param {function(TraceablePeerConnection)} callback the function to be
* executed
* @return {*} the first non-null, non-undefined value returned by
* the callback.
* @private
*/
_iteratePeerConnections (callback) {
return Object.keys(this.peerConnections).find(function (id) {
const pc = this.peerConnections[id];
return callback.apply(this, [pc]);
}, this);
}
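A small, self-contained illustration of the Object.keys().find() based loop used by _iteratePeerConnections above: iteration stops at the first peer connection for which the callback returns a truthy value. The peer connection map below is a plain stand-in.

const pcs = { 1: { id: 1 }, 2: { id: 2 }, 3: { id: 3 } };
function iteratePeerConnections(callback) {
    return Object.keys(pcs).find(id => callback(pcs[id]));
}
const visited = [];
iteratePeerConnections(pc => {
    visited.push(pc.id);
    return pc.id === 2; // truthy, so the loop is aborted here
});
console.log(visited); // [ 1, 2 ], pc 3 is never visited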
/**
* Obtains all remote tracks currently known to this RTC module instance.
* @param {MediaType} [mediaType] the remote tracks will be filtered
* by their media type if this argument is specified.
* @return {Array<JitsiRemoteTrack>}
*/
getRemoteTracks (mediaType) {
const remoteTracks = [];
Object.keys(this.remoteTracks).forEach(
function(endpoint) {
const endpointTracks = this.remoteTracks[endpoint];
endpointTracks && Object.keys(endpointTracks).forEach(
function (trackMediaType) {
// per media type filtering
if (mediaType && mediaType !== trackMediaType) {
return;
}
const mediaTrack = endpointTracks[trackMediaType];
if (mediaTrack) {
remoteTracks.push(mediaTrack);
}
});
}, this);
let remoteTracks = [];
this._iteratePeerConnections(function (pc) {
const pcRemoteTracks = pc.getRemoteTracks(undefined, mediaType);
if (pcRemoteTracks) {
remoteTracks = remoteTracks.concat(pcRemoteTracks);
}
});
return remoteTracks;
}
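Caller-side view of the same API, as a hedged sketch: consumers keep asking the RTC layer for remote tracks and may filter by media type. The rtcStub object and its _tracks array are invented for this example; only the getRemoteTracks(mediaType) signature follows the code above, and the 'audio'/'video' strings are assumed to mirror what service/RTC/MediaType exports.

const MediaTypeStub = { AUDIO: 'audio', VIDEO: 'video' };
const rtcStub = {
    _tracks: [
        { type: MediaTypeStub.AUDIO, owner: 'peerA' },
        { type: MediaTypeStub.VIDEO, owner: 'peerA' },
        { type: MediaTypeStub.VIDEO, owner: 'peerB' }
    ],
    getRemoteTracks(mediaType) {
        return this._tracks.filter(t => !mediaType || t.type === mediaType);
    }
};
console.log(rtcStub.getRemoteTracks().length);                    // 3
console.log(rtcStub.getRemoteTracks(MediaTypeStub.VIDEO).length); // 2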
/**
* Gets JitsiRemoteTrack for the passed MediaType associated with given MUC
* nickname (resource part of the JID).
* @param type audio or video.
* @param resource the resource part of the MUC JID
* @returns {JitsiRemoteTrack|null}
*/
getRemoteTrackByType (type, resource) {
if (this.remoteTracks[resource])
return this.remoteTracks[resource][type];
else
return null;
}
/**
* Gets JitsiRemoteTrack for AUDIO MediaType associated with given MUC nickname
* (resource part of the JID).
* @param resource the resource part of the MUC JID
* @returns {JitsiRemoteTrack|null}
*/
getRemoteAudioTrack (resource) {
return this.getRemoteTrackByType(MediaType.AUDIO, resource);
}
/**
* Gets JitsiRemoteTrack for VIDEO MediaType associated with given MUC nickname
* (resource part of the JID).
* @param resource the resource part of the MUC JID
* @returns {JitsiRemoteTrack|null}
*/
getRemoteVideoTrack (resource) {
return this.getRemoteTrackByType(MediaType.VIDEO, resource);
}
/**
* Set mute for all local audio streams attached to the conference.
* @param value the mute value
@@ -377,35 +343,6 @@ export default class RTC extends Listenable {
this.localTracks.splice(pos, 1);
}
/**
* Initializes a new JitsiRemoteTrack instance with the data provided by
* the signalling layer and SDP.
*
* @param {string} owner
* @param {MediaStream} stream
* @param {MediaStreamTrack} track
* @param {MediaType} mediaType
* @param {VideoType|undefined} videoType
* @param {string} ssrc
* @param {boolean} muted
*/
_createRemoteTrack (owner,
stream, track, mediaType, videoType, ssrc, muted) {
const remoteTrack
= new JitsiRemoteTrack(
this, this.conference, owner, stream, track,
mediaType, videoType, ssrc, muted);
const remoteTracks
= this.remoteTracks[owner] || (this.remoteTracks[owner] = {});
if (remoteTracks[mediaType]) {
logger.error("Overwriting remote track!", owner, mediaType);
}
remoteTracks[mediaType] = remoteTrack;
this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_ADDED, remoteTrack);
}
/**
* Removes all JitsiRemoteTracks associated with the given MUC nickname
* (resource part of the JID). Returns an array of the removed tracks.
@@ -414,81 +351,20 @@ export default class RTC extends Listenable {
* @returns {JitsiRemoteTrack[]}
*/
removeRemoteTracks (owner) {
const removedTracks = [];
let removedTracks = [];
if (this.remoteTracks[owner]) {
const removedAudioTrack
= this.remoteTracks[owner][MediaType.AUDIO];
const removedVideoTrack
= this.remoteTracks[owner][MediaType.VIDEO];
this._iteratePeerConnections(function (pc) {
const pcRemovedTracks = pc.removeRemoteTracks(owner);
removedTracks = removedTracks.concat(pcRemovedTracks);
});
removedAudioTrack && removedTracks.push(removedAudioTrack);
removedVideoTrack && removedTracks.push(removedVideoTrack);
logger.debug(
"Removed remote tracks for " + owner
+ " count: " + removedTracks.length);
delete this.remoteTracks[owner];
}
return removedTracks;
}
/**
* Finds a remote track by its stream and track ids.
* @param {string} streamId the media stream id as defined by the WebRTC
* @param {string} trackId the media track id as defined by the WebRTC
* @return {JitsiRemoteTrack|undefined}
* @private
*/
_getRemoteTrackById (streamId, trackId) {
let result = undefined;
// .find will break the loop once the first match is found
Object.keys(this.remoteTracks).find(function(endpoint) {
const endpointTracks = this.remoteTracks[endpoint];
return endpointTracks && Object.keys(endpointTracks).find(
function (mediaType) {
const mediaTrack = endpointTracks[mediaType];
if (mediaTrack
&& mediaTrack.getStreamId() == streamId
&& mediaTrack.getTrackId() == trackId) {
result = mediaTrack;
return true;
} else {
return false;
}
});
}, this);
return result;
}
/**
* Removes <tt>JitsiRemoteTrack</tt> identified by given stream and track
* ids.
*
* @param {string} streamId the media stream id as defined by the WebRTC
* @param {string} trackId the media track id as defined by the WebRTC
* @returns {JitsiRemoteTrack|undefined} the track which has been removed or
* <tt>undefined</tt> if no track matching given stream and track ids was
* found.
*/
_removeRemoteTrack (streamId, trackId) {
const toBeRemoved = this._getRemoteTrackById(streamId, trackId);
if (toBeRemoved) {
toBeRemoved.dispose();
delete this.remoteTracks[
toBeRemoved.getParticipantId()][toBeRemoved.getType()];
this.rtc.eventEmitter.emit(
RTCEvents.REMOTE_TRACK_REMOVED, toBeRemoved);
}
return toBeRemoved;
}
static getPCConstraints () {
return RTCUtils.pc_constraints;
}
@@ -627,13 +503,18 @@ export default class RTC extends Listenable {
dispose () { }
setAudioLevel (resource, audioLevel) {
if(!resource)
setAudioLevel (ssrc, audioLevel) {
const track = this._getTrackBySSRC(ssrc);
if(!track) {
return;
}
if (!track.isAudioTrack()) {
logger.warn("Received audio level for non-audio track: " + ssrc);
return;
var audioTrack = this.getRemoteAudioTrack(resource);
if(audioTrack) {
audioTrack.setAudioLevel(audioLevel);
}
track.setAudioLevel(audioLevel);
}
/**
@@ -642,13 +523,19 @@ export default class RTC extends Listenable {
* @param ssrc the ssrc to check.
*/
getResourceBySSRC (ssrc) {
if (this.getLocalTracks().find(
function(localTrack){ return localTrack.getSSRC() == ssrc; })) {
return this.conference.myUserId();
const track = this._getTrackBySSRC(ssrc);
return track ? track.getParticipantId() : undefined;
}
_getTrackBySSRC (ssrc) {
let track
= this.getLocalTracks().find(
function(localTrack){ return localTrack.getSSRC() == ssrc; });
if (!track) {
track = this._getRemoteTrackBySSRC(ssrc);
}
const track = this.getRemoteTrackBySSRC(ssrc);
return track ? track.getParticipantId() : null;
return track;
}
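A hedged, self-contained sketch of the SSRC resolution chain introduced in this hunk: local tracks are checked first, then remote tracks, and the resource/audio-level helpers operate on whichever track matched. The makeTrack factory and the sample SSRC values are invented for the example; only the method names on the tracks (getSSRC, getParticipantId, isAudioTrack, setAudioLevel) follow the diff.

function makeTrack(ssrc, participantId, isAudio) {
    return {
        lastLevel: null,
        getSSRC: () => ssrc,
        getParticipantId: () => participantId,
        isAudioTrack: () => isAudio,
        setAudioLevel(level) { this.lastLevel = level; }
    };
}
const localTracks = [ makeTrack(1111, 'me', true) ];
const remoteTracks = [
    makeTrack(2222, 'peerA', true),
    makeTrack(3333, 'peerA', false)
];

function getTrackBySSRC(ssrc) {
    return localTracks.find(t => t.getSSRC() === ssrc)
        || remoteTracks.find(t => t.getSSRC() === ssrc);
}
function getResourceBySSRC(ssrc) {
    const track = getTrackBySSRC(ssrc);
    return track ? track.getParticipantId() : undefined;
}
function setAudioLevel(ssrc, level) {
    const track = getTrackBySSRC(ssrc);
    if (!track || !track.isAudioTrack()) {
        return; // the real code also warns about non-audio tracks
    }
    track.setAudioLevel(level);
}

setAudioLevel(2222, 0.7);
console.log(getResourceBySSRC(2222));   // 'peerA'
console.log(remoteTracks[0].lastLevel); // 0.7
console.log(getResourceBySSRC(9999));   // undefined, unknown SSRC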
/**
@@ -657,38 +544,14 @@ export default class RTC extends Listenable {
* @param ssrc the ssrc to check.
* @return {JitsiRemoteTrack|undefined} the first remote track that
* matches the given SSRC, or <tt>undefined</tt> if no such track was found.
* @private
*/
getRemoteTrackBySSRC (ssrc) {
_getRemoteTrackBySSRC (ssrc) {
return this.getRemoteTracks().find(function (remoteTrack) {
return ssrc == remoteTrack.getSSRC();
});
}
/**
* Handles remote track mute / unmute events.
* @param type {string} "audio" or "video"
* @param isMuted {boolean} the new mute state
* @param from {string} user id
*/
handleRemoteTrackMute (type, isMuted, from) {
var track = this.getRemoteTrackByType(type, from);
if (track) {
track.setMute(isMuted);
}
}
/**
* Handles remote track video type events
* @param value {string} the new video type
* @param from {string} user id
*/
handleRemoteTrackVideoTypeChanged (value, from) {
var videoTrack = this.getRemoteVideoTrack(from);
if (videoTrack) {
videoTrack._setVideoType(value);
}
}
/**
* Sends message via the datachannels.
* @param to {string} the id of the endpoint that should receive the