From a4ff2711f2c36989854299af54ba4c6a1efb3e50 Mon Sep 17 00:00:00 2001 From: Timothy Drews Date: Tue, 8 Dec 2015 15:21:24 -0800 Subject: [PATCH] Add StreamingEngine and Playhead. Closes #101 Closes #186 Change-Id: I8d1a8d6c0f8cfb5abdd81a149318377282b2bea0 --- lib/media/media_source_engine.js | 97 ++- lib/media/playhead.js | 226 ++++++ lib/media/presentation_timeline.js | 61 +- lib/media/streaming_engine.js | 995 ++++++++++++++++++++++++ lib/media/time_ranges_utils.js | 77 ++ lib/util/error.js | 9 +- spec/media_source_engine_spec.js | 71 +- spec/playhead_spec.js | 292 +++++++ spec/presentation_timeline_spec.js | 73 +- spec/streaming_engine_spec.js | 1139 ++++++++++++++++++++++++++++ 10 files changed, 2955 insertions(+), 85 deletions(-) create mode 100644 lib/media/playhead.js create mode 100644 lib/media/streaming_engine.js create mode 100644 lib/media/time_ranges_utils.js create mode 100644 spec/playhead_spec.js create mode 100644 spec/streaming_engine_spec.js diff --git a/lib/media/media_source_engine.js b/lib/media/media_source_engine.js index 31d848fed7..e328e20388 100644 --- a/lib/media/media_source_engine.js +++ b/lib/media/media_source_engine.js @@ -19,6 +19,7 @@ goog.provide('shaka.media.MediaSourceEngine'); goog.require('shaka.asserts'); goog.require('shaka.media.TextSourceBuffer'); +goog.require('shaka.media.TimeRangesUtils'); goog.require('shaka.util.EventManager'); goog.require('shaka.util.IDestroyable'); goog.require('shaka.util.PublicPromise'); @@ -225,26 +226,41 @@ shaka.media.MediaSourceEngine.prototype.init = function(typeConfig) { /** - * Computes how far ahead of the given timestamp we have buffered. + * Gets the first timestamp in buffer for the given content type. + * + * @param {string} contentType + * @return {?number} The timestamp in seconds, or null if nothing is buffered. 
+ */ +shaka.media.MediaSourceEngine.prototype.bufferStart = function(contentType) { + return shaka.media.TimeRangesUtils.bufferStart( + this.sourceBuffers_[contentType].buffered); +}; + + +/** + * Gets the last timestamp in buffer for the given content type. + * + * @param {string} contentType + * @return {?number} The timestamp in seconds, or null if nothing is buffered. + */ +shaka.media.MediaSourceEngine.prototype.bufferEnd = function(contentType) { + return shaka.media.TimeRangesUtils.bufferEnd( + this.sourceBuffers_[contentType].buffered); +}; + + +/** + * Computes how far ahead of the given timestamp is buffered for the given + * content type. * * @param {string} contentType * @param {number} time - * @return {number} in seconds + * @return {number} The amount of time buffered ahead in seconds. */ shaka.media.MediaSourceEngine.prototype.bufferedAheadOf = function(contentType, time) { - // NOTE: On IE11, buffered ranges may show appended data before the associated - // append operation is complete. - var b = this.sourceBuffers_[contentType].buffered; - var fudge = 0.000001; // 1us - // NOTE: The 1us fudge is needed on Safari, where removal up to X may leave a - // range which starts at X + 1us. - for (var i = 0; i < b.length; ++i) { - if (time + fudge >= b.start(i) && time < b.end(i)) { - return b.end(i) - time; - } - } - return 0; + return shaka.media.TimeRangesUtils.bufferedAheadOf( + this.sourceBuffers_[contentType].buffered, time); }; @@ -283,6 +299,40 @@ shaka.media.MediaSourceEngine.prototype.remove = }; +/** + * Enqueue an operation to clear the SourceBuffer. + * + * @param {string} contentType + * @return {!Promise} + */ +shaka.media.MediaSourceEngine.prototype.clear = function(contentType) { + // Note that not all platforms allow clearing to Number.POSITIVE_INFINITY. 
+ return this.enqueueOperation_( + contentType, + this.remove_.bind(this, contentType, 0, this.mediaSource_.duration)); +}; + + +/** + * Sets the timestamp offset for the given content type. + * + * @param {string} contentType + * @param {number} timestampOffset The timestamp offset. Segments which start + * at time t will be inserted at time t + timestampOffset instead. This + * value does not affect segments which have already been inserted. + * @return {!Promise} + */ +shaka.media.MediaSourceEngine.prototype.setTimestampOffset = function( + contentType, timestampOffset) { + if (this.sourceBuffers_[contentType].timestampOffset == timestampOffset) + return Promise.resolve(); + + return this.enqueueOperation_( + contentType, + this.setTimestampOffset_.bind(this, contentType, timestampOffset)); +}; + + /** * @param {string=} opt_reason Valid reasons are 'network' and 'decode'. * @return {!Promise} @@ -349,6 +399,21 @@ shaka.media.MediaSourceEngine.prototype.remove_ = }; +/** + * Set the SourceBuffer's timestamp offset. + * @param {string} contentType + * @param {number} timestampOffset + * @private + */ +shaka.media.MediaSourceEngine.prototype.setTimestampOffset_ = + function(contentType, timestampOffset) { + this.sourceBuffers_[contentType].timestampOffset = timestampOffset; + + // Fake 'updateend' event to resolve the operation. 
+ this.onUpdateEnd_(contentType); +}; + + /** * @param {string} contentType * @param {!Event} event @@ -371,11 +436,9 @@ shaka.media.MediaSourceEngine.prototype.onError_ = /** * @param {string} contentType - * @param {!Event} event * @private */ -shaka.media.MediaSourceEngine.prototype.onUpdateEnd_ = - function(contentType, event) { +shaka.media.MediaSourceEngine.prototype.onUpdateEnd_ = function(contentType) { var operation = this.queues_[contentType][0]; shaka.asserts.assert(operation, 'Spurious updateend event!'); shaka.asserts.assert(!this.sourceBuffers_[contentType].updating, diff --git a/lib/media/playhead.js b/lib/media/playhead.js new file mode 100644 index 0000000000..affa64b8f9 --- /dev/null +++ b/lib/media/playhead.js @@ -0,0 +1,226 @@ +/** + * @license + * Copyright 2015 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +goog.provide('shaka.media.Playhead'); + +goog.require('shaka.asserts'); +goog.require('shaka.media.PresentationTimeline'); +goog.require('shaka.media.TimeRangesUtils'); +goog.require('shaka.util.EventManager'); +goog.require('shaka.util.IDestroyable'); + + + +/** + * Creates a Playhead, which manages the video's current time. + * + * The Playhead provides mechanisms for setting the presentation's start time, + * restricting seeking to valid time ranges, and stopping playback for startup + * and re- buffering. 
+ * + * @param {!HTMLVideoElement} video + * @param {!shaka.media.PresentationTimeline} timeline + * @param {number} minBufferTime + * @param {number} startTime The time, in seconds, to start the presentation. + * This time should be within the presentation timeline. + * @param {function(boolean)} onBuffering Called and passed true when stopped + * for buffering; called and passed false when proceeding after buffering. + * @param {function()} onSeek Called when the user agent seeks to a time within + * the presentation timeline. + * + * @constructor + * @struct + * @implements {shaka.util.IDestroyable} + */ +shaka.media.Playhead = function( + video, timeline, minBufferTime, startTime, onBuffering, onSeek) { + /** @private {HTMLVideoElement} */ + this.video_ = video; + + /** @private {shaka.media.PresentationTimeline} */ + this.timeline_ = timeline; + + /** @private {number} */ + this.minBufferTime_ = minBufferTime; + + /** + * The presentation time, in seconds, at which to begin playback. + * @private {number} + */ + this.startTime_ = startTime; + + /** @private {?function(boolean)} */ + this.onBuffering_ = onBuffering; + + /** @private {?function()} */ + this.onSeek_ = onSeek; + + /** @private {shaka.util.EventManager} */ + this.eventManager_ = new shaka.util.EventManager(); + + /** @private {boolean} */ + this.buffering_ = false; + + /** @private {number} */ + this.lastPlaybackRate_ = 0; + + // Check if the video has already loaded some metadata. + if (video.readyState > 0) { + this.onLoadedMetadata_(); + } else { + this.eventManager_.listen( + video, 'loadedmetadata', this.onLoadedMetadata_.bind(this)); + } +}; + + +/** @override */ +shaka.media.Playhead.prototype.destroy = function() { + var p = this.eventManager_.destroy(); + this.eventManager_ = null; + + this.video_ = null; + this.timeline_ = null; + this.onBuffering_ = null; + this.onSeek_ = null; + + return p; +}; + + +/** + * Gets the playhead's current (logical) position. 
+ * + * @return {number} + */ +shaka.media.Playhead.prototype.getTime = function() { + var time = this.video_.readyState > 0 ? + this.video_.currentTime : + this.startTime_; + // Although we restrict the video's currentTime elsewhere, clamp it here to + // ensure any timing issues (e.g., the user agent seeks and calls this + // function before we receive the 'seeking' event) don't cause us to return a + // time outside the segment availability window. + return this.clampTime_(time); +}; + + +/** + * Stops the playhead for buffering, or resumes the playhead after buffering. + * + * @param {boolean} buffering True to stop the playhead; false to allow it to + * continue. + */ +shaka.media.Playhead.prototype.setBuffering = function(buffering) { + if (buffering && !this.buffering_) { + this.lastPlaybackRate_ = this.video_.playbackRate; + this.video_.playbackRate = 0; + this.buffering_ = true; + this.onBuffering_(true); + } else if (!buffering && this.buffering_) { + if (this.video_.playbackRate == 0) { + // The app hasn't set a new playback rate, so restore the old one. + this.video_.playbackRate = this.lastPlaybackRate_; + } else { + // There's nothing we could have done to stop the app from setting a new + // rate, so we don't need to do anything here. + } + this.buffering_ = false; + this.onBuffering_(false); + } +}; + + +/** + * Handles a 'loadedmetadata' event. + * + * @private + */ +shaka.media.Playhead.prototype.onLoadedMetadata_ = function() { + var video = /** @type {!HTMLVideoElement} */(this.video_); + this.eventManager_.unlisten(video, 'loadedmetadata'); + this.eventManager_.listen(video, 'seeking', this.onSeeking_.bind(this)); + + // Trigger call to onSeeking_(). + this.video_.currentTime = this.clampTime_(this.startTime_); +}; + + +/** + * Handles a 'seeking' event. 
+ * + * @private + */ +shaka.media.Playhead.prototype.onSeeking_ = function() { + shaka.asserts.assert(this.video_.readyState > 0, + 'readyState should be greater than 0'); + + var currentTime = this.video_.currentTime; + var targetTime = currentTime; + + var d = this.timeline_.getSegmentAvailabilityDuration(); + var live = (d != null) && (d < Number.POSITIVE_INFINITY); + + var start = this.timeline_.getSegmentAvailabilityStart(); + var end = this.timeline_.getSegmentAvailabilityEnd(); + + if (!live && (currentTime < start)) { + targetTime = start; + } else if (live && (currentTime < start + this.minBufferTime_)) { + targetTime = Math.max(currentTime, start); + var bufferedAhead = shaka.media.TimeRangesUtils.bufferedAheadOf( + this.video_.buffered, targetTime); + if (bufferedAhead == 0) { + // The playhead is in an unbuffered region, so buffering will be + // required at the seek target; since the segment availability window + // is moving, we cannot seek to the seek target exactly; otherwise, we + // would fall outside the segment availability window again... so seek + // a bit ahead of the seek target. + targetTime = this.clampTime_(start + this.minBufferTime_); + } + } else if (currentTime > end) { + targetTime = end; + } + + if (targetTime != currentTime) { + shaka.log.debug('Cannot seek outside segment availability window.'); + // Triggers another call to onSeeking_(). + this.video_.currentTime = targetTime; + return; + } + + this.onSeek_(); +}; + + +/** + * Clamps the given time to the segment availability window. + * + * @param {number} time The time in seconds. + * @return {number} The clamped time in seconds. 
+ * @private + */ +shaka.media.Playhead.prototype.clampTime_ = function(time) { + var start = this.timeline_.getSegmentAvailabilityStart(); + if (time < start) return start; + + var end = this.timeline_.getSegmentAvailabilityEnd(); + if (time > end) return end; + + return time; +}; + diff --git a/lib/media/presentation_timeline.js b/lib/media/presentation_timeline.js index e69eadcafe..a94821fbb6 100644 --- a/lib/media/presentation_timeline.js +++ b/lib/media/presentation_timeline.js @@ -60,12 +60,44 @@ shaka.media.PresentationTimeline = function( }; +/** + * @return {number} The presentation's duration in seconds. + * POSITIVE_INFINITY indicates that the presentation continues indefinitely. + */ +shaka.media.PresentationTimeline.prototype.getDuration = function() { + return this.duration_; +}; + + +/** + * Sets the presentation's duration. The duration may be updated at any time. + * + * @param {number} duration The presentation's duration in seconds. + * POSITIVE_INFINITY indicates that the presentation continues indefinitely. + */ +shaka.media.PresentationTimeline.prototype.setDuration = function(duration) { + shaka.asserts.assert(duration > 0, 'Timeline duration must be > 0'); + this.duration_ = duration; +}; + + +/** + * @return {?number} The presentation's segment availability duration. + * Always returns null for video-on-demand, and never returns null for live. + */ +shaka.media.PresentationTimeline.prototype.getSegmentAvailabilityDuration = + function() { + return this.segmentAvailabilityDuration_; +}; + + /** * Gets the presentation's current segment availability start time. Segments * ending at or before this time should be assumed to be unavailable. * * @return {number} The current segment availability start time, in seconds, - * relative to the start of the presentation. + * relative to the start of the presentation. Always returns 0 for + * video-on-demand. 
*/ shaka.media.PresentationTimeline.prototype.getSegmentAvailabilityStart = function() { @@ -84,7 +116,8 @@ shaka.media.PresentationTimeline.prototype.getSegmentAvailabilityStart = * starting after this time should be assumed to be unavailable. * * @return {number} The current segment availability end time, in seconds, - * relative to the start of the presentation. + * relative to the start of the presentation. Always returns the + * presentation's duration for video-on-demand. */ shaka.media.PresentationTimeline.prototype.getSegmentAvailabilityEnd = function() { @@ -95,30 +128,6 @@ shaka.media.PresentationTimeline.prototype.getSegmentAvailabilityEnd = }; -/** - * @return {boolean} true if the presentation has ended; otherwise, return - * false. - */ -shaka.media.PresentationTimeline.prototype.hasEnded = function() { - if (this.presentationStartTime_ == null) - return false; - - return this.getLiveEdge_() >= this.duration_; -}; - - -/** - * Sets the presentation's duration. The duration may be updated at any time. - * - * @param {number} duration The presentation's duration in seconds. - * POSITIVE_INFINITY indicates that the presentation continues indefinitely. - */ -shaka.media.PresentationTimeline.prototype.setDuration = function(duration) { - shaka.asserts.assert(duration > 0, 'Timeline duration must be > 0'); - this.duration_ = duration; -}; - - /** * @return {number} The current presentation time in seconds. * @private diff --git a/lib/media/streaming_engine.js b/lib/media/streaming_engine.js new file mode 100644 index 0000000000..8cfa448602 --- /dev/null +++ b/lib/media/streaming_engine.js @@ -0,0 +1,995 @@ +/** + * @license + * Copyright 2015 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +goog.provide('shaka.media.StreamingEngine'); + +goog.require('shaka.asserts'); +goog.require('shaka.media.MediaSourceEngine'); +goog.require('shaka.net.NetworkingEngine'); +goog.require('shaka.util.Error'); +goog.require('shaka.util.IDestroyable'); + + + +/** + * Creates a StreamingEngine. + * + * The StreamingEngine is responsible for creating SegmentIndexes (in the + * Manifest), and for downloading segments and passing them to the + * MediaSourceEngine. It manages audio, video, and text streams simultaneously + * and provides an interface to switch streams at the Stream level, i.e., it + * does not handle switching to alternate StreamSets or Periods directly. + * + * The StreamingEngine notifies its owner when Streams within a Period can be + * switched to and when another Period must be buffered, so its owner may + * switch to new Streams within that Period. + * + * The SegmentIndexes behind the Manifest may change at any time and the + * StreamingEngine does not care about these changes; however, it must be + * notified of new Periods, so that it can create the associated + * SegmentIndexes. + * + * Before anything else, the owner must call init() with an initial set of + * Streams, in particular, one Stream for each content type (these Streams + * should be from the same Period, but the StreamingEngine doesn't actually + * care). 
The owner must then call newPeriod() each time a new Period is added + * to the Manifest and seeked() each time the playhead moves to a new location + * within the presentation timeline (the owner may forego calling seeked() when + * the playhead moves to an invalid location). + * + * When the StreamingEngine calls onCanSwitch(p), the owner may call switch() + * with any Stream within Period p; when the StreamingEngine calls + * onBufferNewPeriod(p), the owner should call switch() with a Stream from + * Period p for each content type. Note: the StreamingEngine may call + * onBufferNewPeriod(p) before onCanSwitch(p), if this occurs, the owner must + * still wait to call switch() until onCanSwitch(p) is called. + * + * @param {shaka.media.StreamingEngine.Config} config The initial + * configuration. + * @param {!shaka.media.Playhead} playhead The Playhead. The caller retains + * ownership. + * @param {!shaka.media.MediaSourceEngine} mediaSourceEngine The + * MediaSourceEngine. The caller retains ownership. + * @param {!shaka.net.NetworkingEngine} netEngine + * @param {shakaExtern.Manifest} manifest + * @param {function(!shakaExtern.Period)} onCanSwitch Called when Streams + * within the given Period can be switched to. + * @param {function(!shakaExtern.Period)} onBufferNewPeriod Called when + * the given Period should begin buffering (for all content types). + * @param {function(!shaka.util.Error)} onError Called when an error occurs. + * @param {function()=} opt_onInitialStreamsSetup Optional callback which + * is called when the initial set of Streams have been setup. Intended + * to be used by tests. + * @param {function()=} opt_onStartupComplete Optional callback which + * is called when startup has completed. Intended to be used by tests. 
+ * + * @constructor + * @struct + * @implements {shaka.util.IDestroyable} + */ +shaka.media.StreamingEngine = function( + config, playhead, mediaSourceEngine, netEngine, manifest, + onCanSwitch, onBufferNewPeriod, onError, + opt_onInitialStreamsSetup, opt_onStartupComplete) { + /** @private {shaka.media.Playhead} */ + this.playhead_ = playhead; + + /** @private {shaka.media.MediaSourceEngine} */ + this.mediaSourceEngine_ = mediaSourceEngine; + + /** @private {shaka.net.NetworkingEngine} */ + this.netEngine_ = netEngine; + + /** @private {?shakaExtern.Manifest} */ + this.manifest_ = manifest; + + /** @private {?function(!shakaExtern.Period)} */ + this.onCanSwitch_ = onCanSwitch; + + /** @private {?function(!shakaExtern.Period)} */ + this.onBufferNewPeriod_ = onBufferNewPeriod; + + /** @private {?function(!shaka.util.Error)} */ + this.onError_ = onError; + + /** @private {?function()} */ + this.onInitialStreamsSetup_ = opt_onInitialStreamsSetup || null; + + /** @private {?function()} */ + this.onStartupComplete_ = opt_onStartupComplete || null; + + /** @private {shaka.media.StreamingEngine.Config} */ + this.config_ = config; + + /** + * Maps a Stream's ID to a boolean value which indicates if the Stream is + * ready to be used (i.e., if its SegmentIndex has been created). + * + * @private {Object.} + */ + this.isStreamReady_ = {}; + + /** + * Maps a content type, e.g., 'audio', 'video', or 'text', to a MediaState. + * + * @private {Object.} + */ + this.mediaStates_ = {}; + + /** + * Set to true once one segment from each of the initial set of Streams + * [i.e., those passed to init()] has been buffered. + * + * @private {boolean} + */ + this.startupComplete_ = false; + + /** @private {boolean} */ + this.destroyed_ = false; +}; + + +/** + * @typedef {{ + * rebufferingGoal: number, + * bufferingGoal: number, + * retryParameters: shakaExtern.RetryParameters + * }} + * + * @description + * The StreamingEngine's configuration options. 
+ * + * @property {number} rebufferingGoal + * The minimum number of seconds of content that must be buffered before + * playback can begin at startup or can continue after entering a + * rebuffering state. + * @property {number} bufferingGoal + * The number of seconds of content that the StreamingEngine will attempt to + * keep in buffer at all times (for each content type). This value must be + * greater than or equal to the rebuffering goal. + * @property {shakaExtern.RetryParameters} retryParameters + * The retry parameters for segment requests. + */ +shaka.media.StreamingEngine.Config; + + +/** + * @typedef {{ + * type: string, + * stream: shakaExtern.Stream, + * segmentPosition: ?number, + * drift: ?number, + * needInitSegment: boolean, + * needRebuffering: boolean, + * needPeriod: shakaExtern.Period, + * done: boolean, + * performingUpdate: boolean, + * updateTimer: ?number, + * waitingToClearBuffer: boolean, + * clearingBuffer: boolean + * }} + * + * @description + * Contains the state of a logical stream, i.e., a sequence of segmented data + * for a particular content type. At any given time there is a Stream object + * associated with the state of the logical stream. + * + * @property {string} type + * The stream's content type, e.g., 'audio', 'video', or 'text'. + * @property {shakaExtern.Stream} stream + * The current Stream. + * @property {?number} segmentPosition + * Indicates the current segment position, this value increases as segments + * are appended, but it may reset upon seeking. + * @property {?number} drift + * The number of seconds that the segments' timestamps are offset from the + * SegmentReferences' timestamps. For example, a positive value indicates + * that the segments are ahead of the SegmentReferences. + * @property {boolean} needInitSegment + * True indicates that |stream|'s init segment must be inserted before the + * next media segment is appended. 
+ * @property {boolean} needRebuffering + * True indicates that startup or re-buffering is required. + * @property {shakaExtern.Period} needPeriod + * Indicates which Period should be buffered. + * @property {boolean} done + * True indicates that the end of the buffer has hit the end of the + * presentation. + * @property {boolean} performingUpdate + * True indicates that an update is in progress. + * @property {?number} updateTimer + * A non-null value indicates that an update is scheduled. + * @property {boolean} waitingToClearBuffer + * True indicates that the buffer must be cleared after the current update + * finishes. + * @property {boolean} clearingBuffer + * True indicates that the buffer is being cleared. + */ +shaka.media.StreamingEngine.MediaState_; + + +/** @override */ +shaka.media.StreamingEngine.prototype.destroy = function() { + for (var type in this.mediaStates_) { + this.cancelUpdate_(this.mediaStates_[type]); + } + + this.playhead_ = null; + this.mediaSourceEngine_ = null; + this.netEngine_ = null; + this.manifest_ = null; + this.onCanSwitch_ = null; + this.onBufferNewPeriod_ = null; + this.isStreamReady_ = null; + this.mediaStates_ = null; + + this.destroyed_ = true; + + return Promise.resolve(); +}; + + +/** + * Initializes the StreamingEngine with an initial set of Streams. + * + * The StreamingEngine will setup the given Streams and then begin processing + * them right away. Once the StreamingEngine has inserted at least one segment + * from each Stream, it will begin setting up all other known Streams from all + * Periods. onCanSwitch_() is called whenever the Streams from a particular + * Period have all been setup. + * + * @param {!Object.} streamsByType A map from + * content type to Stream. + */ +shaka.media.StreamingEngine.prototype.init = function(streamsByType) { + // Determine which Period we must buffer. 
+ var playheadTime = this.playhead_.getTime(); + var needPeriod = this.findPeriodContainingTime_(playheadTime); + shaka.asserts.assert(needPeriod, + 'unable to find Period for time ' + playheadTime); + if (!needPeriod) return; + + /** @type {!Object.} */ + var typeConfig = {}; + + for (var type in streamsByType) { + var stream = streamsByType[type]; + + typeConfig[type] = + stream.mimeType + + (stream.codecs ? '; codecs="' + stream.codecs + '"' : ''); + this.mediaStates_[type] = { + stream: stream, + type: type, + segmentPosition: null, + drift: null, + needInitSegment: true, + needRebuffering: false, + needPeriod: needPeriod, + done: false, + performingUpdate: false, + updateTimer: null, + waitingToClearBuffer: false, + clearingBuffer: false + }; + } + + this.mediaSourceEngine_.init(typeConfig); + this.setDuration_(); + + // Setup the initial set of Streams and then start updating them. After + // startup completes onUpdate_() will call newPeriod() for each known Period, + // which will set up all Streams known at that time. + // TODO: Use MapUtils. + var streams = Object.keys(/** @type {!Object} */(streamsByType)) + .map(function(type) { return streamsByType[type]; }); + this.setupStreams_(streams).then(function() { + shaka.log.debug('Setup initial Streams!'); + + for (var type in this.mediaStates_) { + this.scheduleUpdate_(this.mediaStates_[type], 0); + } + + if (this.onInitialStreamsSetup_) + this.onInitialStreamsSetup_(); + }.bind(this)).catch(function(error) { + this.onError_(error); + }.bind(this)); +}; + + +/** + * Configures the StreamingEngine. + * + * @param {!shaka.media.StreamingEngine.Config} config + */ +shaka.media.StreamingEngine.prototype.configure = function(config) { + this.config_ = config; +}; + + +/** + * Notifies the StreamingEngine that a new Period is available. This only has + * to be called if |period| was created after init() was called. 
+ * + * @param {!shakaExtern.Period} period + */ +shaka.media.StreamingEngine.prototype.newPeriod = function(period) { + if (!this.startupComplete_) { + // If startup hasn't completed then we will setup the other Streams in + // the Manifest after it has, so we shouldn't setup |period| here. + shaka.log.debug('Deferring new Period setup until startup completes.'); + return; + } + + // Reset the duration to account for the new Period. + this.setDuration_(); + + var streams = period.streamSets + .map(function(ss) { return ss.streams; }) + .reduce(function(all, part) { return all.concat(part); }, []); + + this.setupStreams_(streams).then(function() { + if (this.destroyed_) return; + shaka.log.v1('Calling onCanSwitch_()...'); + this.onCanSwitch_(period); + }.bind(this)).catch(function(error) { + this.onError_(error); + }.bind(this)); +}; + + +/** + * Switches to the given Stream. |stream| may be from any StreamSet or any + * Period. + * + * @param {string} contentType |stream|'s content type. + * @param {shakaExtern.Stream} stream + */ +shaka.media.StreamingEngine.prototype.switch = function(contentType, stream) { + shaka.asserts.assert(this.isStreamReady_[stream.id], + 'Stream ' + stream.id + ' should be ready'); + if (!this.isStreamReady_[stream.id]) return; + + var mediaState = this.mediaStates_[contentType]; + shaka.asserts.assert(mediaState, 'mediaState should exist'); + if (!mediaState) return; + + var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState); + + if (mediaState.stream == stream) { + shaka.log.debug(logPrefix, 'already switched to Stream ' + stream.id); + return; + } + + shaka.log.debug(logPrefix, 'switching to Stream ' + stream.id); + + // If we switch to a Stream from a different Period then we must reset the + // segment position. 
+ var currentPeriod = this.findPeriodContainingStream_(mediaState.stream); + var nextPeriod = this.findPeriodContainingStream_(stream); + if (nextPeriod != currentPeriod) { + shaka.log.debug(logPrefix, 'switching Periods'); + mediaState.segmentPosition = null; + } + + mediaState.stream = stream; + mediaState.needInitSegment = true; + + if (mediaState.updateTimer == null) { + // Note: the update cycle stops when we've buffered to the end of the + // presentation or Period. + shaka.log.debug(logPrefix, 'restarting update cycle!'); + this.scheduleUpdate_(mediaState, 0); + } +}; + + +/** + * Notifies the StreamingEngine that the playhead has moved to a valid time + * within the presentation timeline. + */ +shaka.media.StreamingEngine.prototype.seeked = function() { + for (var type in this.mediaStates_) { + var mediaState = this.mediaStates_[type]; + var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState); + + if (!this.isStreamReady_[mediaState.stream.id]) { + // The Stream hasn't even been setup yet. + continue; + } + + if (mediaState.clearingBuffer) { + // We're already clearing the buffer, so we don't need to clear the + // buffer again. + shaka.log.v1(logPrefix, 'already clearing the buffer'); + continue; + } + + var playheadTime = this.playhead_.getTime(); + var bufferedAhead = this.mediaSourceEngine_.bufferedAheadOf( + type, playheadTime); + if (bufferedAhead > 0) { + // The playhead has moved into a buffered region, so we don't need to + // clear the buffer. + shaka.log.v1(logPrefix, + 'buffered seek:', + 'playheadTime=' + playheadTime, + 'bufferedAhead=' + bufferedAhead); + mediaState.waitingToClearBuffer = false; + continue; + } + + // The playhead has moved into an unbuffered region, so we might have to + // clear the buffer. + + if (mediaState.waitingToClearBuffer) { + // The only reason we should be waiting to clear the buffer is if we're + // performing an update. 
+ shaka.log.v1(logPrefix, 'unbuffered seek, already waiting'); + shaka.asserts.assert(mediaState.performingUpdate, + 'expected performingUpdate to be true'); + continue; + } + + if (this.mediaSourceEngine_.bufferStart(type) == null) { + // Nothing buffered. + shaka.log.v1(logPrefix, 'unbuffered seek, nothing buffered'); + if (mediaState.updateTimer == null) { + // Note: the update cycle stops when we've buffered to the end of the + // presentation or Period. + this.scheduleUpdate_(mediaState, 0); + } + continue; + } + + if (mediaState.performingUpdate) { + // We are performing an update, so we have to wait until it's finished. + // onUpdate_() will call handleUnbufferedSeek_() when the update has + // finished. + shaka.log.v1(logPrefix, 'unbuffered seek, currently updating'); + mediaState.waitingToClearBuffer = true; + continue; + } + + // An update may be scheduled, but we can just cancel it and clear the + // buffer right away. + shaka.log.v1(logPrefix, 'unbuffered seek, handling right now'); + this.cancelUpdate_(mediaState); + this.handleUnbufferedSeek_(mediaState); + } +}; + + +/** + * Sets up the given Streams. + * + * @param {!Array.} streams + * + * @return {!Promise} + * @private + */ +shaka.media.StreamingEngine.prototype.setupStreams_ = function(streams) { + var async = streams.map(function(stream) { + return this.isStreamReady_[stream.id] ? null : stream.createSegmentIndex(); + }.bind(this)); + + return Promise.all(async).then(function() { + if (this.destroyed_) return; + for (var i = 0; i < async.length; ++i) { + if (async[i] == null) continue; + var stream = streams[i]; + shaka.asserts.assert( + !this.isStreamReady_[stream.id], + 'Stream ' + stream.id + ' should not be ready yet.'); + shaka.log.v1('Setup Stream ' + stream.id); + this.isStreamReady_[stream.id] = true; + } + }.bind(this)); +}; + + +/** + * Sets the MediaSource's duration. 
+ * @private + */ +shaka.media.StreamingEngine.prototype.setDuration_ = function() { + var duration = this.manifest_.presentationTimeline.getDuration(); + if (duration < Number.POSITIVE_INFINITY) { + this.mediaSourceEngine_.setDuration(duration); + } else { + // TODO: Handle infinite durations (e.g., typical live case). + } +}; + + +/** + * Called when |mediaState|'s update timer has gone off or when |mediaState| + * has completed an update. + * + * @param {!shaka.media.StreamingEngine.MediaState_} mediaState + * @private + */ +shaka.media.StreamingEngine.prototype.onUpdate_ = function(mediaState) { + if (this.destroyed_) return; + + var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState); + + // Sanity check. + shaka.asserts.assert( + mediaState.performingUpdate || (mediaState.updateTimer != null), + logPrefix + ' unexpected call to onUpdate_()'); + shaka.asserts.assert( + !mediaState.clearingBuffer, + logPrefix + ' onUpdate_() should not be called when clearing the buffer'); + + mediaState.performingUpdate = false; + mediaState.updateTimer = null; + + // Handle unbuffered seeks. + if (mediaState.waitingToClearBuffer) { + // Note: handleUnbufferedSeek_() will schedule the next update. + shaka.log.debug(logPrefix, 'skipping update and handling seek instead'); + this.handleUnbufferedSeek_(mediaState); + return; + } + + this.update_(mediaState); + + // Check if we need to buffer from a different Period. + // TODO: Use MapUtils. + var mediaStates = Object.keys(/** @type {!Object} */(this.mediaStates_)) + .map(function(type) { return this.mediaStates_[type]; }.bind(this)); + var needSamePeriod = mediaStates.every(function(ms) { + return ms.needPeriod == mediaState.needPeriod; + }); + + if (needSamePeriod) { + var currentPeriod = this.findPeriodContainingStream_(mediaState.stream); + if (currentPeriod != mediaState.needPeriod) { + // We may call onBufferNewPeriod_() before we call onCanSwitch_(); the + // caller must handle that. 
+ shaka.log.v1('Calling onBufferNewPeriod_()...'); + this.onBufferNewPeriod_(mediaState.needPeriod); + } + } + + // Check if we've buffered to the end of the presentation. + if (mediaStates.every(function(ms) { return ms.done; })) { + shaka.log.v1('Calling endOfStream()...'); + this.mediaSourceEngine_.endOfStream(); + } + + // Handle startup and re- buffering. + this.playhead_.setBuffering( + mediaStates.some(function(ms) { return ms.needRebuffering; })); +}; + + +/** + * Updates the given MediaState. + * + * @param {!shaka.media.StreamingEngine.MediaState_} mediaState + * @private + */ +shaka.media.StreamingEngine.prototype.update_ = function(mediaState) { + var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState); + var stream = mediaState.stream; + + // Compute how far we've buffered ahead of the playhead. + var playheadTime = this.playhead_.getTime(); + var bufferedAhead = this.mediaSourceEngine_.bufferedAheadOf( + mediaState.type, playheadTime); + + // If we've buffered to the buffering goal then schedule an update. + var bufferingGoal = Math.max(this.config_.rebufferingGoal, + this.config_.bufferingGoal); + if (bufferedAhead >= bufferingGoal) { + shaka.log.v2(logPrefix, + 'buffering goal met:', + 'playheadTime=' + playheadTime, + 'bufferedAhead=' + bufferedAhead); + mediaState.needRebuffering = false; + // Schedule the next update such that if playback continues we won't be + // at the buffering goal the next time around. + this.scheduleUpdate_(mediaState, bufferedAhead - bufferingGoal + 0.1); + return; + } + + // Get the next timestamp we need. + var bufferEnd = this.mediaSourceEngine_.bufferEnd(mediaState.type); + var timeNeeded = bufferEnd != null ? bufferEnd : playheadTime; + + var timeline = this.manifest_.presentationTimeline; + + // Check if we've buffered to the end of the presentation. + if (timeNeeded >= timeline.getDuration()) { + // We shouldn't rebuffer if we're close to the end. 
+ shaka.log.v2(logPrefix, 'buffered to end of presentation'); + mediaState.needRebuffering = false; + mediaState.done = true; + return; + } + mediaState.done = false; + + // Handle startup and re- buffering state. + var rebufferingGoal = Math.max(this.manifest_.minBufferTime || 0, + this.config_.rebufferingGoal); + if ((!this.startupComplete_ && bufferedAhead < rebufferingGoal) || + (bufferedAhead <= 1)) { + shaka.log.v2(logPrefix, 'need startup or re- buffering'); + mediaState.needRebuffering = true; + } else if (bufferedAhead >= rebufferingGoal) { + mediaState.needRebuffering = false; + } + + // Get the current Period. This will only be null if |stream| is not part + // of the Manifest, which should never happen. + var currentPeriod = this.findPeriodContainingStream_(stream); + shaka.asserts.assert( + currentPeriod, + logPrefix + ' Stream ' + stream.id + ' ' + + 'is not contained within the Manifest'); + if (!currentPeriod) return; + + // Check if we need to begin buffering from a different Period. We do this + // before checking segment availability since the new Period may become + // available once we actually have to buffer it. + var needPeriod = this.findPeriodContainingTime_(timeNeeded); + if (needPeriod && (needPeriod != currentPeriod)) { + shaka.log.debug(logPrefix, + 'need Period:', + 'playheadTime=' + playheadTime, + 'bufferEnd=' + bufferEnd, + 'needPeriod.startTime=' + needPeriod.startTime, + 'currentPeriod.startTime=' + currentPeriod.startTime); + mediaState.needPeriod = needPeriod; + return; + } + + // Check segment availability. + if (timeNeeded < timeline.getSegmentAvailabilityStart() || + timeNeeded > timeline.getSegmentAvailabilityEnd()) { + // The next segment is not available. In the usual case, this occurs when + // we've buffered to the live-edge of a live stream, so try another update + // in a second. In the degenerate case, this may occur if the playhead is + // outside the segment availability window. 
+ shaka.log.v1(logPrefix, + 'next segment is outside segment availability window:', + 'playheadTime=' + playheadTime, + 'bufferEnd=' + bufferEnd); + this.scheduleUpdate_(mediaState, 1); + return; + } + + // Find the next segment we need. + var periodTime = timeNeeded - currentPeriod.startTime - mediaState.drift; + var position = mediaState.segmentPosition != null ? + mediaState.segmentPosition + 1 : + stream.findSegmentPosition(periodTime); + var reference = position != null ? + stream.getSegmentReference(position) : + null; + if (!reference) { + // The next segment is unknown. This should never happen in the usual case + // because we handle segment availability and Period transitions above, so + // we should always have a segment. If this does happen then either the + // manifest is not updating fast enough for live presentations, or the + // manifest is not complete. + shaka.log.debug(logPrefix, + 'next segment does not exist:', + 'playheadTime=' + playheadTime, + 'bufferEnd=' + bufferEnd, + 'currentPeriod.startTime=' + currentPeriod.startTime, + 'mediaState.drift=' + mediaState.drift, + 'position=' + position); + this.scheduleUpdate_(mediaState, 1); + return; + } + shaka.asserts.assert( + reference.position == position, + 'reference.position=' + reference.position + ' ' + + 'should equal position=' + position); + + shaka.log.v1(logPrefix, + 'fetch and append:', + 'playheadTime=' + playheadTime, + 'bufferEnd=' + bufferEnd, + 'currentPeriod.startTime=' + currentPeriod.startTime, + 'mediaState.drift=' + mediaState.drift, + 'reference.position=' + reference.position, + 'reference.startTime=' + reference.startTime); + this.fetchAndAppend_(mediaState, reference, currentPeriod.startTime); +}; + + +/** + * Fetches and appends the given segment. Appends |mediaState|'s Stream's init + * segment if needed; sets the timestamp offset if needed; updates + * |mediaState|'s |needInitSegment| and |segmentPosition| fields; and schedules + * another update after completing. 
+ *
+ * @param {!shaka.media.StreamingEngine.MediaState_} mediaState
+ * @param {!shaka.media.SegmentReference} reference
+ * @param {number} periodStartTime
+ *
+ * @private
+ */
+shaka.media.StreamingEngine.prototype.fetchAndAppend_ = function(
+ mediaState, reference, periodStartTime) {
+ var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);
+ var stream = mediaState.stream;
+
+ mediaState.performingUpdate = true;
+
+ // Append init segment if needed.
+ var appendInit;
+ if (mediaState.needInitSegment && stream.initSegmentReference) {
+ var fetchInit = this.fetch_(stream.initSegmentReference);
+ appendInit = fetchInit.then(function(initSegment) {
+ if (this.destroyed_) return;
+ shaka.log.v2(logPrefix, 'appending init segment');
+ return this.mediaSourceEngine_.appendBuffer(mediaState.type, initSegment);
+ }.bind(this));
+ } else {
+ appendInit = Promise.resolve();
+ }
+
+ // We may set |needInitSegment| to true in switch(), so set it to false here,
+ // since we want it to remain true if switch is called.
+ mediaState.needInitSegment = false;
+
+ // We may set |segmentPosition| to null in switch(), so set it to the next
+ // position here, since we want it to remain null if switch is called.
+ mediaState.segmentPosition = reference.position;
+
+ // Set the timestamp offset immediately after appending the init segment.
+ // TODO: Find a simple way to do this when switch() is called instead of
+ // here.
+ var timestampOffset = periodStartTime - stream.presentationTimeOffset;
+ var p = appendInit.then(function() {
+ if (this.destroyed_) return;
+ shaka.log.v2(logPrefix, 'setting timestamp offset:', timestampOffset);
+ return this.mediaSourceEngine_.setTimestampOffset(
+ mediaState.type, timestampOffset);
+ }.bind(this));
+
+ var fetchSegment = this.fetch_(reference);
+ Promise.all([fetchSegment, p]).then(function(results) {
+ if (this.destroyed_) return;
+
+ shaka.log.v1(logPrefix, 'appending media segment');
+
+ // Append actual segment. 
+ var segment = results[0]; + shaka.asserts.assert(segment, logPrefix + ' segment should not be null'); + + return this.mediaSourceEngine_.appendBuffer(mediaState.type, segment); + }.bind(this)).then(function() { + if (this.destroyed_) return; + shaka.log.v1(logPrefix, 'appended media segment'); + + // Compute drift if needed. + if (mediaState.drift == null) { + var bufferStart = this.mediaSourceEngine_.bufferStart(mediaState.type); + if (bufferStart != null) { + mediaState.drift = bufferStart - reference.startTime - periodStartTime; + shaka.log.debug(logPrefix, 'drift=', mediaState.drift); + } else { + return Promise.reject(new shaka.util.Error( + shaka.util.Error.Category.MEDIA, + shaka.util.Error.Code.BAD_SEGMENT)); + } + // We clear the segment position after the first segment is inserted + // because the drift may be large enough such that the playhead may be + // outside the segment we just inserted, we'll recompute the segment + // position in the next update. + mediaState.segmentPosition = null; + } + + // Update right away. + this.scheduleUpdate_(mediaState, 0); + + // Subtlety: handleStartup_() calls onStartupComplete_() which may call + // seeked() so we must schedule an update beforehand so |updateTimer| is in + // the right state. + this.handleStartup_(); + }.bind(this)).catch(function(error) { + this.onError_(error); + }.bind(this)); +}; + + +/** + * Sets up all known Periods if startup just completed. + * @private + */ +shaka.media.StreamingEngine.prototype.handleStartup_ = function() { + if (this.startupComplete_) + return; + + // TODO: Use MapUtils. + var mediaStates = Object.keys(/** @type {!Object} */(this.mediaStates_)) + .map(function(type) { return this.mediaStates_[type]; }.bind(this)); + this.startupComplete_ = mediaStates.every(function(ms) { + return ms.drift != null; + }); + + if (!this.startupComplete_) + return; + + shaka.log.debug('Startup complete!'); + + // Setup all known Periods. 
+ for (var i = 0; i < this.manifest_.periods.length; ++i) {
+ this.newPeriod(this.manifest_.periods[i]);
+ }
+
+ if (this.onStartupComplete_)
+ this.onStartupComplete_();
+};
+
+
+/**
+ * @param {number} time The time, in seconds, relative to the start of the
+ * presentation.
+ * @return {?shakaExtern.Period} the last Period which starts at or before
+ * |time| (i.e., the Period containing |time|), or null if no such Period
+ * exists.
+ * @private
+ */
+shaka.media.StreamingEngine.prototype.findPeriodContainingTime_ = function(
+ time) {
+ for (var i = this.manifest_.periods.length - 1; i >= 0; --i) {
+ var period = this.manifest_.periods[i];
+ if (time >= period.startTime)
+ return period;
+ }
+ return null;
+};
+
+
+/**
+ * @param {!shakaExtern.Stream} stream
+ * @return {?shakaExtern.Period} the Period which contains |stream|, or null
+ * if no Period contains |stream|.
+ * @private
+ */
+shaka.media.StreamingEngine.prototype.findPeriodContainingStream_ = function(
+ stream) {
+ for (var i = 0; i < this.manifest_.periods.length; ++i) {
+ var period = this.manifest_.periods[i];
+ for (var j = 0; j < period.streamSets.length; ++j) {
+ var streamSet = period.streamSets[j];
+ var index = streamSet.streams.indexOf(stream);
+ if (index >= 0)
+ return period;
+ }
+ }
+ return null;
+};
+
+
+/**
+ * Fetches the given segment.
+ *
+ * @param {(!shaka.media.InitSegmentReference|!shaka.media.SegmentReference)}
+ * reference
+ *
+ * @return {!Promise.<!ArrayBuffer>}
+ * @private
+ */
+shaka.media.StreamingEngine.prototype.fetch_ = function(reference) {
+ var requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
+
+ // Set Range header if needed.
+ var headers = {};
+ if ((reference.startByte != 0) || (reference.endByte != null)) {
+ headers['Range'] = 'bytes=' + reference.startByte + '-' +
+ (reference.endByte || '');
+ }
+
+ // TODO: Refactor this to use forthcoming NetworkingEngine factory function. 
+ var request = { + uris: reference.uris, + method: 'GET', + body: null, + headers: headers, + allowCrossSiteCredentials: false, + retryParameters: this.config_.retryParameters + }; + + shaka.log.v1('Fetching:', reference); + var p = this.netEngine_.request(requestType, request); + return p.then(function(response) { + return response.data; + }); +}; + + +/** + * Handles an unbuffered seek by clearing the buffer and then scheduling an + * update. + * + * @param {!shaka.media.StreamingEngine.MediaState_} mediaState + * @private + */ +shaka.media.StreamingEngine.prototype.handleUnbufferedSeek_ = function( + mediaState) { + var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState); + var stream = mediaState.stream; + + shaka.asserts.assert( + !mediaState.performingUpdate && (mediaState.updateTimer == null), + logPrefix + ' unexpected call to handleUnbufferedSeek_()'); + + mediaState.segmentPosition = null; + mediaState.waitingToClearBuffer = false; + mediaState.clearingBuffer = true; + + shaka.log.v1(logPrefix, 'clearing buffer'); + + this.mediaSourceEngine_.clear(mediaState.type).then(function() { + if (this.destroyed_) return; + shaka.log.v1(logPrefix, 'cleared buffer'); + mediaState.clearingBuffer = false; + this.scheduleUpdate_(mediaState, 0); + }.bind(this)); +}; + + +/** + * Schedules |mediaState|'s next update. + * + * @param {!shaka.media.StreamingEngine.MediaState_} mediaState + * @param {number} delay The delay in seconds. + * @private + */ +shaka.media.StreamingEngine.prototype.scheduleUpdate_ = function( + mediaState, delay) { + var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState); + shaka.log.v1(logPrefix, 'updating in ' + delay + ' seconds'); + shaka.asserts.assert(mediaState.updateTimer == null, + logPrefix + ' an update is already scheduled'); + mediaState.updateTimer = window.setTimeout( + this.onUpdate_.bind(this, mediaState), delay * 1000); +}; + + +/** + * Cancels |mediaState|'s next update if one exists. 
+ * + * @param {!shaka.media.StreamingEngine.MediaState_} mediaState + * @private + */ +shaka.media.StreamingEngine.prototype.cancelUpdate_ = function(mediaState) { + if (mediaState.updateTimer != null) { + window.clearTimeout(mediaState.updateTimer); + mediaState.updateTimer = null; + } +}; + + +/** + * @param {shaka.media.StreamingEngine.MediaState_} mediaState + * @return {string} A log prefix of the form ($CONTENT_TYPE:$STREAM_ID), e.g., + * "(audio:5)" or "(video:hd)". + * @private + */ +shaka.media.StreamingEngine.logPrefix_ = function(mediaState) { + return '(' + mediaState.type + ':' + mediaState.stream.id + ')'; +}; + diff --git a/lib/media/time_ranges_utils.js b/lib/media/time_ranges_utils.js new file mode 100644 index 0000000000..2c39479a4d --- /dev/null +++ b/lib/media/time_ranges_utils.js @@ -0,0 +1,77 @@ +/** + * @license + * Copyright 2015 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +goog.provide('shaka.media.TimeRangesUtils'); + + +/** + * @namespace shaka.media.TimeRangesUtils + * @exportDoc + * @summary A set of utility functions for dealing with TimeRanges objects. + */ + + +/** + * Gets the first timestamp in buffer. + * + * @param {TimeRanges} b + * @return {?number} The first buffered timestamp, in seconds, if |buffered| + * is non-empty; otherwise, return null. + */ +shaka.media.TimeRangesUtils.bufferStart = function(b) { + if (!b) return null; + return b.length ? 
b.start(0) : null; +}; + + +/** + * Gets the last timestamp in buffer. + * + * @param {TimeRanges} b + * @return {?number} The last buffered timestamp, in seconds, if |buffered| + * is non-empty; otherwise, return null. + */ +shaka.media.TimeRangesUtils.bufferEnd = function(b) { + if (!b) return null; + return b.length ? b.end(b.length - 1) : null; +}; + + +/** + * Computes how far ahead of the given timestamp is buffered. + * + * @param {TimeRanges} buffered + * @param {number} time + * @return {number} The number of seconds buffered, in seconds, ahead of the + * given time. + */ +shaka.media.TimeRangesUtils.bufferedAheadOf = function( + buffered, time) { + if (!buffered) return 0; + // NOTE: On IE11, buffered ranges may show appended data before the associated + // append operation is complete. + var fudge = 0.000001; // 1us + // NOTE: The 1us fudge is needed on Safari, where removal up to X may leave a + // range which starts at X + 1us. + for (var i = 0; i < buffered.length; ++i) { + if (time + fudge >= buffered.start(i) && time < buffered.end(i)) { + return buffered.end(i) - time; + } + } + return 0; +}; + diff --git a/lib/util/error.js b/lib/util/error.js index f356616982..b029c57ade 100644 --- a/lib/util/error.js +++ b/lib/util/error.js @@ -225,5 +225,12 @@ shaka.util.Error.Code = { 'DASH_WEBM_MISSING_INIT': 28, /** The DASH Manifest contained an unsupported container format */ - 'DASH_UNSUPPORTED_CONTAINER': 30 + 'DASH_UNSUPPORTED_CONTAINER': 29, + + /** + * The StreamingEngine inserted a media segment, but the segment did not + * contain any actual media content. 
+ */ + 'BAD_SEGMENT': 30 }; + diff --git a/spec/media_source_engine_spec.js b/spec/media_source_engine_spec.js index e6e8200479..0cefed3847 100644 --- a/spec/media_source_engine_spec.js +++ b/spec/media_source_engine_spec.js @@ -76,6 +76,47 @@ describe('MediaSourceEngine', function() { }); }); + describe('bufferStart and bufferEnd', function() { + beforeEach(function() { + mediaSourceEngine.init({'audio': 'audio/foo'}); + }); + + it('returns correct timestamps for one range', function() { + audioSourceBuffer.buffered.length = 1; + audioSourceBuffer.buffered.start.and.returnValue(0); + audioSourceBuffer.buffered.end.and.returnValue(10); + + expect(mediaSourceEngine.bufferStart('audio', 0)).toBeCloseTo(0); + expect(mediaSourceEngine.bufferEnd('audio', 0)).toBeCloseTo(10); + }); + + it('returns correct timestamps for multiple ranges', function() { + audioSourceBuffer.buffered.length = 2; + + audioSourceBuffer.buffered.start.and.callFake(function(i) { + if (i == 0) return 5; + if (i == 1) return 20; + throw new Error('Unexpected index'); + }); + + audioSourceBuffer.buffered.end.and.callFake(function(i) { + if (i == 0) return 10; + if (i == 1) return 30; + throw new Error('Unexpected index'); + }); + + expect(mediaSourceEngine.bufferStart('audio', 0)).toBeCloseTo(5); + expect(mediaSourceEngine.bufferEnd('audio', 0)).toBeCloseTo(30); + }); + + it('returns null if there are no ranges', function() { + audioSourceBuffer.buffered.length = 0; + + expect(mediaSourceEngine.bufferStart('audio', 0)).toBeNull(); + expect(mediaSourceEngine.bufferEnd('audio', 0)).toBeNull(); + }); + }); + describe('bufferedAheadOf', function() { beforeEach(function() { mediaSourceEngine.init({'audio': 'audio/foo'}); @@ -249,7 +290,7 @@ describe('MediaSourceEngine', function() { }); }); - describe('remove', function() { + describe('remove and clear', function() { beforeEach(function() { captureEvents(audioSourceBuffer, ['updateend', 'error']); captureEvents(videoSourceBuffer, ['updateend', 'error']); 
@@ -373,6 +414,31 @@ describe('MediaSourceEngine', function() { done(); }); }); + + it('clears the given data', function(done) { + mockMediaSource.durationGetter_.and.returnValue(20); + mediaSourceEngine.clear('audio').then(function() { + expect(audioSourceBuffer.remove.calls.count()).toBe(1); + expect(audioSourceBuffer.remove.calls.argsFor(0)[0]).toBe(0); + expect(audioSourceBuffer.remove.calls.argsFor(0)[1] >= 20).toBeTruthy(); + done(); + }); + audioSourceBuffer.updateend(); + }); + }); + + describe('setTimestampOffset', function() { + beforeEach(function() { + mediaSourceEngine.init({'audio': 'audio/foo'}); + }); + + it('sets the timestamp offset', function(done) { + expect(audioSourceBuffer.timestampOffset).toBe(0); + mediaSourceEngine.setTimestampOffset('audio', 10).then(function() { + expect(audioSourceBuffer.timestampOffset).toBe(10); + done(); + }); + }); }); describe('endOfStream', function() { @@ -719,7 +785,8 @@ describe('MediaSourceEngine', function() { length: 0, start: jasmine.createSpy('buffered.start'), end: jasmine.createSpy('buffered.end') - } + }, + timestampOffset: 0 }; } diff --git a/spec/playhead_spec.js b/spec/playhead_spec.js new file mode 100644 index 0000000000..403c9ac57d --- /dev/null +++ b/spec/playhead_spec.js @@ -0,0 +1,292 @@ +/** + * @license + * Copyright 2015 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +goog.require('shaka.media.Playhead'); +goog.require('shaka.media.PresentationTimeline'); + +describe('Playhead', function() { + var video; + var timeline; + + // Callback to Playhead to simulate 'loadedmetadata' event from |video|. + var videoOnLoadedMetadata; + + // Callback to Playhead to simulate 'seeking' event from |video|. + var videoOnSeeking; + + // Callback to us from Playhead when the buffering state changes. + var onBuffering; + + // Callback to us from Playhead when a valid 'seeking' event occurs. + var onSeek; + + beforeEach(function() { + video = createMockVideo(); + timeline = createMockPresentationTimeline(); + + videoOnLoadedMetadata = undefined; + videoOnSeeking = undefined; + + onBuffering = jasmine.createSpy('onBuffering'); + onSeek = jasmine.createSpy('onSeek'); + + video.addEventListener.and.callFake(function(eventName, f, bubbles) { + if (eventName == 'loadedmetadata') { + videoOnLoadedMetadata = f; + } else if (eventName == 'seeking') { + videoOnSeeking = f; + } else { + throw new Error('Unexpected event:' + eventName); + } + }); + + timeline.getSegmentAvailabilityStart.and.returnValue(5); + timeline.getSegmentAvailabilityEnd.and.returnValue(60); + + // These tests should not cause these methods to be invoked. 
+ timeline.getSegmentAvailabilityDuration.and.throwError(new Error()); + timeline.getDuration.and.throwError(new Error()); + timeline.setDuration.and.throwError(new Error()); + }); + + describe('getTime', function() { + it('returns the correct time when readyState starts at 0', function() { + var playhead = new shaka.media.Playhead( + video, + timeline, + 10 /* minBufferTime */, + 5 /* startTime */, + onBuffering, onSeek); + + expect(video.addEventListener).toHaveBeenCalledWith( + 'loadedmetadata', videoOnLoadedMetadata, false); + expect(video.addEventListener.calls.count()).toBe(1); + + expect(playhead.getTime()).toBe(5); + expect(video.currentTime).toBe(0); + + video.readyState = HTMLMediaElement.HAVE_METADATA; + videoOnLoadedMetadata(); + + expect(video.addEventListener).toHaveBeenCalledWith( + 'seeking', videoOnSeeking, false); + expect(video.addEventListener.calls.count()).toBe(2); + + expect(playhead.getTime()).toBe(5); + expect(video.currentTime).toBe(5); + + video.currentTime = 6; + expect(playhead.getTime()).toBe(6); + + // getTime() should always clamp the time even if the video element + // doesn't dispatch 'seeking' events. + video.currentTime = 120; + expect(playhead.getTime()).toBe(60); + + video.currentTime = 0; + expect(playhead.getTime()).toBe(5); + }); + + it('returns the correct time when readyState starts at 1', function() { + video.readyState = HTMLMediaElement.HAVE_METADATA; + + var playhead = new shaka.media.Playhead( + video, + timeline, + 10 /* minBufferTime */, + 5 /* startTime */, + onBuffering, onSeek); + + expect(playhead.getTime()).toBe(5); + expect(video.currentTime).toBe(5); + + video.currentTime = 6; + expect(playhead.getTime()).toBe(6); + }); + }); + + + it('sets/unsets buffering state', function() { + var playhead = new shaka.media.Playhead( + video, + timeline, + 10 /* minBufferTime */, + 5 /* startTime */, + onBuffering, onSeek); + + // Set to 2 to ensure Playhead restores the correct rate. 
+ video.playbackRate = 2; + + playhead.setBuffering(false); + expect(onBuffering).not.toHaveBeenCalled(); + expect(video.playbackRate).toBe(2); + + playhead.setBuffering(true); + expect(onBuffering).toHaveBeenCalledWith(true); + expect(video.playbackRate).toBe(0); + + onBuffering.calls.reset(); + + playhead.setBuffering(true); + expect(onBuffering).not.toHaveBeenCalled(); + expect(video.playbackRate).toBe(0); + + playhead.setBuffering(false); + expect(onBuffering).toHaveBeenCalledWith(false); + expect(video.playbackRate).toBe(2); + }); + + it('clamps seeks', function() { + video.readyState = HTMLMediaElement.HAVE_METADATA; + + video.buffered = { + length: 1, + start: function(i) { + if (i == 0) return 5; + throw new Error('Unexpected index'); + }, + end: function(i) { + if (i == 0) return 25; + throw new Error('Unexpected index'); + } + }; + + timeline.getSegmentAvailabilityDuration.and.returnValue(30); + + var onBuffering = jasmine.createSpy('onBuffering'); + var onSeek = jasmine.createSpy('onSeek'); + var playhead = new shaka.media.Playhead( + video, + timeline, + 10 /* minBufferTime */, + 5 /* startTime */, + onBuffering, onSeek); + + expect(playhead.getTime()).toBe(5); + expect(video.currentTime).toBe(5); + + // Calling videoOnSeeking() is like dispatching a 'seeking' event. 
So, each
+ // time we change the video's current time, or Playhead changes the video's
+ // current time, we must call videoOnSeeking().
+
+ video.currentTime = 6;
+ videoOnSeeking();
+ expect(video.currentTime).toBe(6);
+ expect(playhead.getTime()).toBe(6);
+ expect(onSeek).toHaveBeenCalled();
+
+ onSeek.calls.reset();
+
+ video.currentTime = 120;
+ videoOnSeeking();
+ expect(video.currentTime).toBe(60);
+ expect(playhead.getTime()).toBe(60);
+ expect(onSeek).not.toHaveBeenCalledWith();
+ videoOnSeeking();
+ expect(onSeek).toHaveBeenCalled();
+
+ onSeek.calls.reset();
+
+ video.currentTime = 0;
+ videoOnSeeking();
+ expect(video.currentTime).toBe(5);
+ expect(playhead.getTime()).toBe(5);
+ expect(onSeek).not.toHaveBeenCalledWith();
+ videoOnSeeking();
+ expect(onSeek).toHaveBeenCalled();
+
+ onSeek.calls.reset();
+
+ video.currentTime = 20;
+ videoOnSeeking();
+ expect(video.currentTime).toBe(20);
+ expect(playhead.getTime()).toBe(20);
+ expect(onSeek).toHaveBeenCalled();
+
+ // Now remove start of buffer so we can check that current time is
+ // adjusted to take into account buffering. Note segment availability
+ // window is set so the presentation is live.
+ video.buffered = {
+ length: 1,
+ start: function(i) {
+ if (i == 0) return 20;
+ throw new Error('Unexpected index');
+ },
+ end: function(i) {
+ if (i == 0) return 35; // Like one segment.
+ throw new Error('Unexpected index');
+ }
+ };
+
+ onSeek.calls.reset();
+
+ video.currentTime = 0;
+ videoOnSeeking();
+ expect(video.currentTime).toBe(5 + 10);
+ expect(playhead.getTime()).toBe(5 + 10);
+ expect(onSeek).not.toHaveBeenCalledWith();
+ videoOnSeeking();
+ expect(onSeek).toHaveBeenCalled();
+
+ onSeek.calls.reset();
+
+ video.currentTime = 6;
+ videoOnSeeking();
+ expect(video.currentTime).toBe(5 + 10);
+ expect(playhead.getTime()).toBe(5 + 10);
+ expect(onSeek).not.toHaveBeenCalledWith();
+ videoOnSeeking();
+ expect(onSeek).toHaveBeenCalled();
+
+ // Now do the same thing but for VOD. 
+ timeline.getSegmentAvailabilityDuration.and.returnValue(null); + + onSeek.calls.reset(); + + video.currentTime = 6; + videoOnSeeking(); + expect(video.currentTime).toBe(6); + expect(playhead.getTime()).toBe(6); + expect(onSeek).toHaveBeenCalled(); + }); + + function createMockVideo() { + return { + currentTime: 0, + readyState: 0, + playbackRate: 1, + buffered: null, + addEventListener: jasmine.createSpy('addEventListener'), + removeEventListener: jasmine.createSpy('removeEventListener'), + dispatchEvent: jasmine.createSpy('dispatchEvent') + }; + } + + function createMockPresentationTimeline() { + return { + getDuration: jasmine.createSpy('getDuration'), + setDuration: jasmine.createSpy('setDuration'), + getSegmentAvailabilityDuration: + jasmine.createSpy('getSegmentAvailabilityDuration'), + getSegmentAvailabilityStart: + jasmine.createSpy('getSegmentAvailabilityStart'), + getSegmentAvailabilityEnd: + jasmine.createSpy('getSegmentAvailabilityEnd') + }; + } +}); + diff --git a/spec/presentation_timeline_spec.js b/spec/presentation_timeline_spec.js index 7a6549c600..6b7dc1fc3d 100644 --- a/spec/presentation_timeline_spec.js +++ b/spec/presentation_timeline_spec.js @@ -171,50 +171,26 @@ describe('PresentationTimeline', function() { }); }); - describe('hasEnded', function() { - var longDuration = 5 * 365 * 24 * 60 * 60; // 5 years. 
- - it('returns false for VOD', function() { + describe('getDuration', function() { + it('returns the correct value for VOD', function() { setElapsed(0); - var timeline1 = new shaka.media.PresentationTimeline(60, null, null); - expect(timeline1.hasEnded()).toBe(false); - - setElapsed(longDuration); - expect(timeline1.hasEnded()).toBe(false); + var timeline = new shaka.media.PresentationTimeline(60, null, null); + expect(timeline.getDuration()).toBe(60); - setElapsed(0); - var timeline2 = new shaka.media.PresentationTimeline( + timeline = new shaka.media.PresentationTimeline( Number.POSITIVE_INFINITY, null, null); - expect(timeline2.hasEnded()).toBe(false); - - setElapsed(longDuration); - expect(timeline2.hasEnded()).toBe(false); - }); - - it('returns the correct value for live without duration', function() { - setElapsed(0); - var timeline = new shaka.media.PresentationTimeline( - Number.POSITIVE_INFINITY, Date.now() / 1000.0, 20); - expect(timeline.hasEnded()).toBe(false); - - setElapsed(longDuration); - expect(timeline.hasEnded()).toBe(false); + expect(timeline.getDuration()).toBe(Number.POSITIVE_INFINITY); }); - it('returns the correct value for live with duration', function() { + it('returns the correct value for live', function() { setElapsed(0); var timeline = new shaka.media.PresentationTimeline( 60, Date.now() / 1000.0, 20); - expect(timeline.hasEnded()).toBe(false); + expect(timeline.getDuration()).toBe(60); - setElapsed(59); - expect(timeline.hasEnded()).toBe(false); - - setElapsed(60); - expect(timeline.hasEnded()).toBe(true); - - setElapsed(61); - expect(timeline.hasEnded()).toBe(true); + timeline = new shaka.media.PresentationTimeline( + Number.POSITIVE_INFINITY, Date.now() / 1000.0, 20); + expect(timeline.getDuration()).toBe(Number.POSITIVE_INFINITY); }); }); @@ -232,14 +208,33 @@ describe('PresentationTimeline', function() { setElapsed(0); var timeline = new shaka.media.PresentationTimeline( Number.POSITIVE_INFINITY, Date.now() / 1000.0, 20); - 
expect(timeline.hasEnded()).toBe(false); - setElapsed(61); - expect(timeline.hasEnded()).toBe(false); + setElapsed(30); + expect(timeline.getSegmentAvailabilityEnd()).toBe(30); + + setElapsed(90); + expect(timeline.getSegmentAvailabilityEnd()).toBe(90); timeline.setDuration(60); - expect(timeline.hasEnded()).toBe(true); + expect(timeline.getSegmentAvailabilityEnd()).toBe(60); }); }); + + it('getSegmentAvailabilityDuration', function() { + setElapsed(0); + var timeline = new shaka.media.PresentationTimeline(60, null, null); + expect(timeline.getSegmentAvailabilityDuration()).toBeNull(); + + timeline = new shaka.media.PresentationTimeline( + Number.POSITIVE_INFINITY, Date.now() / 1000.0, 20); + expect(timeline.getSegmentAvailabilityDuration()).toBe(20); + + timeline = new shaka.media.PresentationTimeline( + Number.POSITIVE_INFINITY, + Date.now() / 1000.0, + Number.POSITIVE_INFINITY); + expect(timeline.getSegmentAvailabilityDuration()).toBe( + Number.POSITIVE_INFINITY); + }); }); diff --git a/spec/streaming_engine_spec.js b/spec/streaming_engine_spec.js new file mode 100644 index 0000000000..e542900ce3 --- /dev/null +++ b/spec/streaming_engine_spec.js @@ -0,0 +1,1139 @@ +/** + * @license + * Copyright 2015 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +goog.require('shaka.media.StreamingEngine'); +goog.require('shaka.media.TextSourceBuffer'); +goog.require('shaka.net.NetworkingEngine'); +goog.require('shaka.util.PublicPromise'); + +describe('StreamingEngine', function() { + var originalSetTimeout; + var currentTime; + + var dummyInitSegments; + var dummySegments; + + var playhead; + var playheadTime; + var playing; + + var initSegments; + var segments; + var mediaSourceEngine; + + var netEngine; + var timeline; + + var audioStream1; + var videoStream1; + var textStream1; + var alternateVideoStream1; + + var audioStream2; + var videoStream2; + var textStream2; + + var manifest; + + // Dummy sizes for media segments. + var segmentSizes = {'audio': 1000, 'video': 10000, 'text': 500}; + + var onCanSwitch; + var onBufferNewPeriod; + var onError; + var onInitialStreamsSetup; + var onStartupComplete; + var streamingEngine; + + beforeAll(function() { + originalSetTimeout = window.setTimeout; + }); + + /** + * Processes some number of "instantaneous" operations. + * + * Instantaneous operations include Promise resolution (e.g., + * Promise.resolve()) and 0 second timeouts. This recursively processes + * these operations, so if for example, one wrote + * + * Promise.resolve().then(function() { + * var callback = function() { + * Promise.resolve().then(function() { + * console.log('Hello world!'); + * }); + * } + * window.setTimeout(callback, 0); + * }); + * + * var p = processInstantaneousOperations(10); + * + * After |p| resolves, "Hello world!" will be written to the console. + * + * The parameter |n| controls the number of rounds to perform. This is + * necessary since we cannot determine when there are no timeouts remaining + * at the current time; to determine this we would require access to hidden + * variables in Jasmine's Clock implementation. + * + * @param {number} n The number of rounds to perform. + * @return {!Promise} + * TODO: Move to separate file. + * TODO: Cleanup with patch to jasmine-core. 
+ */ + function processInstantaneousOperations(n) { + if (n <= 0) return Promise.resolve(); + return realDelay(0.001).then(function() { + jasmine.clock().tick(0); + return processInstantaneousOperations(--n); + }); + } + + /** + * Calls the real window.setTimeout(). + * @param {number} seconds + * TODO: Move to separate file. + */ + function realDelay(seconds) { + return new Promise(function(resolve, reject) { + originalSetTimeout(resolve, seconds * 1000); + }); + } + + /** + * Fakes an event loop. Each tick processes some number of instantaneous + * operations and advances the simulated clock forward by 1 second. If + * |playing| is true then each tick also advances the fake playhead forward + * by 1 second as well. + * + * @return {!Promise} A promise which resolves after 60 seconds of simulated + * time. + * TODO: Move to separate file. + */ + function fakeEventLoop() { + // Simulate 60 seconds. No test will require more than this amount of + // simulated time. + var async = Promise.resolve(); + for (var i = 0; i < 60; ++i) { + async = async.then(function() { + // We shouldn't need more than 5 rounds. + return processInstantaneousOperations(5); + }).then(function() { + currentTime++; + if (playing) { + playheadTime++; + } + jasmine.clock().tick(1000); + return Promise.resolve(); + }); + } + + return async; + } + + beforeEach(function() { + jasmine.clock().install(); + + // In these tests we fake a presentation that has 2 twenty second + // Periods, where each Period has 1 StreamSet. The first Period + // has 1 audio Stream, 2 video Streams, and 1 text Stream; and the second + // Period has 1 Stream of each type. + // + // There are 4 initialization segments: 1 audio and 1 video for the + // first Period, and 1 audio and 1 video for the second Period. + // + // There are 12 media segments: 2 audio, 2 video, and 2 text for the + // first Period, and 2 audio, 2 video, and 2 text for the second Period. + // All media segments are 10 seconds long. 
+ //
+ // We only use the second video Stream in the first Period to verify that
+ // the StreamingEngine can setup Streams correctly. It does not have init
+ // or media segments.
+ //
+ // Furthermore, the init segment URIs follow the pattern PERIOD_TYPE_init,
+ // e.g., "1_audio_init" or "2_video_init", and the media segment URIs
+ // follow the pattern PERIOD_TYPE_POSITION, e.g., "1_text_2" or
+ // "2_video_1". The first segment in each Period has position 1, the second
+ // segment, position 2.
+
+ currentTime = 0;
+
+ // Create dummy init segments.
+ dummyInitSegments = {
+ audio: [new ArrayBuffer(), new ArrayBuffer()],
+ video: [new ArrayBuffer(), new ArrayBuffer()],
+ text: []
+ };
+
+ // Create dummy media segments. The first two ArrayBuffers in each row are
+ // for the first Period, and the last two, for the second Period.
+ dummySegments = {
+ audio: [new ArrayBuffer(), new ArrayBuffer(),
+ new ArrayBuffer(), new ArrayBuffer()],
+ video: [new ArrayBuffer(), new ArrayBuffer(),
+ new ArrayBuffer(), new ArrayBuffer()],
+ text: [new ArrayBuffer(), new ArrayBuffer(),
+ new ArrayBuffer(), new ArrayBuffer()]
+ };
+
+ // Setup Playhead.
+ playhead = createMockPlayhead();
+ playheadTime = 0;
+ playing = false;
+
+ // Setup MediaSourceEngine.
+ // This table keeps track of which init segments have been appended.
+ initSegments = {
+ 'audio': [false, false],
+ 'video': [false, false],
+ 'text': []
+ };
+
+ // This table keeps track of which media segments have been appended.
+ segments = {
+ 'audio': [false, false, false, false],
+ 'video': [false, false, false, false],
+ 'text': [false, false, false, false]
+ };
+
+ mediaSourceEngine = createMockMediaSourceEngine();
+
+ // Setup NetworkingEngine.
+ // TODO: De-duplicate by implementing a generic fake? 
+ netEngine = createMockNetworkingEngine(); + netEngine.request.and.callFake(function(requestType, request) { + if (requestType != + shaka.net.NetworkingEngine.RequestType.SEGMENT) { + throw new Error('unexpected request type'); + } + + // Return the correct ArrayBuffer given the URIs. We don't check if the + // request is fully correct here; we do that later during validation. + var data; + + if (request.uris[0] == '1_audio_init') data = dummyInitSegments.audio[0]; + if (request.uris[0] == '1_video_init') data = dummyInitSegments.video[0]; + + if (request.uris[0] == '1_audio_1') data = dummySegments.audio[0]; + if (request.uris[0] == '1_audio_2') data = dummySegments.audio[1]; + if (request.uris[0] == '1_video_1') data = dummySegments.video[0]; + if (request.uris[0] == '1_video_2') data = dummySegments.video[1]; + if (request.uris[0] == '1_text_1') data = dummySegments.text[0]; + if (request.uris[0] == '1_text_2') data = dummySegments.text[1]; + + if (request.uris[0] == '2_audio_init') data = dummyInitSegments.audio[1]; + if (request.uris[0] == '2_video_init') data = dummyInitSegments.video[1]; + + if (request.uris[0] == '2_audio_1') data = dummySegments.audio[2]; + if (request.uris[0] == '2_audio_2') data = dummySegments.audio[3]; + if (request.uris[0] == '2_video_1') data = dummySegments.video[2]; + if (request.uris[0] == '2_video_2') data = dummySegments.video[3]; + if (request.uris[0] == '2_text_1') data = dummySegments.text[2]; + if (request.uris[0] == '2_text_2') data = dummySegments.text[3]; + + if (!data) throw new Error('unexpected URI: ' + request.uris[0]); + + return Promise.resolve({data: data, headers: {}}); + }); + + // Setup PresentationTimeline. + timeline = createMockPresentationTimeline(); + + timeline.getDuration.and.returnValue(40); + timeline.getSegmentAvailabilityStart.and.returnValue(0); + timeline.getSegmentAvailabilityEnd.and.returnValue(40); + + // These methods should not be invoked. 
+ timeline.setDuration.and.throwError( + new Error('unexpected call to setDuration()')); + timeline.getSegmentAvailabilityDuration.and.throwError( + new Error('unexpected call to getSegmentAvailabilityDuration()')); + + // Setup Streams. + + // Functions for findSegmentPosition() and getSegmentReference(). + var find = function(t) { + // Note: |t| is relative to a Period's start time. + return Math.floor(t / 10) + 1; + }; + var get = constructUri; + + audioStream1 = createMockAudioStream(0); + videoStream1 = createMockVideoStream(1); + textStream1 = createMockTextStream(2); + + alternateVideoStream1 = createMockVideoStream(3); + + // Setup first Period. + audioStream1.createSegmentIndex.and.returnValue(Promise.resolve()); + videoStream1.createSegmentIndex.and.returnValue(Promise.resolve()); + textStream1.createSegmentIndex.and.returnValue(Promise.resolve()); + alternateVideoStream1.createSegmentIndex.and.returnValue(Promise.resolve()); + + audioStream1.findSegmentPosition.and.callFake(find); + videoStream1.findSegmentPosition.and.callFake(find); + textStream1.findSegmentPosition.and.callFake(find); + alternateVideoStream1.findSegmentPosition.and.returnValue(null); + + audioStream1.getSegmentReference.and.callFake(get.bind(null, 1, 'audio')); + videoStream1.getSegmentReference.and.callFake(get.bind(null, 1, 'video')); + textStream1.getSegmentReference.and.callFake(get.bind(null, 1, 'text')); + alternateVideoStream1.getSegmentReference.and.returnValue(null); + + audioStream1.initSegmentReference = + new shaka.media.InitSegmentReference(['1_audio_init'], 0, null); + videoStream1.initSegmentReference = + new shaka.media.InitSegmentReference(['1_video_init'], 0, null); + + // Setup second Period. 
+ audioStream2 = createMockAudioStream(4); + videoStream2 = createMockVideoStream(5); + textStream2 = createMockTextStream(6); + + audioStream2.createSegmentIndex.and.returnValue(Promise.resolve()); + videoStream2.createSegmentIndex.and.returnValue(Promise.resolve()); + textStream2.createSegmentIndex.and.returnValue(Promise.resolve()); + + audioStream2.findSegmentPosition.and.callFake(find); + videoStream2.findSegmentPosition.and.callFake(find); + textStream2.findSegmentPosition.and.callFake(find); + + audioStream2.getSegmentReference.and.callFake(get.bind(null, 2, 'audio')); + videoStream2.getSegmentReference.and.callFake(get.bind(null, 2, 'video')); + textStream2.getSegmentReference.and.callFake(get.bind(null, 2, 'text')); + + audioStream2.initSegmentReference = + new shaka.media.InitSegmentReference(['2_audio_init'], 0, null); + videoStream2.initSegmentReference = + new shaka.media.InitSegmentReference(['2_video_init'], 0, null); + + // Create Manifest. + manifest = { + presentationTimeline: timeline, + periods: [ + { + startTime: 0, + streamSets: [ + {type: 'audio', streams: [audioStream1]}, + {type: 'video', streams: [videoStream1, alternateVideoStream1]}, + {type: 'text', streams: [textStream1]} + ] + }, + { + startTime: 20, + streamSets: [ + {type: 'audio', streams: [audioStream2]}, + {type: 'video', streams: [videoStream2]}, + {type: 'text', streams: [textStream2]} + ] + } + ] + }; + + // Setup real StreamingEngine. 
+ onCanSwitch = jasmine.createSpy('onCanSwitch'); + onBufferNewPeriod = jasmine.createSpy('onBufferNewPeriod'); + onError = jasmine.createSpy('onError'); + onInitialStreamsSetup = jasmine.createSpy('onInitialStreamsSetup'); + onStartupComplete = jasmine.createSpy('onStartupComplete'); + + var config = { + rebufferingGoal: 2, + bufferingGoal: 5, + retryParameters: {} + }; + streamingEngine = new shaka.media.StreamingEngine( + config, playhead, mediaSourceEngine, netEngine, manifest, + onCanSwitch, onBufferNewPeriod, onError, + onInitialStreamsSetup, onStartupComplete); + }); // beforeEach() + + afterEach(function() { + jasmine.clock().uninstall(); + }); + + // This test initializes the StreamingEngine (SE) and allows it to play + // through both Periods. + // + // After calling init() the following should occur: + // 1. SE should setup each of the initial Streams and then call + // onInitialStreamsSetup(). + // 2. SE should start appending segments from the initial Streams and in + // parallel setup all remaining Streams within the Manifest. + // - SE should call onStartupComplete() after it has buffered at least 1 + // segment from each of the initial Streams. + // - SE should call onCanSwitch() twice, once for each Period setup. + // 3. SE should call onBufferNewPeriod() after it has appended both segments + // from the first Period. + // 4. We must then switch to the Streams in the second Period by calling + // switch(). + // 5. SE should call MediaSourceEngine.endOfStream() after it has appended + // both segments from the second Period. At this point the playhead + // will not be at the end of the presentation, but the test will be + // effectively over since SE will have nothing else to do. 
+ it('initializes and plays', function(done) { + playhead.getTime.and.returnValue(0); + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + onInitialStreamsSetup.and.callFake(function() { + expect(mediaSourceEngine.init).toHaveBeenCalledWith( + { + 'audio': 'audio/mp4; codecs="aac"', + 'video': 'video/mp4; codecs="avc"', + 'text': 'text/vtt' + }); + expect(mediaSourceEngine.init.calls.count()).toBe(1); + mediaSourceEngine.init.calls.reset(); + + expect(mediaSourceEngine.setDuration).toHaveBeenCalledWith(40); + expect(mediaSourceEngine.setDuration.calls.count()).toBe(1); + mediaSourceEngine.setDuration.calls.reset(); + + expect(audioStream1.createSegmentIndex).toHaveBeenCalled(); + expect(videoStream1.createSegmentIndex).toHaveBeenCalled(); + expect(textStream1.createSegmentIndex).toHaveBeenCalled(); + + expect(alternateVideoStream1.createSegmentIndex).not.toHaveBeenCalled(); + }); + + onStartupComplete.and.callFake(function() { + expect(currentTime).toBe(0); + + // Verify buffers. + expect(initSegments.audio).toEqual([true, false]); + expect(initSegments.video).toEqual([true, false]); + expect(segments.audio).toEqual([true, false, false, false]); + expect(segments.video).toEqual([true, false, false, false]); + expect(segments.text).toEqual([true, false, false, false]); + + // During startup each Stream will require buffering, so there should + // be 3 calls to setBuffering(true). 
+ expect(playhead.setBuffering).toHaveBeenCalledWith(true); + expect(playhead.setBuffering).not.toHaveBeenCalledWith(false); + expect(playhead.setBuffering.calls.count()).toBe(3); + playhead.setBuffering.calls.reset(); + + playing = true; + playhead.getTime.and.callFake(function() { + return playheadTime; + }); + }); + + onCanSwitch.and.callFake(function(period) { + if (period == manifest.periods[0]) { + expect(alternateVideoStream1.createSegmentIndex).toHaveBeenCalled(); + } else if (period == manifest.periods[1]) { + expect(audioStream2.createSegmentIndex).toHaveBeenCalled(); + expect(videoStream2.createSegmentIndex).toHaveBeenCalled(); + expect(textStream2.createSegmentIndex).toHaveBeenCalled(); + } else { + throw new Error('unexpected period'); + } + }); + + onBufferNewPeriod.and.callFake(function(period) { + expect(period).toBe(manifest.periods[1]); + + // If we need to buffer the second Period then we must have reached our + // buffering goal. + expect(playhead.setBuffering).toHaveBeenCalledWith(false); + playhead.setBuffering.calls.reset(); + + // Verify buffers. + expect(initSegments.audio).toEqual([true, false]); + expect(initSegments.video).toEqual([true, false]); + expect(segments.audio).toEqual([true, true, false, false]); + expect(segments.video).toEqual([true, true, false, false]); + expect(segments.text).toEqual([true, true, false, false]); + + verifyNetworkingEngineRequestCalls(1); + + // Switch to the second Period. + setupFakeMediaSourceEngine(20 /* expectedTimestampOffset */); + + streamingEngine.switch('audio', audioStream2); + streamingEngine.switch('video', videoStream2); + streamingEngine.switch('text', textStream2); + }); + + // Here we go! + var streamsByType = { + 'audio': audioStream1, 'video': videoStream1, 'text': textStream1 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop().then(function() { + expect(mediaSourceEngine.endOfStream).toHaveBeenCalled(); + + // Verify buffers. 
+ expect(initSegments.audio).toEqual([false, true]); + expect(initSegments.video).toEqual([false, true]); + expect(segments.audio).toEqual([true, true, true, true]); + expect(segments.video).toEqual([true, true, true, true]); + expect(segments.text).toEqual([true, true, true, true]); + + verifyNetworkingEngineRequestCalls(2); + + return streamingEngine.destroy(); + }).catch(fail).then(done); + }); + + describe('handles seeks', function() { + /** + * Sets up a fake Playhead.getTime() method. + * @param {number} startTime the playhead's starting time with respect to + * the presentation timeline. + */ + function setupFakeGetTime(startTime) { + expect(currentTime).toBe(0); + playheadTime = startTime; + playing = true; + playhead.getTime.and.callFake(function() { + return playheadTime; + }); + } + + beforeEach(function() { + onStartupComplete.and.callFake(setupFakeGetTime.bind(null, 0)); + }); + + it('into buffered regions', function(done) { + playhead.getTime.and.returnValue(0); + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + onBufferNewPeriod.and.callFake(function(period) { + expect(period).toBe(manifest.periods[1]); + + // Seek backwards to a buffered region in the first Period. Note that + // since the buffering goal is 5 seconds and each segment is 10 seconds + // long, the last segment in the first Period will be appended when the + // playhead is at the 16 second mark. + expect(playhead.getTime()).toBe(16); + playheadTime -= 5; + streamingEngine.seeked(); + + // Don't switch to the second Period, just allow the fake event loop to + // finish. + onBufferNewPeriod.and.callFake(function() {}); + onBufferNewPeriod.calls.reset(); + netEngine.request.calls.reset(); + mediaSourceEngine.appendBuffer.calls.reset(); + }); + + // Here we go! 
+ var streamsByType = { + 'audio': audioStream1, 'video': videoStream1, 'text': textStream1 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop().then(function() { + expect(onBufferNewPeriod).not.toHaveBeenCalled(); + expect(mediaSourceEngine.appendBuffer).not.toHaveBeenCalled(); + expect(mediaSourceEngine.remove).not.toHaveBeenCalled(); + expect(mediaSourceEngine.clear).not.toHaveBeenCalled(); + expect(netEngine.request).not.toHaveBeenCalled(); + + // Verify buffers. + expect(initSegments.audio).toEqual([true, false]); + expect(initSegments.video).toEqual([true, false]); + expect(segments.audio).toEqual([true, true, false, false]); + expect(segments.video).toEqual([true, true, false, false]); + expect(segments.text).toEqual([true, true, false, false]); + + return streamingEngine.destroy(); + }).catch(fail).then(done); + }); + + it('into buffered regions across Periods', function(done) { + playhead.getTime.and.returnValue(0); + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + onBufferNewPeriod.and.callFake(function(period) { + expect(period).toBe(manifest.periods[1]); + + // Switch to the second Period. + setupFakeMediaSourceEngine(20 /* expectedTimestampOffset */); + + streamingEngine.switch('audio', audioStream2); + streamingEngine.switch('video', videoStream2); + streamingEngine.switch('text', textStream2); + + mediaSourceEngine.endOfStream.and.callFake(function() { + // Seek backwards to a buffered region in the first Period. Note that + // since the buffering goal is 5 seconds and each segment is 10 + // seconds long, the last segment in the second Period will be + // appended when the playhead is at the 26 second mark. + expect(playhead.getTime()).toBe(26); + playheadTime -= 15; + streamingEngine.seeked(); + + // Allow the fake event loop to finish. Note that onBufferNewPeriod() + // should not be called again since we've already buffered the second + // Period. 
+ onBufferNewPeriod.and.callFake(function() {}); + onBufferNewPeriod.calls.reset(); + mediaSourceEngine.endOfStream.and.callFake(function() {}); + mediaSourceEngine.endOfStream.calls.reset(); + }); + }); + + // Here we go! + var streamsByType = { + 'audio': audioStream1, 'video': videoStream1, 'text': textStream1 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop().then(function() { + // Already buffered to the end of the presentation so neither of these + // should have been called again. + expect(onBufferNewPeriod).not.toHaveBeenCalled(); + expect(mediaSourceEngine.endOfStream).not.toHaveBeenCalled(); + expect(mediaSourceEngine.remove).not.toHaveBeenCalled(); + expect(mediaSourceEngine.clear).not.toHaveBeenCalled(); + + // Verify buffers. + expect(initSegments.audio).toEqual([false, true]); + expect(initSegments.video).toEqual([false, true]); + expect(segments.audio).toEqual([true, true, true, true]); + expect(segments.video).toEqual([true, true, true, true]); + expect(segments.text).toEqual([true, true, true, true]); + + return streamingEngine.destroy(); + }).catch(fail).then(done); + }); + + it('into unbuffered regions', function(done) { + playhead.getTime.and.returnValue(0); + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + onStartupComplete.and.callFake(function() { + setupFakeGetTime(0); + + // Seek forward to an unbuffered region in the first Period. + expect(playhead.getTime()).toBe(0); + playheadTime += 15; + streamingEngine.seeked(); + + onBufferNewPeriod.and.callFake(function(period) { + expect(period).toBe(manifest.periods[1]); + + // Verify that all buffers have been cleared. + expect(mediaSourceEngine.clear).toHaveBeenCalledWith('audio'); + expect(mediaSourceEngine.clear).toHaveBeenCalledWith('video'); + expect(mediaSourceEngine.clear).toHaveBeenCalledWith('text'); + + // Verify buffers. 
+ expect(initSegments.audio).toEqual([true, false]); + expect(initSegments.video).toEqual([true, false]); + expect(segments.audio).toEqual([false, true, false, false]); + expect(segments.video).toEqual([false, true, false, false]); + expect(segments.text).toEqual([false, true, false, false]); + + // Don't switch to the second Period, just allow the fake event loop + // to finish. + onBufferNewPeriod.and.callFake(function(period) {}); + onBufferNewPeriod.calls.reset(); + mediaSourceEngine.appendBuffer.calls.reset(); + mediaSourceEngine.clear.calls.reset(); + }); + }); + + // Here we go! + var streamsByType = { + 'audio': audioStream1, 'video': videoStream1, 'text': textStream1 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop().then(function() { + expect(mediaSourceEngine.appendBuffer).not.toHaveBeenCalled(); + expect(mediaSourceEngine.remove).not.toHaveBeenCalled(); + expect(mediaSourceEngine.clear).not.toHaveBeenCalled(); + + // Verify buffers. + expect(initSegments.audio).toEqual([true, false]); + expect(initSegments.video).toEqual([true, false]); + expect(segments.audio).toEqual([false, true, false, false]); + expect(segments.video).toEqual([false, true, false, false]); + expect(segments.text).toEqual([false, true, false, false]); + + return streamingEngine.destroy(); + }).catch(fail).then(done); + }); + + it('into unbuffered regions across Periods', function(done) { + // Start from the second Period. + playhead.getTime.and.returnValue(20); + setupFakeMediaSourceEngine(20 /* expectedTimestampOffset */); + + onStartupComplete.and.callFake(setupFakeGetTime.bind(null, 20)); + + // onBufferNewPeriod() should not be called since the second Period + // is the last one; instead, endOfStream() should be called. + mediaSourceEngine.endOfStream.and.callFake(function() { + // Verify buffers. 
+ expect(initSegments.audio).toEqual([false, true]); + expect(initSegments.video).toEqual([false, true]); + expect(segments.audio).toEqual([false, false, true, true]); + expect(segments.video).toEqual([false, false, true, true]); + expect(segments.text).toEqual([false, false, true, true]); + + // Seek backwards to an unbuffered region in the first Period. Note + // that since the buffering goal is 5 seconds and each segment is 10 + // seconds long, the last segment in the second Period will be appended + // when the playhead is at the 26 second mark. + expect(playhead.getTime()).toBe(26); + playheadTime -= 10; + streamingEngine.seeked(); + + onBufferNewPeriod.and.callFake(function(period) { + expect(period).toBe(manifest.periods[0]); + + // Verify that all buffers have been cleared. + expect(mediaSourceEngine.clear).toHaveBeenCalledWith('audio'); + expect(mediaSourceEngine.clear).toHaveBeenCalledWith('video'); + expect(mediaSourceEngine.clear).toHaveBeenCalledWith('text'); + mediaSourceEngine.clear.calls.reset(); + + // Switch to the first Period. + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + streamingEngine.switch('audio', audioStream1); + streamingEngine.switch('video', videoStream1); + streamingEngine.switch('text', textStream1); + + onBufferNewPeriod.and.callFake(function(period) { + expect(period).toBe(manifest.periods[1]); + + // Verify buffers. + expect(initSegments.audio).toEqual([true, false]); + expect(initSegments.video).toEqual([true, false]); + expect(segments.audio).toEqual([false, true, false, false]); + expect(segments.video).toEqual([false, true, false, false]); + expect(segments.text).toEqual([false, true, false, false]); + + // Don't switch to the second Period, just allow the fake event + // loop to finish. + onBufferNewPeriod.and.callFake(function(period) {}); + mediaSourceEngine.appendBuffer.calls.reset(); + mediaSourceEngine.clear.calls.reset(); + }); + }); + }); + + // Here we go! 
+ var streamsByType = { + 'audio': audioStream2, 'video': videoStream2, 'text': textStream2 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop().then(function() { + expect(mediaSourceEngine.appendBuffer).not.toHaveBeenCalled(); + expect(mediaSourceEngine.remove).not.toHaveBeenCalled(); + expect(mediaSourceEngine.clear).not.toHaveBeenCalled(); + + // Verify buffers. + expect(initSegments.audio).toEqual([true, false]); + expect(initSegments.video).toEqual([true, false]); + expect(segments.audio).toEqual([false, true, false, false]); + expect(segments.video).toEqual([false, true, false, false]); + expect(segments.text).toEqual([false, true, false, false]); + + streamingEngine.destroy().then(done); + }); + }); + }); + + describe('handles errors', function() { + it('from initial Stream setup', function(done) { + playhead.getTime.and.returnValue(0); + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + videoStream1.createSegmentIndex.and.returnValue( + Promise.reject('FAKE_ERROR')); + + onError.and.callFake(function(error) { + expect(onInitialStreamsSetup).not.toHaveBeenCalled(); + expect(onStartupComplete).not.toHaveBeenCalled(); + expect(error).toBe('FAKE_ERROR'); + streamingEngine.destroy().catch(fail).then(done); + }); + + // Here we go! + var streamsByType = { + 'audio': audioStream1, 'video': videoStream1, 'text': textStream1 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop(); + }); + + it('from post startup Stream setup', function(done) { + playhead.getTime.and.returnValue(0); + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + alternateVideoStream1.createSegmentIndex.and.returnValue( + Promise.reject('FAKE_ERROR')); + + onError.and.callFake(function(error) { + expect(onInitialStreamsSetup).toHaveBeenCalled(); + expect(onStartupComplete).toHaveBeenCalled(); + expect(error).toBe('FAKE_ERROR'); + streamingEngine.destroy().catch(fail).then(done); + }); + + // Here we go! 
+ var streamsByType = { + 'audio': audioStream1, 'video': videoStream1, 'text': textStream1 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop(); + }); + + it('from failed init segment append during startup', function(done) { + playhead.getTime.and.returnValue(0); + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + mediaSourceEngine.appendBuffer.and.callFake(function(type, data) { + // Reject the first video init segment. + if (data == dummyInitSegments.video[0]) { + return Promise.reject('FAKE_ERROR'); + } else { + return fakeAppendBuffer(type, data); + } + }); + + onError.and.callFake(function(error) { + expect(onInitialStreamsSetup).toHaveBeenCalled(); + expect(onStartupComplete).not.toHaveBeenCalled(); + expect(error).toBe('FAKE_ERROR'); + streamingEngine.destroy().catch(fail).then(done); + }); + + // Here we go! + var streamsByType = { + 'audio': audioStream1, 'video': videoStream1, 'text': textStream1 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop(); + }); + + it('from failed media segment append during startup', function(done) { + playhead.getTime.and.returnValue(0); + setupFakeMediaSourceEngine(0 /* expectedTimestampOffset */); + + mediaSourceEngine.appendBuffer.and.callFake(function(type, data) { + // Reject the first audio segment. + if (data == dummySegments.audio[0]) { + return Promise.reject('FAKE_ERROR'); + } else { + return fakeAppendBuffer(type, data); + } + }); + + onError.and.callFake(function(error) { + expect(onInitialStreamsSetup).toHaveBeenCalled(); + expect(onStartupComplete).not.toHaveBeenCalled(); + expect(error).toBe('FAKE_ERROR'); + streamingEngine.destroy().catch(fail).then(done); + }); + + // Here we go! + var streamsByType = { + 'audio': audioStream1, 'video': videoStream1, 'text': textStream1 + }; + streamingEngine.init(streamsByType); + + fakeEventLoop(); + }); + }); + + /** + * Verifies calls to NetworkingEngine.request(). + * @param {number} period The Period number (one-based). 
+ */ + function verifyNetworkingEngineRequestCalls(period) { + var get = constructUri; + + expect(netEngine.request).toHaveBeenCalledWith( + shaka.net.NetworkingEngine.RequestType.SEGMENT, + jasmine.objectContaining({ + uris: [period + '_audio_init'], + method: 'GET', + headers: {}, + retryParameters: {} + })); + + expect(netEngine.request).toHaveBeenCalledWith( + shaka.net.NetworkingEngine.RequestType.SEGMENT, + jasmine.objectContaining({ + uris: [period + '_video_init'], + method: 'GET', + headers: {}, + retryParameters: {} + })); + + expect(netEngine.request).toHaveBeenCalledWith( + shaka.net.NetworkingEngine.RequestType.SEGMENT, + jasmine.objectContaining({ + uris: [period + '_audio_1'], + method: 'GET', + headers: {'Range': 'bytes=0-' + segmentSizes.audio}, + retryParameters: {} + })); + + expect(netEngine.request).toHaveBeenCalledWith( + shaka.net.NetworkingEngine.RequestType.SEGMENT, + jasmine.objectContaining({ + uris: [period + '_video_1'], + method: 'GET', + headers: {'Range': 'bytes=0-' + segmentSizes.video}, + retryParameters: {} + })); + + expect(netEngine.request).toHaveBeenCalledWith( + shaka.net.NetworkingEngine.RequestType.SEGMENT, + jasmine.objectContaining({ + uris: [period + '_text_1'], + method: 'GET', + headers: {'Range': 'bytes=0-' + segmentSizes.text}, + retryParameters: {} + })); + + expect(netEngine.request).toHaveBeenCalledWith( + shaka.net.NetworkingEngine.RequestType.SEGMENT, + jasmine.objectContaining({ + uris: [period + '_audio_2'], + method: 'GET', + headers: {'Range': 'bytes=0-' + segmentSizes.audio}, + retryParameters: {} + })); + + expect(netEngine.request).toHaveBeenCalledWith( + shaka.net.NetworkingEngine.RequestType.SEGMENT, + jasmine.objectContaining({ + uris: [period + '_video_2'], + method: 'GET', + headers: {'Range': 'bytes=0-' + segmentSizes.video}, + retryParameters: {} + })); + + expect(netEngine.request).toHaveBeenCalledWith( + shaka.net.NetworkingEngine.RequestType.SEGMENT, + jasmine.objectContaining({ + uris: [period 
+ '_text_2'], + method: 'GET', + headers: {'Range': 'bytes=0-' + segmentSizes.text}, + retryParameters: {} + })); + + netEngine.request.calls.reset(); + } + + /** + * Makes the mock MediaSourceEngine object behave as a fake MediaSourceEngine + * object that keeps track of the segments that have been appended. + * + * Note that appending an init segment clears any init segments already + * appended for that content type. + * + * The fake ensures that setTimestampOffset() is only called with the given + * expected timestamp offset value. + */ + function setupFakeMediaSourceEngine(expectedTimestampOffset) { + mediaSourceEngine.bufferStart.and.callFake(fakeBufferStart); + mediaSourceEngine.bufferEnd.and.callFake(fakeBufferEnd); + mediaSourceEngine.bufferedAheadOf.and.callFake(fakeBufferedAheadOf); + mediaSourceEngine.appendBuffer.and.callFake(fakeAppendBuffer); + mediaSourceEngine.setTimestampOffset.and.callFake( + fakeSetTimestampOffset.bind(null, expectedTimestampOffset)); + mediaSourceEngine.remove.and.callFake(fakeRemove); + mediaSourceEngine.clear.and.callFake(function(type) { + return fakeRemove(type, 0, 40); + }); + mediaSourceEngine.setDuration.and.returnValue(Promise.resolve()); + } + + function fakeBufferStart(type, time) { + if (segments[type] === undefined) throw new Error('unexpected type'); + var first = segments[type].indexOf(true); + return first >= 0 ? first * 10 : null; + } + + function fakeBufferEnd(type, time) { + if (segments[type] === undefined) throw new Error('unexpected type'); + var last = segments[type].lastIndexOf(true); + return last >= 0 ? (last + 1) * 10 : null; + } + + function fakeBufferedAheadOf(type, time) { + if (segments[type] === undefined) throw new Error('unexpected type'); + var start = Math.floor(time / 10); + if (!segments[type][start]) return 0; // Unbuffered. + var last = segments[type].indexOf(false, start); // Find first gap. 
+ if (last < 0) last = segments[type].length - 1; + var endTime = last * 10; + shaka.asserts.assert(endTime >= time, 'unexpected end'); + return endTime - time; + } + + function fakeAppendBuffer(type, data) { + if (segments[type] === undefined) throw new Error('unexpected type'); + + // Set init segment. + var i = dummyInitSegments[type].indexOf(data); + if (i >= 0) { + for (var j = 0; j < initSegments[type].length; ++j) { + initSegments[type][j] = false; + } + initSegments[type][i] = true; + return Promise.resolve(); + } + + // Set media segment. + var i = dummySegments[type].indexOf(data); + if (i < 0) throw new Error('unexpected data'); + + segments[type][i] = true; + return Promise.resolve(); + } + + function fakeSetTimestampOffset(expectedTimestampOffset, type, offset) { + if (segments[type] === undefined) + throw new Error('unexpected type'); + if (offset != expectedTimestampOffset) + throw new Error('unexpected timestamp offset'); + return Promise.resolve(); + } + + function fakeRemove(type, start, end) { + if (segments[type] === undefined) throw new Error('unexpected type'); + if (start != 0) throw new Error('unexpected start'); + if (end < 40) throw new Error('unexpected end'); + + for (var i = 0; i < segments[type].length; ++i) { + segments[type][i] = false; + } + + return Promise.resolve(); + } + + /** + * Constructs a media segment URI. + * @param {number} period The Period number (one-based). + * @param {string} contentType The content type. + * @param {number} position The segment's position (one-based). 
+ */ + function constructUri(period, contentType, position) { + var size = segmentSizes[contentType]; + if (position == 1 || position == 2) { + return new shaka.media.SegmentReference( + position, (position - 1) * 10, position * 10, + ['' + period + '_' + contentType + '_' + position], + 0, size); + } else { + return null; + } + } + + function createMockPlayhead() { + return { + destroy: jasmine.createSpy('destroy'), + getTime: jasmine.createSpy('getTime'), + setBuffering: jasmine.createSpy('setBuffering') + }; + } + + function createMockMediaSourceEngine() { + return { + destroy: jasmine.createSpy('support'), + init: jasmine.createSpy('init'), + bufferStart: jasmine.createSpy('bufferStart'), + bufferEnd: jasmine.createSpy('bufferEnd'), + bufferedAheadOf: jasmine.createSpy('bufferedAheadOf'), + appendBuffer: jasmine.createSpy('appendBuffer'), + remove: jasmine.createSpy('remove'), + clear: jasmine.createSpy('clear'), + endOfStream: jasmine.createSpy('endOfStream'), + setDuration: jasmine.createSpy('setDuration'), + setTimestampOffset: jasmine.createSpy('setTimestampOffset') + }; + } + + function createMockNetworkingEngine() { + return { + destroy: jasmine.createSpy('destroy'), + request: jasmine.createSpy('request') + }; + } + + function createMockPresentationTimeline() { + return { + getDuration: jasmine.createSpy('getDuration'), + setDuration: jasmine.createSpy('setDuration'), + getSegmentAvailabilityDuration: + jasmine.createSpy('getSegmentAvailabilityDuration'), + getSegmentAvailabilityStart: + jasmine.createSpy('getSegmentAvailabilityStart'), + getSegmentAvailabilityEnd: + jasmine.createSpy('getSegmentAvailabilityEnd') + }; + } + + function createMockAudioStream(id) { + return { + id: id, + createSegmentIndex: jasmine.createSpy('createSegmentIndex'), + findSegmentPosition: jasmine.createSpy('findSegmentPosition'), + getSegmentReference: jasmine.createSpy('getSegmentReference'), + initSegmentReference: null, + presentationTimeOffset: 0, + mimeType: 'audio/mp4', 
+ codecs: 'aac', + bandwidth: 192000 + }; + } + + function createMockVideoStream(id) { + return { + id: id, + createSegmentIndex: jasmine.createSpy('createSegmentIndex'), + findSegmentPosition: jasmine.createSpy('findSegmentPosition'), + getSegmentReference: jasmine.createSpy('getSegmentReference'), + initSegmentReference: null, + presentationTimeOffset: 0, + mimeType: 'video/mp4', + codecs: 'avc', + bandwidth: 5000000, + width: 1280, + height: 720 + }; + } + + function createMockTextStream(id) { + return { + id: id, + createSegmentIndex: jasmine.createSpy('createSegmentIndex'), + findSegmentPosition: jasmine.createSpy('findSegmentPosition'), + getSegmentReference: jasmine.createSpy('getSegmentReference'), + initSegmentReference: null, + presentationTimeOffset: 0, + mimeType: 'text/vtt', + kind: 'subtitles' + }; + } +}); +