fix: Fix buffering due to re-fetch (SegmentTimeline)
In DASH with SegmentTimeline, we had previously assumed that the
timeline would only ever have items added to its end.  In practice,
some content adds segments to the beginning of the timeline in an
update.

When this happened, assumptions higher up the stack were broken, and
StreamingEngine began streaming from the wrong position, re-fetching
segments that were already buffered.

It is relatively simple to filter the incoming segment references so
that we only ever grow our list from the end.  This restores the
assumptions made by other components and resolves the re-fetching
issue for SegmentTimeline-based content.
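
Conceptually, the incoming references are filtered against the start of
the existing list before merging.  A simplified sketch of the idea, not
the exact committed code (which is in lib/media/segment_index.js below):

  // Drop incoming references that end at or before the start of our
  // current first reference, so the merged list can only grow at the end.
  const firstStart =
      this.references.length ? this.references[0].startTime : -Infinity;
  references = references.filter((r) => r.endTime > firstStart);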

PR shaka-project#3419 will further address issues with SegmentTemplate+duration.

See also issue shaka-project#3354
b/186457868

Change-Id: Ibc03a697dd6a2a6f5e0f539cb7cf94bd9a63b495
joeyparrish committed Jun 10, 2021
1 parent 53173c5 commit 3222370
Showing 2 changed files with 55 additions and 9 deletions.
30 changes: 24 additions & 6 deletions lib/media/segment_index.js
@@ -181,14 +181,18 @@ shaka.media.SegmentIndex = class {

/**
* Merges the given SegmentReferences. Supports extending the original
* references only. Will not replace old references or interleave new ones.
* references only. Will replace old references with equivalent new ones, and
* keep any unique old ones.
*
* Used, for example, by the DASH and HLS parser, where manifests may not list
* all available references, so we must keep available references in memory to
* fill the availability window.
*
* @param {!Array.<!shaka.media.SegmentReference>} references The list of
* SegmentReferences, which must be sorted first by their start times
* (ascending) and second by their end times (ascending).
* @deprecated Not used directly by our own parsers, so will become private in
* v4. Use mergeAndEvict() instead.
* @export
*/
merge(references) {
@@ -199,13 +203,13 @@ shaka.media.SegmentIndex = class {
return;
}

// Partial segments are used for live edge, and should be removed when they
// get older. Remove the old SegmentReferences after the first new
// reference's start time.
if (!references.length) {
return;
}

// Partial segments are used for live edge, and should be removed when they
// get older. Remove the old SegmentReferences after the first new
// reference's start time.
this.references = this.references.filter((r) => {
return r.startTime < references[0].startTime;
});
@@ -233,13 +237,27 @@ shaka.media.SegmentIndex = class {
* @export
*/
mergeAndEvict(references, windowStart) {
// FIlter out the references that are no longer available to avoid
// Filter out the references that are no longer available to avoid
// repeatedly evicting them and messing up eviction count.
references = references.filter((r) => {
return r.endTime > windowStart;
return r.endTime > windowStart &&
(this.references.length == 0 ||
r.endTime > this.references[0].startTime);
});

const oldFirstRef = this.references[0];
this.merge(references);
const newFirstRef = this.references[0];

if (oldFirstRef) {
// We don't compare the actual ref, since the object could legitimately be
// replaced with an equivalent. Even the URIs could change due to
// load-balancing actions taken by the server. However, if the time
// changes, it's not an equivalent reference.
goog.asserts.assert(oldFirstRef.startTime == newFirstRef.startTime,
'SegmentIndex.merge should not change the first reference time!');
}

this.evict(windowStart);
}

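For context on why growing the list at the beginning mattered: positions
returned by SegmentIndex.find() map onto this internal reference list, so
prepending references would shift positions that StreamingEngine was
already holding.  A hypothetical illustration using the unit test's
helpers (makeReference and uri, defined in the test file below):

  const index = new shaka.media.SegmentIndex([
    makeReference(uri(10), 10, 20),
  ]);
  const position = index.find(10);

  // Before this fix, a manifest update that prepended ref(0, 10) could
  // make the saved position point at the 0-10 segment, so StreamingEngine
  // would re-fetch media it had already buffered.  With the fix, that
  // incoming reference is filtered out and existing positions stay valid,
  // as the new test below verifies.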
34 changes: 31 additions & 3 deletions test/media/segment_index_unit.js
@@ -484,12 +484,40 @@ describe('SegmentIndex', /** @suppress {accessControls} */ () => {
makeReference(uri(20), 20, 30),
];

// The first reference ends before the availabilityWindowStart, so it
// should be discarded.
index1.mergeAndEvict(references2, 19);
// The first two references end before the availabilityWindowStart, so
// they should be discarded.
index1.mergeAndEvict(references2, 21);
expect(index1.references.length).toBe(1);
expect(index1.references[0]).toEqual(references2[2]);
});

it('discards segments that end before the first old segment', () => {
/** @type {!Array.<!shaka.media.SegmentReference>} */
const references1 = [
// Assuming ref(0, 10) has been already evicted
makeReference(uri(10), 10, 20),
];
const index1 = new shaka.media.SegmentIndex(references1);
const position1 = index1.find(10);

/** @type {!Array.<!shaka.media.SegmentReference>} */
const references2 = [
makeReference(uri(0), 0, 10),
makeReference(uri(10), 10, 20),
makeReference(uri(20), 20, 30),
];

// The new first reference ends before the first old reference starts, so
// it should be discarded. We will never grow the list at the beginning.
index1.mergeAndEvict(references2, 0);
expect(index1.references.length).toBe(2);
expect(index1.references[0]).toEqual(references2[1]);
expect(index1.references[1]).toEqual(references2[2]);

// The positions should be the same, as well.
expect(index1.find(10)).toBe(position1);
goog.asserts.assert(position1 != null, 'Position should not be null!');
expect(index1.get(position1)).toBe(references2[1]);
});
});

