Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: Unmask errors on LL (low-latency) streaming #5908

Merged
merged 2 commits into from
Nov 21, 2023
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
136 changes: 74 additions & 62 deletions lib/media/streaming_engine.js
Original file line number Diff line number Diff line change
Expand Up @@ -1325,6 +1325,44 @@ shaka.media.StreamingEngine = class {

mediaState.performingUpdate = true;

// Shared error handler for segment fetch/append failures. Decides, per
// error code, whether to retry, drop the text stream, handle quota
// pressure, or surface a critical streaming error.
// NOTE(review): the name is a typo of `handleError`; the call site in the
// catch block below uses the same spelling — rename both together, not here.
const hanleError = async (error) => {
  // Destruction during an async gap is reported through the error itself.
  this.destroyer_.ensureNotDestroyed(error);
  if (this.fatalError_) {
    return;
  }
  goog.asserts.assert(error instanceof shaka.util.Error,
      'Should only receive a Shaka error');

  // The update is over regardless of which branch we take below.
  mediaState.performingUpdate = false;

  if (error.code == shaka.util.Error.Code.OPERATION_ABORTED) {
    // If the network slows down, abort the current fetch request and start
    // a new one, and ignore the error message.
    // (The redundant second `performingUpdate = false` from the original
    // was removed; it is already cleared unconditionally above.)
    this.cancelUpdate_(mediaState);
    this.scheduleUpdate_(mediaState, 0);
  } else if (mediaState.type == ContentType.TEXT &&
      this.config_.ignoreTextStreamFailures) {
    // Text is optional when configured so: log and keep playing without it.
    if (error.code == shaka.util.Error.Code.BAD_HTTP_STATUS) {
      shaka.log.warning(logPrefix,
          'Text stream failed to download. Proceeding without it.');
    } else {
      shaka.log.warning(logPrefix,
          'Text stream failed to parse. Proceeding without it.');
    }
    this.mediaStates_.delete(ContentType.TEXT);
  } else if (error.code == shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR) {
    // SourceBuffer is full; delegate to the eviction/retry logic.
    this.handleQuotaExceeded_(mediaState, error);
  } else {
    shaka.log.error(logPrefix, 'failed fetch and append: code=' +
        error.code);
    mediaState.hasError = true;

    // Anything unrecognized is fatal for this stream.
    error.severity = shaka.util.Error.Severity.CRITICAL;
    await this.handleStreamingError_(mediaState, error);
  }
};

try {
if (reference.getStatus() ==
shaka.media.SegmentReference.Status.MISSING) {
Expand All @@ -1351,6 +1389,7 @@ shaka.media.StreamingEngine = class {
let remaining = new Uint8Array(0);
let processingResult = false;
let callbackCalled = false;
let streamDataCallbackError;
const streamDataCallback = async (data) => {
if (processingResult) {
// If the fallback result processing was triggered, don't also
Expand All @@ -1363,40 +1402,47 @@ shaka.media.StreamingEngine = class {
if (this.fatalError_) {
return;
}
// Append the data with complete boxes.
// Every time streamDataCallback gets called, append the new data to
// the remaining data.
// Find the last fully completed Mdat box, and slice the data into two
// parts: the first part with completed Mdat boxes, and the second
// part with an incomplete box.
// Append the first part, and save the second part as remaining data,
// and handle it with the next streamDataCallback call.
remaining = this.concatArray_(remaining, data);
let sawMDAT = false;
let offset = 0;
new shaka.util.Mp4Parser()
.box('mdat', (box) => {
offset = box.size + box.start;
sawMDAT = true;
})
.parse(remaining, /* partialOkay= */ false,
try {
// Append the data with complete boxes.
// Every time streamDataCallback gets called, append the new data
// to the remaining data.
// Find the last fully completed Mdat box, and slice the data into
// two parts: the first part with completed Mdat boxes, and the
// second part with an incomplete box.
// Append the first part, and save the second part as remaining
// data, and handle it with the next streamDataCallback call.
remaining = this.concatArray_(remaining, data);
let sawMDAT = false;
let offset = 0;
new shaka.util.Mp4Parser()
.box('mdat', (box) => {
offset = box.size + box.start;
sawMDAT = true;
})
.parse(remaining, /* partialOkay= */ false,
/* isChunkedData= */ true);
if (sawMDAT) {
const dataToAppend = remaining.subarray(0, offset);
remaining = remaining.subarray(offset);
await this.append_(
mediaState, presentationTime, stream, reference, dataToAppend,
/* isChunkedData= */ true);
if (sawMDAT) {
const dataToAppend = remaining.subarray(0, offset);
remaining = remaining.subarray(offset);
await this.append_(
mediaState, presentationTime, stream, reference, dataToAppend,
/* isChunkedData= */ true);

if (mediaState.segmentPrefetch && mediaState.segmentIterator) {
mediaState.segmentPrefetch.prefetchSegments(
reference, /* skipFirst= */ true);

if (mediaState.segmentPrefetch && mediaState.segmentIterator) {
mediaState.segmentPrefetch.prefetchSegments(
reference, /* skipFirst= */ true);
}
}
} catch (error) {
streamDataCallbackError = error;
}
};

const result =
await this.fetch_(mediaState, reference, streamDataCallback);
if (streamDataCallbackError) {
throw streamDataCallbackError;
}
if (!callbackCalled) {
// In some environments, we might be forced to use network plugins
// that don't support streamDataCallback. In those cases, as a
Expand Down Expand Up @@ -1482,41 +1528,7 @@ shaka.media.StreamingEngine = class {
// Update right away.
this.scheduleUpdate_(mediaState, 0);
} catch (error) {
this.destroyer_.ensureNotDestroyed(error);
if (this.fatalError_) {
return;
}
goog.asserts.assert(error instanceof shaka.util.Error,
'Should only receive a Shaka error');

mediaState.performingUpdate = false;

if (error.code == shaka.util.Error.Code.OPERATION_ABORTED) {
// If the network slows down, abort the current fetch request and start
// a new one, and ignore the error message.
mediaState.performingUpdate = false;
this.cancelUpdate_(mediaState);
this.scheduleUpdate_(mediaState, 0);
} else if (mediaState.type == ContentType.TEXT &&
this.config_.ignoreTextStreamFailures) {
if (error.code == shaka.util.Error.Code.BAD_HTTP_STATUS) {
shaka.log.warning(logPrefix,
'Text stream failed to download. Proceeding without it.');
} else {
shaka.log.warning(logPrefix,
'Text stream failed to parse. Proceeding without it.');
}
this.mediaStates_.delete(ContentType.TEXT);
} else if (error.code == shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR) {
this.handleQuotaExceeded_(mediaState, error);
} else {
shaka.log.error(logPrefix, 'failed fetch and append: code=' +
error.code);
mediaState.hasError = true;

error.severity = shaka.util.Error.Severity.CRITICAL;
await this.handleStreamingError_(mediaState, error);
}
await hanleError(error);
}
}

Expand Down
Loading