Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: Debug buffer placement #4345

Merged
merged 4 commits into from
Jul 14, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
81 changes: 57 additions & 24 deletions lib/media/media_source_engine.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ goog.require('goog.asserts');
goog.require('shaka.log');
goog.require('shaka.media.ContentWorkarounds');
goog.require('shaka.media.IClosedCaptionParser');
goog.require('shaka.media.SegmentReference');
goog.require('shaka.media.TimeRangesUtils');
goog.require('shaka.media.Transmuxer');
goog.require('shaka.text.TextEngine');
Expand Down Expand Up @@ -466,13 +467,13 @@ shaka.media.MediaSourceEngine = class {
*/
getBufferedInfo() {
const ContentType = shaka.util.ManifestParserUtils.ContentType;
const TimeRangeUtils = shaka.media.TimeRangesUtils;
const TimeRangesUtils = shaka.media.TimeRangesUtils;

const info = {
total: TimeRangeUtils.getBufferedInfo(this.video_.buffered),
audio: TimeRangeUtils.getBufferedInfo(
total: TimeRangesUtils.getBufferedInfo(this.video_.buffered),
audio: TimeRangesUtils.getBufferedInfo(
this.getBuffered_(ContentType.AUDIO)),
video: TimeRangeUtils.getBufferedInfo(
video: TimeRangesUtils.getBufferedInfo(
this.getBuffered_(ContentType.VIDEO)),
text: [],
};
Expand Down Expand Up @@ -516,15 +517,14 @@ shaka.media.MediaSourceEngine = class {
*
* @param {shaka.util.ManifestParserUtils.ContentType} contentType
* @param {!BufferSource} data
* @param {?number} startTime relative to the start of the presentation
* @param {?number} endTime relative to the start of the presentation
* @param {?shaka.media.SegmentReference} reference The segment reference
* we are appending, or null for init segments
* @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
* captions
* captions
* @param {boolean=} seeked True if we just seeked
* @return {!Promise}
*/
async appendBuffer(
contentType, data, startTime, endTime, hasClosedCaptions, seeked) {
async appendBuffer(contentType, data, reference, hasClosedCaptions, seeked) {
const ContentType = shaka.util.ManifestParserUtils.ContentType;

if (contentType == ContentType.TEXT) {
Expand All @@ -533,7 +533,10 @@ shaka.media.MediaSourceEngine = class {
const offset = await this.textSequenceModeOffset_;
this.textEngine_.setTimestampOffset(offset);
}
await this.textEngine_.appendBuffer(data, startTime, endTime);
await this.textEngine_.appendBuffer(
data,
reference ? reference.startTime : null,
reference ? reference.endTime : null);
return;
}

Expand All @@ -549,7 +552,10 @@ shaka.media.MediaSourceEngine = class {
if (transmuxedData.metadata) {
const timestampOffset =
this.sourceBuffers_[contentType].timestampOffset;
this.onMetadata_(transmuxedData.metadata, timestampOffset, endTime);
this.onMetadata_(
transmuxedData.metadata,
timestampOffset,
reference ? reference.endTime : null);
}
// This doesn't work for native TS support (ex. Edge/Chromecast),
// since no transmuxing is needed for native TS.
Expand All @@ -559,7 +565,10 @@ shaka.media.MediaSourceEngine = class {
const closedCaptions = this.textEngine_
.convertMuxjsCaptionsToShakaCaptions(transmuxedData.captions);
this.textEngine_.storeAndAppendClosedCaptions(
closedCaptions, startTime, endTime, videoOffset);
closedCaptions,
reference ? reference.startTime : null,
reference ? reference.endTime : null,
videoOffset);
}

data = transmuxedData.data;
Expand All @@ -569,26 +578,29 @@ shaka.media.MediaSourceEngine = class {
}
// If it is the init segment for closed captions, initialize the closed
// caption parser.
if (startTime == null && endTime == null) {
if (!reference) {
this.captionParser_.init(data);
} else {
const closedCaptions = this.captionParser_.parseFrom(data);
if (closedCaptions.length) {
const videoOffset =
this.sourceBuffers_[ContentType.VIDEO].timestampOffset;
this.textEngine_.storeAndAppendClosedCaptions(
closedCaptions, startTime, endTime, videoOffset);
closedCaptions,
reference.startTime,
reference.endTime,
videoOffset);
}
}
}

data = this.workAroundBrokenPlatforms_(data, startTime, contentType);
data = this.workAroundBrokenPlatforms_(
data, reference ? reference.startTime : null, contentType);
theodab marked this conversation as resolved.
Show resolved Hide resolved

const sourceBuffer = this.sourceBuffers_[contentType];
const SEQUENCE = shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;

if (this.sequenceMode_ && sourceBuffer.mode != SEQUENCE &&
startTime != null) {
if (this.sequenceMode_ && sourceBuffer.mode != SEQUENCE && reference) {
// This is the first media segment to be appended to a SourceBuffer in
// sequence mode. We set the mode late so that we can trick MediaSource
// into extracting a timestamp for us to align text segments in sequence
Expand Down Expand Up @@ -623,7 +635,7 @@ shaka.media.MediaSourceEngine = class {
// segments.
const mediaStartTime = shaka.media.TimeRangesUtils.bufferStart(
this.getBuffered_(contentType));
const textOffset = (startTime || 0) - (mediaStartTime || 0);
const textOffset = (reference.startTime || 0) - (mediaStartTime || 0);
this.textSequenceModeOffset_.resolve(textOffset);

// Finally, clear the buffer.
Expand All @@ -636,22 +648,43 @@ shaka.media.MediaSourceEngine = class {
sourceBuffer.mode = SEQUENCE;
}

if (startTime != null && this.sequenceMode_ &&
contentType != ContentType.TEXT) {
if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
// In sequence mode, for non-text streams, if we just cleared the buffer
// and are performing an unbuffered seek, we need to set a new
// timestampOffset on the sourceBuffer.
if (seeked) {
const timestampOffset = /** @type {number} */ (startTime);
const timestampOffset = reference.startTime;
this.enqueueOperation_(
contentType,
() => this.setTimestampOffset_(contentType, timestampOffset));
}
}

await this.enqueueOperation_(
contentType,
() => this.append_(contentType, data));
let bufferedBefore = null;

await this.enqueueOperation_(contentType, () => {
if (goog.DEBUG && reference) {
bufferedBefore = this.getBuffered_(contentType);
}
this.append_(contentType, data);
});

if (goog.DEBUG && reference) {
const bufferedAfter = this.getBuffered_(contentType);
const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
bufferedBefore, bufferedAfter);
if (newBuffered) {
const segmentDuration = reference.endTime - reference.startTime;
if (Math.abs(newBuffered.start - reference.startTime) >
segmentDuration / 2) {
shaka.log.error('Possible encoding problem detected!',
'Unexpected buffered range for reference', reference,
'from URIs', reference.getUris(),
'should be', {start: reference.startTime, end: reference.endTime},
'but got', newBuffered);
}
}
}
}

/**
Expand Down
7 changes: 3 additions & 4 deletions lib/media/streaming_engine.js
Original file line number Diff line number Diff line change
Expand Up @@ -1555,8 +1555,8 @@ shaka.media.StreamingEngine = class {
const hasClosedCaptions = mediaState.stream.closedCaptions &&
mediaState.stream.closedCaptions.size > 0;
await this.playerInterface_.mediaSourceEngine.appendBuffer(
mediaState.type, initSegment, /* startTime= */ null,
/* endTime= */ null, hasClosedCaptions);
mediaState.type, initSegment, /* reference= */ null,
hasClosedCaptions);
} catch (error) {
mediaState.lastInitSegmentReference = null;
throw error;
Expand Down Expand Up @@ -1609,8 +1609,7 @@ shaka.media.StreamingEngine = class {
await this.playerInterface_.mediaSourceEngine.appendBuffer(
mediaState.type,
segment,
reference.startTime,
reference.endTime,
reference,
hasClosedCaptions,
seeked);
this.destroyer_.ensureNotDestroyed();
Expand Down
46 changes: 46 additions & 0 deletions lib/media/time_ranges_utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -155,4 +155,50 @@ shaka.media.TimeRangesUtils = class {
}
return ret;
}

/**
* This operation can be potentially EXPENSIVE and should only be done in
* debug builds for debugging purposes.
*
* @param {TimeRanges} oldRanges
* @param {TimeRanges} newRanges
* @return {?shaka.extern.BufferedRange} The last added range,
* chronologically by presentation time.
*/
static computeAddedRange(oldRanges, newRanges) {
const TimeRangesUtils = shaka.media.TimeRangesUtils;

if (!oldRanges || !oldRanges.length) {
return null;
}
if (!newRanges || !newRanges.length) {
return TimeRangesUtils.getBufferedInfo(newRanges).pop();
}

const newRangesReversed =
TimeRangesUtils.getBufferedInfo(newRanges).reverse();
const oldRangesReversed =
TimeRangesUtils.getBufferedInfo(oldRanges).reverse();
for (const newRange of newRangesReversed) {
let foundOverlap = false;
theodab marked this conversation as resolved.
Show resolved Hide resolved

for (const oldRange of oldRangesReversed) {
if (oldRange.end >= newRange.start && oldRange.end <= newRange.end) {
foundOverlap = true;

// If the new range goes beyond the corresponding old one, the
// difference is newly-added.
if (newRange.end > oldRange.end) {
return {start: oldRange.end, end: newRange.end};
}
}
}

if (!foundOverlap) {
return newRange;
}
}

return null;
}
};
32 changes: 24 additions & 8 deletions test/media/drm_engine_integration.js
Original file line number Diff line number Diff line change
Expand Up @@ -209,10 +209,10 @@ describe('DrmEngine', () => {
await drmEngine.initForPlayback(variants, manifest.offlineSessionIds);
await drmEngine.attach(video);
await mediaSourceEngine.appendBuffer(
ContentType.VIDEO, videoInitSegment, null, null,
ContentType.VIDEO, videoInitSegment, null,
/* hasClosedCaptions= */ false);
await mediaSourceEngine.appendBuffer(
ContentType.AUDIO, audioInitSegment, null, null,
ContentType.AUDIO, audioInitSegment, null,
/* hasClosedCaptions= */ false);
await encryptedEventSeen;
// With PlayReady, a persistent license policy can cause a different
Expand Down Expand Up @@ -245,11 +245,13 @@ describe('DrmEngine', () => {
}
}

const reference = dummyReference(0, 10);

await mediaSourceEngine.appendBuffer(
ContentType.VIDEO, videoSegment, 0, 10,
ContentType.VIDEO, videoSegment, reference,
/* hasClosedCaptions= */ false);
await mediaSourceEngine.appendBuffer(
ContentType.AUDIO, audioSegment, 0, 10,
ContentType.AUDIO, audioSegment, reference,
/* hasClosedCaptions= */ false);

expect(video.buffered.end(0)).toBeGreaterThan(0);
Expand Down Expand Up @@ -305,10 +307,10 @@ describe('DrmEngine', () => {
await drmEngine.initForPlayback(variants, manifest.offlineSessionIds);
await drmEngine.attach(video);
await mediaSourceEngine.appendBuffer(
ContentType.VIDEO, videoInitSegment, null, null,
ContentType.VIDEO, videoInitSegment, null,
/* hasClosedCaptions= */ false);
await mediaSourceEngine.appendBuffer(
ContentType.AUDIO, audioInitSegment, null, null,
ContentType.AUDIO, audioInitSegment, null,
/* hasClosedCaptions= */ false);
await encryptedEventSeen;

Expand All @@ -326,11 +328,13 @@ describe('DrmEngine', () => {
}
}

const reference = dummyReference(0, 10);

await mediaSourceEngine.appendBuffer(
ContentType.VIDEO, videoSegment, 0, 10,
ContentType.VIDEO, videoSegment, reference,
/* hasClosedCaptions= */ false);
await mediaSourceEngine.appendBuffer(
ContentType.AUDIO, audioSegment, 0, 10,
ContentType.AUDIO, audioSegment, reference,
/* hasClosedCaptions= */ false);

expect(video.buffered.end(0)).toBeGreaterThan(0);
Expand All @@ -345,4 +349,16 @@ describe('DrmEngine', () => {
expect(video.currentTime).toBeGreaterThan(0);
});
}); // describe('ClearKey')

/**
 * Builds a minimal SegmentReference covering the given time range, for use
 * with MediaSourceEngine.appendBuffer in these tests.
 *
 * @param {number} startTime
 * @param {number} endTime
 * @return {!shaka.media.SegmentReference}
 */
function dummyReference(startTime, endTime) {
  const getUris = () => ['foo://bar'];
  return new shaka.media.SegmentReference(
      startTime, endTime, getUris,
      /* startByte= */ 0,
      /* endByte= */ null,
      /* initSegmentReference= */ null,
      /* timestampOffset= */ 0,
      /* appendWindowStart= */ 0,
      /* appendWindowEnd= */ Infinity);
}
});
34 changes: 24 additions & 10 deletions test/media/media_source_engine_integration.js
Original file line number Diff line number Diff line change
Expand Up @@ -57,32 +57,32 @@ describe('MediaSourceEngine', () => {

function appendInit(type) {
const segment = generators[type].getInitSegment(Date.now() / 1000);
const reference = null;
return mediaSourceEngine.appendBuffer(
type, segment, null, null, /* hasClosedCaptions= */ false);
type, segment, reference, /* hasClosedCaptions= */ false);
}

function append(type, segmentNumber) {
const segment = generators[type]
.getSegment(segmentNumber, Date.now() / 1000);
const reference = dummyReference(type, segmentNumber);
return mediaSourceEngine.appendBuffer(
type, segment, null, null, /* hasClosedCaptions= */ false);
type, segment, reference, /* hasClosedCaptions= */ false);
}

// The start time and end time should be null for init segment with closed
// captions.
function appendInitWithClosedCaptions(type) {
const segment = generators[type].getInitSegment(Date.now() / 1000);
return mediaSourceEngine.appendBuffer(type, segment, /* startTime= */ null,
/* endTime= */ null, /* hasClosedCaptions= */ true);
const reference = null;
return mediaSourceEngine.appendBuffer(
type, segment, reference, /* hasClosedCaptions= */ true);
}

// The start time and end time should be valid for the segments with closed
// captions.
function appendWithClosedCaptions(type, segmentNumber) {
const segment = generators[type]
.getSegment(segmentNumber, Date.now() / 1000);
return mediaSourceEngine.appendBuffer(type, segment, /* startTime= */ 0,
/* endTime= */ 2, /* hasClosedCaptions= */ true);
const reference = dummyReference(type, segmentNumber);
return mediaSourceEngine.appendBuffer(
type, segment, reference, /* hasClosedCaptions= */ true);
}

function buffered(type, time) {
Expand All @@ -93,6 +93,20 @@ describe('MediaSourceEngine', () => {
return mediaSourceEngine.bufferStart(type);
}

/**
 * Builds a SegmentReference whose time range corresponds to the given
 * segment number for the given content type, based on the test metadata's
 * segment duration.
 *
 * @param {string} type
 * @param {number} segmentNumber
 * @return {!shaka.media.SegmentReference}
 */
function dummyReference(type, segmentNumber) {
  const segmentDuration = metadata[type].segmentDuration;
  const startTime = segmentNumber * segmentDuration;
  const endTime = startTime + segmentDuration;
  const getUris = () => ['foo://bar'];
  return new shaka.media.SegmentReference(
      startTime, endTime, getUris,
      /* startByte= */ 0,
      /* endByte= */ null,
      /* initSegmentReference= */ null,
      /* timestampOffset= */ 0,
      /* appendWindowStart= */ 0,
      /* appendWindowEnd= */ Infinity);
}

function remove(type, segmentNumber) {
const start = segmentNumber * metadata[type].segmentDuration;
const end = (segmentNumber + 1) * metadata[type].segmentDuration;
Expand Down
Loading