chore: Create getTimestampAndDispatchMetadata_ function in MediaSourceEngine #5322

Merged · 1 commit · Jun 16, 2023
167 changes: 81 additions & 86 deletions lib/media/media_source_engine.js
@@ -632,75 +632,29 @@ shaka.media.MediaSourceEngine = class {
}

/**
* Enqueue an operation to append data to the SourceBuffer.
* Start and end times are needed for TextEngine, but not for MediaSource.
* Start and end times may be null for initialization segments; if present
* they are relative to the presentation timeline.
*
* @param {shaka.util.ManifestParserUtils.ContentType} contentType
* @param {!BufferSource} data
* @param {?shaka.media.SegmentReference} reference The segment reference
* we are appending, or null for init segments
* @param {shaka.extern.Stream} stream
* @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
* captions
* @param {boolean=} seeked True if we just seeked
* @param {boolean=} adaptation True if we just automatically switched active
* variant(s).
* @return {!Promise}
* @param {!string} mimeType
* @param {!number} timestampOffset
* @return {number}
* @private
*/
async appendBuffer(
contentType, data, reference, stream, hasClosedCaptions, seeked = false,
adaptation = false) {
const ContentType = shaka.util.ManifestParserUtils.ContentType;

if (contentType == ContentType.TEXT) {
if (this.sequenceMode_) {
// This won't be known until the first video segment is appended.
const offset = await this.textSequenceModeOffset_;
this.textEngine_.setTimestampOffset(offset);
}
await this.textEngine_.appendBuffer(
data,
reference ? reference.startTime : null,
reference ? reference.endTime : null);
return;
}

const attemptTimestampOffsetCalculation = !this.sequenceMode_ &&
this.manifestType_ == shaka.media.ManifestParser.HLS &&
!this.ignoreManifestTimestampsInSegmentsMode_;

let timestampOffset = this.sourceBuffers_[contentType].timestampOffset;
getTimestampAndDispatchMetadata_(contentType, data, reference, mimeType,
timestampOffset) {
let timestamp = null;

const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
let mimeType = this.sourceBufferTypes_[contentType];
if (this.transmuxers_[contentType]) {
mimeType = this.transmuxers_[contentType].getOrginalMimeType();
}
if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
if (frames.length && reference) {
if (attemptTimestampOffsetCalculation) {
const metadataTimestamp = frames.find((frame) => {
return frame.description ===
'com.apple.streaming.transportStreamTimestamp';
});
if (metadataTimestamp && metadataTimestamp.data) {
const calculatedTimestampOffset = reference.startTime -
Math.round(metadataTimestamp.data) / 1000;
const timestampOffsetDifference =
Math.abs(timestampOffset - calculatedTimestampOffset);
if (timestampOffsetDifference >= 0.1 || seeked || adaptation) {
timestampOffset = calculatedTimestampOffset;
this.enqueueOperation_(contentType, () =>
this.abort_(contentType));
this.enqueueOperation_(
contentType,
() => this.setTimestampOffset_(
contentType, timestampOffset));
}
}
const metadataTimestamp = frames.find((frame) => {
return frame.description ===
'com.apple.streaming.transportStreamTimestamp';
});
if (metadataTimestamp && metadataTimestamp.data) {
timestamp = Math.round(metadataTimestamp.data) / 1000;
}
/** @private {shaka.extern.ID3Metadata} */
const metadata = {
@@ -712,8 +666,7 @@ shaka.media.MediaSourceEngine = class {
};
this.onMetadata_([metadata], /* offset= */ 0, reference.endTime);
}
} else if (attemptTimestampOffsetCalculation &&
mimeType.includes('/mp4') &&
} else if (mimeType.includes('/mp4') &&
reference && reference.timestampOffset == 0 &&
reference.initSegmentReference &&
reference.initSegmentReference.timescale) {
@@ -738,44 +691,86 @@ shaka.media.MediaSourceEngine = class {
box.parser.stop();
}).parse(data, /* partialOkay= */ true);
if (parsedMedia) {
const calculatedTimestampOffset = reference.startTime - startTime;
const timestampOffsetDifference =
Math.abs(timestampOffset - calculatedTimestampOffset);
if (timestampOffsetDifference >= 0.1 || seeked || adaptation) {
timestampOffset = calculatedTimestampOffset;
this.enqueueOperation_(contentType, () => this.abort_(contentType));
this.enqueueOperation_(
contentType,
() => this.setTimestampOffset_(contentType, timestampOffset));
}
timestamp = startTime;
}
}
} else if (shaka.util.TsParser.probe(uint8ArrayData)) {
const tsParser = new shaka.util.TsParser().parse(uint8ArrayData);
const startTime = tsParser.getStartTime()[contentType];
if (startTime != null) {
const calculatedTimestampOffset = reference.startTime - startTime;
const timestampOffsetDifference =
Math.abs(timestampOffset - calculatedTimestampOffset);
if (timestampOffsetDifference >= 0.1 || seeked || adaptation) {
timestampOffset = calculatedTimestampOffset;
// The SourceBuffer timestampOffset may or may not be set yet,
// so this is the timestamp offset that would eventually be
// computed for this segment either way.
if (attemptTimestampOffsetCalculation) {
this.enqueueOperation_(contentType, () => this.abort_(contentType));
this.enqueueOperation_(
contentType,
() => this.setTimestampOffset_(contentType, timestampOffset));
}
}
timestamp = startTime;
}
const metadata = tsParser.getMetadata();
if (metadata.length) {
this.onMetadata_(metadata, timestampOffset,
reference ? reference.endTime : null);
}
}
return timestamp;
}
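
For context on the raw-format branch above: for raw AAC/MP3 segments, HLS carries the segment's timestamp in an ID3 frame whose description is `com.apple.streaming.transportStreamTimestamp`, and the helper converts its payload from milliseconds to seconds. A minimal standalone sketch of that lookup, assuming `frames` is shaped like the output of `shaka.util.Id3Utils.getID3Frames` (objects with `description` and `data` fields); the function name is hypothetical:

```js
// Hypothetical standalone version of the ID3 lookup above. Assumes
// each frame is { description: string, data: * }, as produced by
// shaka.util.Id3Utils.getID3Frames.
function timestampFromId3Frames(frames) {
  const metadataTimestamp = frames.find((frame) => {
    return frame.description ===
        'com.apple.streaming.transportStreamTimestamp';
  });
  if (metadataTimestamp && metadataTimestamp.data) {
    // The payload is in milliseconds; convert to seconds, as above.
    return Math.round(metadataTimestamp.data) / 1000;
  }
  return null;  // No usable timestamp in this segment's ID3 frames.
}
```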
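
The MPEG-2 TS branch can be read the same way: `shaka.util.TsParser.probe` sniffs for transport-stream sync bytes, and `getStartTime()` maps a content type (`'audio'` or `'video'`) to a start time in seconds. (The collapsed MP4 branch does the equivalent for fMP4, deriving `startTime` from the media using the timescale on `reference.initSegmentReference`.) A sketch of the TS path in isolation, under the assumption that `data` is a complete TS segment; the function name is again hypothetical:

```js
// Sketch of the TS branch in isolation, using the same shaka.util
// calls that appear above.
function tsStartTime(data, contentType) {
  const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
  if (!shaka.util.TsParser.probe(uint8ArrayData)) {
    return null;  // Not an MPEG-2 transport stream.
  }
  const tsParser = new shaka.util.TsParser().parse(uint8ArrayData);
  const startTime = tsParser.getStartTime()[contentType];
  return startTime != null ? startTime : null;
}
```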

/**
* Enqueue an operation to append data to the SourceBuffer.
* Start and end times are needed for TextEngine, but not for MediaSource.
* Start and end times may be null for initialization segments; if present
* they are relative to the presentation timeline.
*
* @param {shaka.util.ManifestParserUtils.ContentType} contentType
* @param {!BufferSource} data
* @param {?shaka.media.SegmentReference} reference The segment reference
* we are appending, or null for init segments
* @param {shaka.extern.Stream} stream
* @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
* captions
* @param {boolean=} seeked True if we just seeked
* @param {boolean=} adaptation True if we just automatically switched active
* variant(s).
* @return {!Promise}
*/
async appendBuffer(
contentType, data, reference, stream, hasClosedCaptions, seeked = false,
adaptation = false) {
const ContentType = shaka.util.ManifestParserUtils.ContentType;

if (contentType == ContentType.TEXT) {
if (this.sequenceMode_) {
// This won't be known until the first video segment is appended.
const offset = await this.textSequenceModeOffset_;
this.textEngine_.setTimestampOffset(offset);
}
await this.textEngine_.appendBuffer(
data,
reference ? reference.startTime : null,
reference ? reference.endTime : null);
return;
}

const attemptTimestampOffsetCalculation = !this.sequenceMode_ &&
this.manifestType_ == shaka.media.ManifestParser.HLS &&
!this.ignoreManifestTimestampsInSegmentsMode_;

let timestampOffset = this.sourceBuffers_[contentType].timestampOffset;

let mimeType = this.sourceBufferTypes_[contentType];
if (this.transmuxers_[contentType]) {
mimeType = this.transmuxers_[contentType].getOrginalMimeType();
}
const timestamp = this.getTimestampAndDispatchMetadata_(
contentType, data, reference, mimeType, timestampOffset);
if (attemptTimestampOffsetCalculation) {
const calculatedTimestampOffset = reference.startTime - timestamp;
const timestampOffsetDifference =
Math.abs(timestampOffset - calculatedTimestampOffset);
if (timestampOffsetDifference >= 0.1 || seeked || adaptation) {
timestampOffset = calculatedTimestampOffset;
if (attemptTimestampOffsetCalculation) {
this.enqueueOperation_(contentType, () => this.abort_(contentType));
this.enqueueOperation_(
contentType,
() => this.setTimestampOffset_(contentType, timestampOffset));
}
}
}
if (hasClosedCaptions && contentType == ContentType.VIDEO) {
if (!this.textEngine_) {
this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
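
A note on the offset logic that now lives in appendBuffer: the SourceBuffer's timestampOffset is only rewritten when the manifest-declared start time and the media-derived timestamp disagree by 100 ms or more, or right after a seek or adaptation, when a stale offset is likely. A sketch of that decision on its own (hypothetical name; the 0.1-second threshold matches the code above):

```js
// Returns the new timestamp offset to apply, or null when the current
// SourceBuffer offset is already within 100 ms of the calculated one
// and no seek/adaptation forces an update.
function maybeNewTimestampOffset(referenceStartTime, mediaTimestamp,
    currentOffset, seeked, adaptation) {
  const calculatedTimestampOffset = referenceStartTime - mediaTimestamp;
  const timestampOffsetDifference =
      Math.abs(currentOffset - calculatedTimestampOffset);
  if (timestampOffsetDifference >= 0.1 || seeked || adaptation) {
    return calculatedTimestampOffset;
  }
  return null;
}
```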