diff --git a/demo/full/index.html b/demo/full/index.html index ca5924355c..87df6b5e3b 100644 --- a/demo/full/index.html +++ b/demo/full/index.html @@ -8,7 +8,6 @@ - RxPlayer - CANAL+ diff --git a/demo/full/scripts/components/GitHubButton.tsx b/demo/full/scripts/components/GitHubButton.tsx new file mode 100644 index 0000000000..21ca68885b --- /dev/null +++ b/demo/full/scripts/components/GitHubButton.tsx @@ -0,0 +1,61 @@ +import * as React from "react"; +import { render } from "github-buttons"; + +const { + useEffect, + useRef, +} = React; + +const GitHubButton = ({ + href, + ariaLabel, + dataColorScheme = "dark_high_contrast", + dataIcon, + dataShowCount, + dataSize = "large", + dataText, + title, + children, +}: { + href: string; + ariaLabel?: string; + dataColorScheme?: string; + dataIcon?: string; + dataShowCount?: boolean | string; + dataSize?: string; + dataText?: string; + title?: string; + children?: React.ReactNode; +}): JSX.Element => { + const aRef = useRef(null); + + useEffect(() => { + const aElement = aRef.current; + if (aElement !== null) { + render( + aElement, + newA => aElement?.parentNode?.replaceChild(newA, aElement), + ); + } + }); + + return ( + + + {children} + + + ); +}; + +export default GitHubButton; \ No newline at end of file diff --git a/demo/full/scripts/controllers/Main.tsx b/demo/full/scripts/controllers/Main.tsx index 088f729521..0c3b8da97d 100644 --- a/demo/full/scripts/controllers/Main.tsx +++ b/demo/full/scripts/controllers/Main.tsx @@ -1,5 +1,6 @@ import RxPlayer from "../../../../src/minimal"; import * as React from "react"; +import GitHubButton from "../components/GitHubButton"; import Player from "./Player"; function MainComponent(): JSX.Element { @@ -20,22 +21,19 @@ function MainComponent(): JSX.Element { CANAL+ - - Star - - + - Fork - + ariaLabel="Fork the RxPlayer on GitHub" + dataIcon="octicon-repo-forked" + dataText="Fork" + /> diff --git a/demo/full/styles/style.css b/demo/full/styles/style.css index 40c3a08b47..38840c421f 100644 --- a/demo/full/styles/style.css +++ b/demo/full/styles/style.css @@ -1062,7 +1062,7 @@ input:checked + .slider:before { .header-links-buttons { display: flex; - align-items: baseline; + align-items: center; } .header-links-buttons > * { diff --git a/doc/api/Player_Errors.md b/doc/api/Player_Errors.md index 5fc9841340..c405903b0c 100644 --- a/doc/api/Player_Errors.md +++ b/doc/api/Player_Errors.md @@ -64,7 +64,7 @@ all have a `type` property equal to `"NETWORK_ERROR"`. #### codes -A NetworkError can only have the following code (`code` property): +An error of `type` `NETWORK_ERROR` can only have the following code (`code` property): - `"PIPELINE_LOAD_ERROR"`: the [Manifest](../Getting_Started/Glossary.md#manifest) or @@ -107,16 +107,30 @@ parsing) or from the browser itself (content playback). They all have a `type` property equal to `"MEDIA_ERROR"`. +Depending on its `code` property (listed below), a `MEDIA_ERROR` may also have +a supplementary `trackInfo` property, describing the track related to the issue. +The format of that property is decribed in the chapter below listed codes, and +the codes for which it is set are indicated in the corresponding code's +description below. + #### codes -A MediaError can have the following codes (`code` property): +An error of `type` `MEDIA_ERROR` can have the following codes (`code` property): - `"BUFFER_APPEND_ERROR"`: A media segment could not have been added to the corresponding media buffer. This often happens with malformed segments. 
+ For those errors, you may be able to know the characteristics of the track + linked to that segment by inspecting the error's `trackInfo` property, + described below. + - `"BUFFER_FULL_ERROR"`: The needed segment could not have been added because the corresponding media buffer was full. + For those errors, you may be able to know the characteristics of the track + linked to that segment by inspecting the error's `trackInfo` property, + described below. + - `"BUFFER_TYPE_UNKNOWN"`: The type of buffer considered (e.g. "audio" / "video" / "text") has no media buffer implementation in your build. @@ -125,6 +139,9 @@ A MediaError can have the following codes (`code` property): [Representations](../Getting_Started/Glossary.md#representation) (read quality) in a supported codec. + For those errors, you may be able to know the characteristics of the track + linked to that codec by inspecting the error's `trackInfo` property, described below. + - `"MANIFEST_PARSE_ERROR"`: Generic error to signal that the [Manifest](../Getting_Started/Glossary.md#structure_of_a_manifest_object) could not be parsed. @@ -193,10 +210,14 @@ A MediaError can have the following codes (`code` property): This is rarely a problem and may be encountered at the very start of a content when the initial segment's start is much later than expected. -- `"NO_PLAYABLE_REPRESENTATION"`: The currently chosen Adaptation does not +- `"NO_PLAYABLE_REPRESENTATION"`: One of the currently chosen tracks does not contain any playable Representation. This usually happens when every Representation has been blacklisted due to encryption limitations. + For those errors, you may be able to know the characteristics of the + corresponding track by inspecting the error's `trackInfo` property, described + below. + - `"MANIFEST_UPDATE_ERROR"`: This error should never be emitted as it is handled internally by the RxPlayer. Please open an issue if you encounter it. @@ -211,16 +232,174 @@ A MediaError can have the following codes (`code` property): It is triggered when a time we initially thought to be in the bounds of the Manifest actually does not link to any "Period" of the Manifest. +#### `trackInfo` property + +As described in the corresponding code's documentation, a supplementary +`trackInfo` property may be set on a `MEDIA_ERROR` depending on its `code` +property. + +That `trackInfo` describes, when it makes sense, the characteristics of the track +linked to an error. For example, you may want to know which video track led to a +`BUFFER_APPEND_ERROR` and thus might be linked to corrupted segments. + +The `trackInfo` property itself has two sub-properties: + + - `type`: The type of track: `"audio"` for an audio track, `"text"` for a text + track, or `"video"` for a video track. + + - `track`: Characteristics of the track. Its format depends on the + `trackInfo`'s `type` property and is described below. + +##### For video tracks + +When `trackInfo.type` is set to `"video"`, `track` describes a video track. It +contains the following properties: + + - `id` (`string`): The id used to identify this track. No other + video track for the same [Period](../Getting_Started/Glossary.md#period) + will have the same `id`. + + - `label` (`string|undefined`): A human-readable label that may be displayed in + the user interface providing a choice between video tracks. + + This information is usually set only if the current Manifest contains one.
+ + - `representations` (`Array.<Object>`): + [Representations](../Getting_Started/Glossary.md#representation) of this + video track, with attributes: + + - `id` (`string`): The id used to identify this Representation. + No other Representation from this track will have the same `id`. + + - `bitrate` (`Number`): The bitrate of this Representation, in bits per + second. + + - `width` (`Number|undefined`): The width of video, in pixels. + + - `height` (`Number|undefined`): The height of video, in pixels. + + - `codec` (`string|undefined`): The video codec the Representation is + in, as announced in the corresponding Manifest. + + - `frameRate` (`string|undefined`): The video frame rate. + + - `hdrInfo` (`Object|undefined`): Information about the HDR + characteristics of the track. + (see [HDR support documentation](./Miscellaneous/hdr.md#hdrinfo)) + + - `signInterpreted` (`Boolean|undefined`): If set to `true`, this track is + known to contain an interpretation in sign language. + If set to `false`, the track is known to not contain that type of content. + If not set or set to `undefined`, we don't know whether that video track + contains an interpretation in sign language. + + - `isTrickModeTrack` (`Boolean|undefined`): If set to `true`, this track + is a trick mode track. This type of track provides video content that is + often encoded with a very low framerate so it can be played more + efficiently at a much higher speed. + + - `trickModeTracks` (`Array.<Object> | undefined`): Trick mode video tracks + attached to this video track. + + Each of those objects contains the same properties as a regular video track + (the same properties documented here). + + If this property is either `undefined` or not set, then this track has no + linked trick mode video track. + +##### For audio tracks + +When `trackInfo.type` is set to `"audio"`, `track` describes an audio track. It +contains the following properties: + +- `id` (`Number|string`): The id used to identify this track. No other + audio track for the same [Period](../Getting_Started/Glossary.md#period) + will have the same `id`. + +- `language` (`string`): The language the audio track is in, as set in the + [Manifest](../Getting_Started/Glossary.md#manifest). + +- `normalized` (`string`): An attempt to translate the `language` + property into an ISO 639-3 language code (for now, only translations + from ISO 639-1 and ISO 639-3 language codes are supported). If the translation attempt + fails (no corresponding ISO 639-3 language code is found), it will equal the + value of `language`. + +- `audioDescription` (`Boolean`): Whether the track is an audio + description of what is happening on the screen. + +- `dub` (`Boolean|undefined`): If set to `true`, this audio track is a + "dub", meaning it was recorded in a language other than the original. + If set to `false`, we know that this audio track is in an original language. + This property is `undefined` if we do not know whether it is in an original + language. + +- `label` (`string|undefined`): A human-readable label that may be displayed in + the user interface providing a choice between audio tracks. + + This information is usually set only if the current Manifest contains one. + +- `representations` (`Array.<Object>`): + [Representations](../Getting_Started/Glossary.md#representation) of this audio track, with + attributes: + + - `id` (`string`): The id used to identify this Representation. + No other Representation from this track will have the same `id`.
+ + - `bitrate` (`Number`): The bitrate of this Representation, in bits per + second. + + - `codec` (`string|undefined`): The audio codec the Representation is + in, as announced in the corresponding Manifest. + +##### For text tracks + +When `trackInfo.type` is set to `"text"`, `track` describes a text track. It +contains the following properties: + +- `id` (`string`): The id used to identify this track. No other + text track for the same [Period](../Getting_Started/Glossary.md#period) + will have the same `id`. + +- `language` (`string`): The language the text track is in, as set in the + [Manifest](../Getting_Started/Glossary.md#manifest). + +- `normalized` (`string`): An attempt to translate the `language` + property into an ISO 639-3 language code (for now, only translations + from ISO 639-1 and ISO 639-3 language codes are supported). If the translation attempt + fails (no corresponding ISO 639-3 language code is found), it will equal the + value of `language`. + +- `label` (`string|undefined`): A human-readable label that may be displayed in + the user interface providing a choice between text tracks. + + This information is usually set only if the current Manifest contains one. + +- `closedCaption` (`Boolean`): Whether the track is specially adapted for + the hard of hearing or not. + +- `forced` (`Boolean`): If `true`, this text track is meant to be displayed by + default if no other text track is selected. + + It is often used to clarify dialogue, alternate languages, texted graphics or + location and person identification. + + ### ENCRYPTED_MEDIA_ERROR -Those errors are linked to the Encrypted Media Extensions. They concern various -DRM-related problems. +Those errors are linked to the "Encrypted Media Extensions" API. +They concern various DRM-related problems. They all have a `type` property equal to `"ENCRYPTED_MEDIA_ERROR"`. +When its code is set to `KEY_STATUS_CHANGE_ERROR`, an ENCRYPTED_MEDIA_ERROR +generally also has a `keyStatuses` property, which is documented in the +corresponding `KEY_STATUS_CHANGE_ERROR` code explanation below. + #### codes -An EncryptedMediaError can have the following codes (`code` property): +An error of `type` `ENCRYPTED_MEDIA_ERROR` can have the following codes (`code` +property): - `"INCOMPATIBLE_KEYSYSTEMS"`: None of the provided key systems was compatible with the current browser. @@ -295,7 +474,7 @@ They all have a `type` property equal to `"OTHER_ERROR"`.
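
To make the error documentation above more concrete, here is a minimal sketch of how an application could react to a fatal RxPlayer error and read the new `trackInfo` property. It assumes the usual RxPlayer `"error"` event; the helper name, the partial property shapes, and the logging are purely illustrative and not part of this change.

```ts
import RxPlayer from "rx-player";

// Illustrative helper (not part of the RxPlayer API): logs the most useful
// information carried by a fatal player error, including the `trackInfo`
// property documented above for some MEDIA_ERROR codes.
function logFatalPlayerError(error: unknown): void {
  if (!(error instanceof Error)) {
    console.error("Unknown error:", error);
    return;
  }
  // Partial, illustrative view of the documented error shape
  const playerError = error as Error & {
    type?: string;
    code?: string;
    trackInfo?: {
      type: "audio" | "video" | "text";
      track: { id: string | number };
    };
  };
  console.error(
    `Playback stopped: ${playerError.type ?? "unknown type"} ` +
    `(${playerError.code ?? "unknown code"})`
  );
  if (playerError.type === "MEDIA_ERROR" && playerError.trackInfo !== undefined) {
    // e.g. for a BUFFER_APPEND_ERROR, identify which track may contain
    // corrupted segments
    const { type, track } = playerError.trackInfo;
    console.error(`Problematic ${type} track:`, track.id);
  }
}

const videoElement = document.querySelector("video");
if (videoElement !== null) {
  const player = new RxPlayer({ videoElement });
  player.addEventListener("error", logFatalPlayerError);
}
```
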
#### codes -An OtherError can have the following codes (`code` property): +An error of `type` `OTHER_ERROR` can have the following codes (`code` property): - `"PIPELINE_LOAD_ERROR"`: The [Manifest](../Getting_Started/Glossary.md#structure_of_a_manifest_object) or segment diff --git a/package-lock.json b/package-lock.json index e6e98638f1..2e56256b05 100644 --- a/package-lock.json +++ b/package-lock.json @@ -30,7 +30,7 @@ "babel-loader": "9.1.2", "chai": "4.3.7", "core-js": "3.30.0", - "docgen.ico": "0.1.2", + "docgen.ico": "^0.2.2", "esbuild": "0.17.15", "eslint": "8.37.0", "eslint-plugin-ban": "1.6.0", @@ -39,6 +39,7 @@ "eslint-plugin-react": "7.32.2", "esm": "3.2.25", "express": "4.18.2", + "github-buttons": "2.27.0", "html-entities": "2.3.3", "jest": "29.5.0", "jest-environment-jsdom": "29.5.0", @@ -5324,9 +5325,9 @@ } }, "node_modules/docgen.ico": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/docgen.ico/-/docgen.ico-0.1.2.tgz", - "integrity": "sha512-57pfNrW7yVCaKAjqs+PRoDs4CjBgqXpJpQSe+M9uIvWuLp1Yn18th9omVV5LkY8zv92JofSqWukvLpsJ7mC76Q==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/docgen.ico/-/docgen.ico-0.2.2.tgz", + "integrity": "sha512-vXOwHYuHR+l+/V7R22Ph2bmnG9+Km1CdxLGZ/chJF8sDz4dP/axNwyvmt6Mvy1vJUBMTa4HbNdO1JhLHLCjnvw==", "dev": true, "dependencies": { "cheerio": "1.0.0-rc.12", @@ -5335,7 +5336,7 @@ "markdown-it": "13.0.1" }, "bin": { - "docgen.ico": "src/index.js" + "docgen.ico": "build/index.js" } }, "node_modules/doctrine": { @@ -7085,6 +7086,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/github-buttons": { + "version": "2.27.0", + "resolved": "https://registry.npmjs.org/github-buttons/-/github-buttons-2.27.0.tgz", + "integrity": "sha512-PmfRMI2Rttg/2jDfKBeSl621sEznrsKF019SuoLdoNlO7qRUZaOyEI5Li4uW+79pVqnDtKfIEVuHTIJ5lgy64w==", + "dev": true + }, "node_modules/glob": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", @@ -17741,9 +17748,9 @@ } }, "docgen.ico": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/docgen.ico/-/docgen.ico-0.1.2.tgz", - "integrity": "sha512-57pfNrW7yVCaKAjqs+PRoDs4CjBgqXpJpQSe+M9uIvWuLp1Yn18th9omVV5LkY8zv92JofSqWukvLpsJ7mC76Q==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/docgen.ico/-/docgen.ico-0.2.2.tgz", + "integrity": "sha512-vXOwHYuHR+l+/V7R22Ph2bmnG9+Km1CdxLGZ/chJF8sDz4dP/axNwyvmt6Mvy1vJUBMTa4HbNdO1JhLHLCjnvw==", "dev": true, "requires": { "cheerio": "1.0.0-rc.12", @@ -19063,6 +19070,12 @@ "get-intrinsic": "^1.1.1" } }, + "github-buttons": { + "version": "2.27.0", + "resolved": "https://registry.npmjs.org/github-buttons/-/github-buttons-2.27.0.tgz", + "integrity": "sha512-PmfRMI2Rttg/2jDfKBeSl621sEznrsKF019SuoLdoNlO7qRUZaOyEI5Li4uW+79pVqnDtKfIEVuHTIJ5lgy64w==", + "dev": true + }, "glob": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", diff --git a/package.json b/package.json index 4904fdb695..fcb849b742 100644 --- a/package.json +++ b/package.json @@ -98,7 +98,7 @@ "babel-loader": "9.1.2", "chai": "4.3.7", "core-js": "3.30.0", - "docgen.ico": "0.1.2", + "docgen.ico": "^0.2.2", "esbuild": "0.17.15", "eslint": "8.37.0", "eslint-plugin-ban": "1.6.0", @@ -107,6 +107,7 @@ "eslint-plugin-react": "7.32.2", "esm": "3.2.25", "express": "4.18.2", + "github-buttons": "2.27.0", "html-entities": "2.3.3", "jest": "29.5.0", "jest-environment-jsdom": "29.5.0", diff --git a/src/compat/browser_detection.ts b/src/compat/browser_detection.ts index 87dd321829..4215b27d2f 100644 --- 
a/src/compat/browser_detection.ts +++ b/src/compat/browser_detection.ts @@ -60,6 +60,9 @@ let isWebOs2022 = false; /** `true` for Panasonic devices. */ let isPanasonic = false; +/** `true` for the PlayStation 5 game console. */ +let isPlayStation5 = false; + ((function findCurrentBrowser() : void { if (isNode) { return ; @@ -101,7 +104,9 @@ let isPanasonic = false; isSamsungBrowser = true; } - if (/Tizen/.test(navigator.userAgent)) { + if (navigator.userAgent.indexOf("PlayStation 5") !== -1) { + isPlayStation5 = true; + } else if (/Tizen/.test(navigator.userAgent)) { isTizen = true; // Inspired form: http://webostv.developer.lge.com/discover/specifications/web-engine/ @@ -136,6 +141,7 @@ export { isIEOrEdge, isFirefox, isPanasonic, + isPlayStation5, isSafariDesktop, isSafariMobile, isSamsungBrowser, diff --git a/src/compat/has_issues_with_high_media_source_duration.ts b/src/compat/has_issues_with_high_media_source_duration.ts new file mode 100644 index 0000000000..e3a4007289 --- /dev/null +++ b/src/compat/has_issues_with_high_media_source_duration.ts @@ -0,0 +1,27 @@ +import { isPlayStation5 } from "./browser_detection"; + +/** + * Some platforms have issues when the `MediaSource`'s `duration` property + * is set to a very high value (playback freezes) but not when setting it + * to `Infinity`, which is what the HTML spec as of now (2023-05-15) recommends + * for live contents. + * + * However setting the `MediaSource`'s `duration` property to `Infinity` seems + * more risky, considering all platforms we now support, than setting it at a + * relatively high ~2**32 value which is what we do generally. + * + * Moreover, setting it to `Infinity` require us to use another MSE API, + * `setLiveSeekableRange` to properly allow seeking. We're used to MSE issues so + * I'm not too confident of using another MSE API for all platforms directly. + * + * So this methods just return `true` based on a whitelist of platform for which + * it has been detected that high `duration` values cause issues but setting it + * to Infinity AND playing with `setLiveSeekableRange` does not. + * + * @returns {boolean} + */ +export default function hasIssuesWithHighMediaSourceDuration(): boolean { + // For now only seen on the Webkit present in the PlayStation 5, for which the + // alternative is known to work. 
+ return isPlayStation5; +} diff --git a/src/core/api/debug/render.ts b/src/core/api/debug/render.ts index d7826c1659..6f643ea4b6 100644 --- a/src/core/api/debug/render.ts +++ b/src/core/api/debug/render.ts @@ -29,7 +29,7 @@ export default function renderDebugElement( debugWrapperElt.style.backgroundColor = "#00000099"; debugWrapperElt.style.padding = "7px"; debugWrapperElt.style.fontSize = "13px"; - debugWrapperElt.style.fontFamily = "mono"; + debugWrapperElt.style.fontFamily = "mono, monospace"; debugWrapperElt.style.color = "white"; debugWrapperElt.style.display = "inline-block"; debugWrapperElt.style.bottom = "0px"; diff --git a/src/core/api/tracks_management/track_choice_manager.ts b/src/core/api/tracks_management/track_choice_manager.ts index f6b7b75b00..be92300a78 100644 --- a/src/core/api/tracks_management/track_choice_manager.ts +++ b/src/core/api/tracks_management/track_choice_manager.ts @@ -26,7 +26,6 @@ import { Representation, } from "../../../manifest"; import { - IAudioRepresentation, IAudioTrack, IAudioTrackPreference, IAvailableAudioTrack, @@ -34,7 +33,6 @@ import { IAvailableVideoTrack, ITextTrack, ITextTrackPreference, - IVideoRepresentation, IVideoTrack, IVideoTrackPreference, } from "../../../public_types"; @@ -42,6 +40,7 @@ import arrayFind from "../../../utils/array_find"; import arrayIncludes from "../../../utils/array_includes"; import isNullOrUndefined from "../../../utils/is_null_or_undefined"; import normalizeLanguage from "../../../utils/languages"; +import objectAssign from "../../../utils/object_assign"; import { ISharedReference } from "../../../utils/reference"; import SortedList from "../../../utils/sorted_list"; import takeFirstSet from "../../../utils/take_first_set"; @@ -618,19 +617,7 @@ export default class TrackChoiceManager { if (isNullOrUndefined(chosenTrack)) { return null; } - - const audioTrack : IAudioTrack = { - language: takeFirstSet(chosenTrack.language, ""), - normalized: takeFirstSet(chosenTrack.normalizedLanguage, ""), - audioDescription: chosenTrack.isAudioDescription === true, - id: chosenTrack.id, - representations: chosenTrack.representations.map(parseAudioRepresentation), - label: chosenTrack.label, - }; - if (chosenTrack.isDub === true) { - audioTrack.dub = true; - } - return audioTrack; + return chosenTrack.toAudioTrack(); } /** @@ -655,16 +642,7 @@ export default class TrackChoiceManager { if (isNullOrUndefined(chosenTextAdaptation)) { return null; } - - const formatted : ITextTrack = { - language: takeFirstSet(chosenTextAdaptation.language, ""), - normalized: takeFirstSet(chosenTextAdaptation.normalizedLanguage, ""), - closedCaption: chosenTextAdaptation.isClosedCaption === true, - id: chosenTextAdaptation.id, - label: chosenTextAdaptation.label, - forced: chosenTextAdaptation.isForcedSubtitles, - }; - return formatted; + return chosenTextAdaptation.toTextTrack(); } /** @@ -690,36 +668,7 @@ export default class TrackChoiceManager { return null; } const currAdaptation = chosenVideoAdaptation.adaptation; - - const trickModeTracks = currAdaptation.trickModeTracks !== undefined ? 
- currAdaptation.trickModeTracks.map((trickModeAdaptation) => { - const representations = trickModeAdaptation.representations - .map(parseVideoRepresentation); - const trickMode : IVideoTrack = { id: trickModeAdaptation.id, - representations, - isTrickModeTrack: true }; - if (trickModeAdaptation.isSignInterpreted === true) { - trickMode.signInterpreted = true; - } - return trickMode; - }) : - undefined; - - const videoTrack: IVideoTrack = { - id: currAdaptation.id, - representations: currAdaptation.representations.map(parseVideoRepresentation), - label: currAdaptation.label, - }; - if (currAdaptation.isSignInterpreted === true) { - videoTrack.signInterpreted = true; - } - if (currAdaptation.isTrickModeTrack === true) { - videoTrack.isTrickModeTrack = true; - } - if (trickModeTracks !== undefined) { - videoTrack.trickModeTracks = trickModeTracks; - } - return videoTrack; + return currAdaptation.toVideoTrack(); } /** @@ -743,20 +692,9 @@ export default class TrackChoiceManager { return audioInfos.adaptations .map((adaptation) => { - const formatted : IAvailableAudioTrack = { - language: takeFirstSet(adaptation.language, ""), - normalized: takeFirstSet(adaptation.normalizedLanguage, ""), - audioDescription: adaptation.isAudioDescription === true, - id: adaptation.id, - active: currentId === null ? false : - currentId === adaptation.id, - representations: adaptation.representations.map(parseAudioRepresentation), - label: adaptation.label, - }; - if (adaptation.isDub === true) { - formatted.dub = true; - } - return formatted; + const active = currentId === null ? false : + currentId === adaptation.id; + return objectAssign(adaptation.toAudioTrack(), { active }); }); } @@ -782,17 +720,9 @@ export default class TrackChoiceManager { return textInfos.adaptations .map((adaptation) => { - const formatted : IAvailableTextTrack = { - language: takeFirstSet(adaptation.language, ""), - normalized: takeFirstSet(adaptation.normalizedLanguage, ""), - closedCaption: adaptation.isClosedCaption === true, - id: adaptation.id, - active: currentId === null ? false : - currentId === adaptation.id, - label: adaptation.label, - forced: adaptation.isForcedSubtitles, - }; - return formatted; + const active = currentId === null ? false : + currentId === adaptation.id; + return objectAssign(adaptation.toTextTrack(), { active }); }); } @@ -817,37 +747,21 @@ export default class TrackChoiceManager { return videoInfos.adaptations .map((adaptation) => { - const trickModeTracks = adaptation.trickModeTracks !== undefined ? - adaptation.trickModeTracks.map((trickModeAdaptation) => { + const active = currentId === null ? false : + currentId === adaptation.id; + const track = adaptation.toVideoTrack(); + const trickModeTracks = track.trickModeTracks !== undefined ? + track.trickModeTracks.map((trickModeAdaptation) => { const isActive = currentId === null ? false : currentId === trickModeAdaptation.id; - const representations = trickModeAdaptation.representations - .map(parseVideoRepresentation); - const trickMode : IAvailableVideoTrack = { id: trickModeAdaptation.id, - representations, - isTrickModeTrack: true, - active: isActive }; - if (trickModeAdaptation.isSignInterpreted === true) { - trickMode.signInterpreted = true; - } - return trickMode; + return objectAssign(trickModeAdaptation, { active: isActive }); }) : - undefined; - - const formatted: IAvailableVideoTrack = { - id: adaptation.id, - active: currentId === null ? 
false : - currentId === adaptation.id, - representations: adaptation.representations.map(parseVideoRepresentation), - label: adaptation.label, - }; - if (adaptation.isSignInterpreted === true) { - formatted.signInterpreted = true; - } + []; + const availableTrack = objectAssign(track, { active }); if (trickModeTracks !== undefined) { - formatted.trickModeTracks = trickModeTracks; + availableTrack.trickModeTracks = trickModeTracks; } - return formatted; + return availableTrack; }); } @@ -1355,28 +1269,6 @@ function getPeriodItem( } } -/** - * Parse video Representation into a IVideoRepresentation. - * @param {Object} representation - * @returns {Object} - */ -function parseVideoRepresentation( - { id, bitrate, frameRate, width, height, codec, hdrInfo } : Representation -) : IVideoRepresentation { - return { id, bitrate, frameRate, width, height, codec, hdrInfo }; -} - -/** - * Parse audio Representation into a ITMAudioRepresentation. - * @param {Object} representation - * @returns {Object} - */ -function parseAudioRepresentation( - { id, bitrate, codec } : Representation -) : IAudioRepresentation { - return { id, bitrate, codec }; -} - function getRightVideoTrack( adaptation : Adaptation, isTrickModeEnabled : boolean diff --git a/src/core/init/media_source_content_initializer.ts b/src/core/init/media_source_content_initializer.ts index 914bf6c539..cebe87d166 100644 --- a/src/core/init/media_source_content_initializer.ts +++ b/src/core/init/media_source_content_initializer.ts @@ -502,8 +502,36 @@ export default class MediaSourceContentInitializer extends ContentInitializer { */ function handleStreamOrchestratorCallbacks() : IStreamOrchestratorCallbacks { return { - needsBufferFlush: () => - playbackObserver.setCurrentTime(mediaElement.currentTime + 0.001), + needsBufferFlush: () => { + const seekedTime = mediaElement.currentTime + 0.001; + playbackObserver.setCurrentTime(seekedTime); + + // Seek again once data begins to be buffered. + // This is sadly necessary on some browsers to avoid decoding + // issues after a flush. + // + // NOTE: there's in theory a potential race condition in the following + // logic as the callback could be called when media data is still + // being removed by the browser - which is an asynchronous process. + // The following condition checking for buffered data could thus lead + // to a false positive where we're actually checking previous data. + // For now, such scenario is avoided by setting the + // `includeLastObservation` option to `false` and calling + // `needsBufferFlush` once MSE media removal operations have been + // explicitely validated by the browser, but that's a complex and easy + // to break system. 
+ playbackObserver.listen((obs, stopListening) => { + if ( + // Data is buffered around the current position + obs.currentRange !== null || + // Or, for whatever reason, playback is already advancing + obs.position > seekedTime + 0.1 + ) { + stopListening(); + playbackObserver.setCurrentTime(obs.position + 0.001); + } + }, { includeLastObservation: false, clearSignal: cancelSignal }); + }, streamStatusUpdate(value) { // Announce discontinuities if found @@ -664,7 +692,7 @@ export default class MediaSourceContentInitializer extends ContentInitializer { this.trigger("activePeriodChanged", { period }); }); contentTimeBoundariesObserver.addEventListener("durationUpdate", (newDuration) => { - mediaSourceDurationUpdater.updateDuration(newDuration.duration, !newDuration.isEnd); + mediaSourceDurationUpdater.updateDuration(newDuration.duration, newDuration.isEnd); }); contentTimeBoundariesObserver.addEventListener("endOfStream", () => { if (endOfStreamCanceller === null) { @@ -683,7 +711,7 @@ export default class MediaSourceContentInitializer extends ContentInitializer { }); const currentDuration = contentTimeBoundariesObserver.getCurrentDuration(); mediaSourceDurationUpdater.updateDuration(currentDuration.duration, - !currentDuration.isEnd); + currentDuration.isEnd); return contentTimeBoundariesObserver; } diff --git a/src/core/init/utils/media_source_duration_updater.ts b/src/core/init/utils/media_source_duration_updater.ts index c8ce07a89e..44b00fbc3a 100644 --- a/src/core/init/utils/media_source_duration_updater.ts +++ b/src/core/init/utils/media_source_duration_updater.ts @@ -19,6 +19,8 @@ import { onSourceEnded, onSourceClose, } from "../../../compat/event_listeners"; +/* eslint-disable-next-line max-len */ +import hasIssuesWithHighMediaSourceDuration from "../../../compat/has_issues_with_high_media_source_duration"; import log from "../../../log"; import createSharedReference, { IReadOnlySharedReference, @@ -65,7 +67,7 @@ export default class MediaSourceDurationUpdater { * which `duration` attribute should be set on the `MediaSource` associated * * @param {number} newDuration - * @param {boolean} addTimeMargin - If set to `true`, the current content is + * @param {boolean} isRealEndKnown - If set to `false`, the current content is * a dynamic content (it might evolve in the future) and the `newDuration` * communicated might be greater still. 
In effect the * `MediaSourceDurationUpdater` will actually set a much higher value to the @@ -75,7 +77,7 @@ export default class MediaSourceDurationUpdater { */ public updateDuration( newDuration : number, - addTimeMargin : boolean + isRealEndKnown : boolean ) : void { if (this._currentMediaSourceDurationUpdateCanceller !== null) { this._currentMediaSourceDurationUpdateCanceller.cancel(); @@ -119,7 +121,7 @@ export default class MediaSourceDurationUpdater { recursivelyForceDurationUpdate(mediaSource, newDuration, - addTimeMargin, + isRealEndKnown, sourceBuffersUpdatingCanceller.signal); }, { clearSignal: msOpenStatusCanceller.signal, emitCurrentValue: true }); } @@ -146,26 +148,20 @@ export default class MediaSourceDurationUpdater { * * @param {MediaSource} mediaSource * @param {number} duration - * @param {boolean} addTimeMargin + * @param {boolean} isRealEndKnown * @returns {string} */ function setMediaSourceDuration( mediaSource: MediaSource, duration : number, - addTimeMargin : boolean + isRealEndKnown : boolean ) : MediaSourceDurationUpdateStatus { let newDuration = duration; - if (addTimeMargin) { - // Some targets poorly support setting a very high number for durations. - // Yet, in contents whose end is not yet known (e.g. live contents), we - // would prefer setting a value as high as possible to still be able to - // seek anywhere we want to (even ahead of the Manifest if we want to). - // As such, we put it at a safe default value of 2^32 excepted when the - // maximum position is already relatively close to that value, where we - // authorize exceptionally going over it. - newDuration = Math.max(Math.pow(2, 32), - newDuration + YEAR_IN_SECONDS); + if (!isRealEndKnown) { + newDuration = hasIssuesWithHighMediaSourceDuration() ? + Infinity : + getMaximumLiveSeekablePosition(duration); } let maxBufferedEnd : number = 0; @@ -198,6 +194,10 @@ function setMediaSourceDuration( try { log.info("Init: Updating duration", newDuration); mediaSource.duration = newDuration; + if (mediaSource.readyState === "open" && !isFinite(newDuration)) { + mediaSource.setLiveSeekableRange(0, + getMaximumLiveSeekablePosition(duration)); + } } catch (err) { log.warn("Duration Updater: Can't update duration on the MediaSource.", err instanceof Error ? err : ""); @@ -310,24 +310,36 @@ function createMediaSourceOpenReference( * * @param {MediaSource} mediaSource * @param {number} duration - * @param {boolean} addTimeMargin + * @param {boolean} isRealEndKnown * @param {Object} cancelSignal */ function recursivelyForceDurationUpdate( mediaSource : MediaSource, duration : number, - addTimeMargin : boolean, + isRealEndKnown : boolean, cancelSignal : CancellationSignal ) : void { - const res = setMediaSourceDuration(mediaSource, duration, addTimeMargin); + const res = setMediaSourceDuration(mediaSource, duration, isRealEndKnown); if (res === MediaSourceDurationUpdateStatus.Success) { return ; } const timeoutId = setTimeout(() => { unregisterClear(); - recursivelyForceDurationUpdate(mediaSource, duration, addTimeMargin, cancelSignal); + recursivelyForceDurationUpdate(mediaSource, duration, isRealEndKnown, cancelSignal); }, 2000); const unregisterClear = cancelSignal.register(() => { clearTimeout(timeoutId); }); } + +function getMaximumLiveSeekablePosition(contentLastPosition : number) : number { + // Some targets poorly support setting a very high number for seekable + // ranges. + // Yet, in contents whose end is not yet known (e.g. 
live contents), we + // would prefer setting a value as high as possible to still be able to + // seek anywhere we want to (even ahead of the Manifest if we want to). + // As such, we put it at a safe default value of 2^32 excepted when the + // maximum position is already relatively close to that value, where we + // authorize exceptionally going over it. + return Math.max(Math.pow(2, 32), contentLastPosition + YEAR_IN_SECONDS); +} diff --git a/src/core/segment_buffers/index.ts b/src/core/segment_buffers/index.ts index 5b90fec51c..462a852b8c 100644 --- a/src/core/segment_buffers/index.ts +++ b/src/core/segment_buffers/index.ts @@ -30,6 +30,7 @@ import { import { IBufferedChunk, IChunkContext, + IInsertedChunkInfos, } from "./inventory"; import SegmentBuffersStore, { ISegmentBufferOptions, @@ -49,6 +50,7 @@ export { IBufferedChunk, IChunkContext, + IInsertedChunkInfos, IPushChunkInfos, IPushedChunkData, diff --git a/src/core/stream/adaptation/utils/create_representation_estimator.ts b/src/core/stream/adaptation/utils/create_representation_estimator.ts index 1885ab74b7..76404ec18c 100644 --- a/src/core/stream/adaptation/utils/create_representation_estimator.ts +++ b/src/core/stream/adaptation/utils/create_representation_estimator.ts @@ -86,7 +86,8 @@ export default function getRepresentationEstimate( if (newRepr.length === 0) { const noRepErr = new MediaError("NO_PLAYABLE_REPRESENTATION", "No Representation in the chosen " + - adaptation.type + " Adaptation can be played"); + adaptation.type + " Adaptation can be played", + { adaptation }); cleanUp(); onFatalError(noRepErr); return; diff --git a/src/core/stream/period/period_stream.ts b/src/core/stream/period/period_stream.ts index a06db4fd66..87a71df09c 100644 --- a/src/core/stream/period/period_stream.ts +++ b/src/core/stream/period/period_stream.ts @@ -380,7 +380,8 @@ function getFirstDeclaredMimeType(adaptation : Adaptation) : string { if (representations.length === 0) { const noRepErr = new MediaError("NO_PLAYABLE_REPRESENTATION", "No Representation in the chosen " + - adaptation.type + " Adaptation can be played"); + adaptation.type + " Adaptation can be played", + { adaptation }); throw noRepErr; } return representations[0].getMimeTypeString(); diff --git a/src/core/stream/representation/utils/append_segment_to_buffer.ts b/src/core/stream/representation/utils/append_segment_to_buffer.ts index ef29535890..da9a180f36 100644 --- a/src/core/stream/representation/utils/append_segment_to_buffer.ts +++ b/src/core/stream/representation/utils/append_segment_to_buffer.ts @@ -22,6 +22,7 @@ import { MediaError } from "../../../../errors"; import { CancellationError, CancellationSignal } from "../../../../utils/task_canceller"; import { IReadOnlyPlaybackObserver } from "../../../api"; import { + IInsertedChunkInfos, IPushChunkInfos, SegmentBuffer, } from "../../../segment_buffers"; @@ -41,7 +42,7 @@ import forceGarbageCollection from "./force_garbage_collection"; export default async function appendSegmentToBuffer( playbackObserver : IReadOnlyPlaybackObserver, segmentBuffer : SegmentBuffer, - dataInfos : IPushChunkInfos, + dataInfos : IPushChunkInfos & { inventoryInfos: IInsertedChunkInfos }, cancellationSignal : CancellationSignal ) : Promise { try { @@ -55,7 +56,9 @@ export default async function appendSegmentToBuffer( const reason = appendError instanceof Error ? 
appendError.toString() : "An unknown error happened when pushing content"; - throw new MediaError("BUFFER_APPEND_ERROR", reason); + throw new MediaError("BUFFER_APPEND_ERROR", + reason, + { adaptation: dataInfos.inventoryInfos.adaptation }); } const { position } = playbackObserver.getReference().getValue(); const currentPos = position.pending ?? position.last; @@ -66,7 +69,9 @@ export default async function appendSegmentToBuffer( const reason = err2 instanceof Error ? err2.toString() : "Could not clean the buffer"; - throw new MediaError("BUFFER_FULL_ERROR", reason); + throw new MediaError("BUFFER_FULL_ERROR", + reason, + { adaptation: dataInfos.inventoryInfos.adaptation }); } } } diff --git a/src/core/stream/representation/utils/push_init_segment.ts b/src/core/stream/representation/utils/push_init_segment.ts index bd3c66612a..c0fecd995a 100644 --- a/src/core/stream/representation/utils/push_init_segment.ts +++ b/src/core/stream/representation/utils/push_init_segment.ts @@ -20,6 +20,7 @@ import Manifest, { Period, Representation, } from "../../../../manifest"; +import objectAssign from "../../../../utils/object_assign"; import { CancellationSignal } from "../../../../utils/task_canceller"; import { IReadOnlyPlaybackObserver } from "../../../api"; import { @@ -71,9 +72,14 @@ export default async function pushInitSegment( timestampOffset: 0, appendWindow: [ undefined, undefined ], codec }; + const inventoryInfos = objectAssign({ segment, + chunkSize: undefined, + start: 0, + end: 0 }, + content); await appendSegmentToBuffer(playbackObserver, segmentBuffer, - { data, inventoryInfos: null }, + { data, inventoryInfos }, cancelSignal); const buffered = segmentBuffer.getBufferedRanges(); return { content, segment, buffered, segmentData }; diff --git a/src/errors/__tests__/media_error.test.ts b/src/errors/__tests__/media_error.test.ts index 0fee2c42eb..782601397a 100644 --- a/src/errors/__tests__/media_error.test.ts +++ b/src/errors/__tests__/media_error.test.ts @@ -19,25 +19,25 @@ import MediaError from "../media_error"; describe("errors - MediaError", () => { it("should format a MediaError", () => { const reason = "test"; - const mediaError = new MediaError("BUFFER_FULL_ERROR", reason); + const mediaError = new MediaError("MEDIA_TIME_BEFORE_MANIFEST", reason); expect(mediaError).toBeInstanceOf(Error); expect(mediaError.name).toBe("MediaError"); expect(mediaError.type).toBe("MEDIA_ERROR"); - expect(mediaError.code).toBe("BUFFER_FULL_ERROR"); + expect(mediaError.code).toBe("MEDIA_TIME_BEFORE_MANIFEST"); expect(mediaError.fatal).toBe(false); - expect(mediaError.message).toBe("MediaError (BUFFER_FULL_ERROR) test"); + expect(mediaError.message).toBe("MediaError (MEDIA_TIME_BEFORE_MANIFEST) test"); }); it("should be able to set it as fatal", () => { const reason = "test"; - const mediaError = new MediaError("BUFFER_APPEND_ERROR", reason); + const mediaError = new MediaError("MEDIA_TIME_AFTER_MANIFEST", reason); mediaError.fatal = true; expect(mediaError).toBeInstanceOf(Error); expect(mediaError.name).toBe("MediaError"); expect(mediaError.type).toBe("MEDIA_ERROR"); - expect(mediaError.code).toBe("BUFFER_APPEND_ERROR"); + expect(mediaError.code).toBe("MEDIA_TIME_AFTER_MANIFEST"); expect(mediaError.fatal).toBe(true); - expect(mediaError.message).toBe("MediaError (BUFFER_APPEND_ERROR) test"); + expect(mediaError.message).toBe("MediaError (MEDIA_TIME_AFTER_MANIFEST) test"); }); it("should filter in a valid error code", () => { diff --git a/src/errors/index.ts b/src/errors/index.ts index 
4fd05aa36b..27441c4958 100644 --- a/src/errors/index.ts +++ b/src/errors/index.ts @@ -26,7 +26,9 @@ import { } from "./error_codes"; import formatError from "./format_error"; import isKnownError from "./is_known_error"; -import MediaError from "./media_error"; +import MediaError, { + IMediaErrorTrackContext, +} from "./media_error"; import NetworkError from "./network_error"; import OtherError from "./other_error"; import RequestError from "./request_error"; @@ -39,6 +41,7 @@ export { ErrorTypes, IErrorCode, IErrorType, + IMediaErrorTrackContext, formatError, MediaError as MediaError, NetworkError, diff --git a/src/errors/media_error.ts b/src/errors/media_error.ts index 2f5d17e3d0..78d8ffd6a3 100644 --- a/src/errors/media_error.ts +++ b/src/errors/media_error.ts @@ -14,12 +14,42 @@ * limitations under the License. */ +import { Adaptation } from "../manifest"; +import { + IAudioTrack, + ITextTrack, + IVideoTrack, +} from "../public_types"; import { ErrorTypes, IMediaErrorCode, } from "./error_codes"; import errorMessage from "./error_message"; +interface IAudioTrackMediaErrorContext { + type : "audio"; + track : IAudioTrack; +} + +interface IVideoTrackMediaErrorContext { + type : "video"; + track : IVideoTrack; +} + +interface ITextTrackMediaErrorContext { + type : "text"; + track : ITextTrack; +} + +export type IMediaErrorTrackContext = IAudioTrackMediaErrorContext | + IVideoTrackMediaErrorContext | + ITextTrackMediaErrorContext; + +type ICodeWithAdaptationType = "BUFFER_APPEND_ERROR" | + "BUFFER_FULL_ERROR" | + "NO_PLAYABLE_REPRESENTATION" | + "MANIFEST_INCOMPATIBLE_CODECS_ERROR"; + /** * Error linked to the media Playback. * @@ -31,13 +61,32 @@ export default class MediaError extends Error { public readonly type : "MEDIA_ERROR"; public readonly message : string; public readonly code : IMediaErrorCode; + public readonly trackInfo : IMediaErrorTrackContext | undefined; public fatal : boolean; /** * @param {string} code * @param {string} reason + * @param {Object|undefined} [context] */ - constructor(code : IMediaErrorCode, reason : string) { + constructor( + code : ICodeWithAdaptationType, + reason : string, + context: { + adaptation : Adaptation; + } + ); + constructor( + code : Exclude, + reason : string, + ); + constructor( + code : IMediaErrorCode, + reason : string, + context? : { + adaptation? 
: Adaptation | undefined; + } | undefined + ) { super(); // @see https://stackoverflow.com/questions/41102060/typescript-extending-error-class Object.setPrototypeOf(this, MediaError.prototype); @@ -48,5 +97,22 @@ export default class MediaError extends Error { this.code = code; this.message = errorMessage(this.name, this.code, reason); this.fatal = false; + const adaptation = context?.adaptation; + if (adaptation !== undefined) { + switch (adaptation.type) { + case "audio": + this.trackInfo = { type: "audio", + track: adaptation.toAudioTrack() }; + break; + case "video": + this.trackInfo = { type: "video", + track: adaptation.toVideoTrack() }; + break; + case "text": + this.trackInfo = { type: "text", + track: adaptation.toTextTrack() }; + break; + } + } } } diff --git a/src/manifest/__tests__/period.test.ts b/src/manifest/__tests__/period.test.ts index fe09ea7c27..4264cdc5da 100644 --- a/src/manifest/__tests__/period.test.ts +++ b/src/manifest/__tests__/period.test.ts @@ -149,25 +149,30 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", id: "57", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "58", isSupported: true, - representations: [] }; + representations: [], + toAudioTrack() { return audioAda1; } }; const audioAda2 = { type: "audio", id: "59", isSupported: true, - representations: [] }; + representations: [], + toAudioTrack() { return audioAda2; } }; const audio = [audioAda1, audioAda2]; const args = { id: "12", adaptations: { video, audio }, start: 0 }; let period = null; @@ -204,25 +209,30 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "57", isSupported: false, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audioAda2 = { type: "audio", id: "58", isSupported: false, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audio = [audioAda1, audioAda2]; const args = { id: "12", adaptations: { video, audio }, start: 0 }; let period = null; @@ -259,25 +269,30 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [] }; + representations: [], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [] }; + representations: [], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", id: "56", isSupported: true, - representations: [] }; + representations: [], + toVideoTrack() { return videoAda3; } }; const 
video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "58", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audioAda2 = { type: "audio", id: "59", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda2; } }; const audio = [audioAda1, audioAda2]; const args = { id: "12", adaptations: { video, audio }, start: 0 }; let period = null; @@ -314,25 +329,30 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", id: "56", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "58", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audioAda2 = { type: "audio", id: "59", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda2; } }; const audio = [audioAda1, audioAda2]; const args = { id: "12", adaptations: { video, audio }, start: 0 }; let period = null; @@ -372,12 +392,14 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const video = [videoAda1]; const videoAda2 = { type: "video", id: "55", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video2 = [videoAda2]; const args = { id: "12", adaptations: { video, video2 }, start: 0 }; const period = new Period(args); @@ -408,7 +430,8 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const video = [videoAda1]; const bar = undefined; const args = { id: "12", adaptations: { bar, video }, start: 0 }; @@ -435,11 +458,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const args = { id: "12", adaptations: { video }, start: 0 }; const period = new Period(args, representationFilter); @@ -467,11 +492,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const fooAda1 = { type: "foo", id: "12", isSupported: false, @@ -503,11 +530,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: false, - representations: [] }; + representations: [], + toVideoTrack() { return videoAda1; 
} }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const fooAda1 = { type: "foo", id: "12", isSupported: false, @@ -532,11 +561,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const args = { id: "12", adaptations: { video }, start: 72 }; const period = new Period(args); @@ -557,11 +588,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const args = { id: "12", adaptations: { video }, start: 0, duration: 12 }; const period = new Period(args); @@ -582,11 +615,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const args = { id: "12", adaptations: { video }, start: 50, duration: 12 }; const period = new Period(args); @@ -607,17 +642,20 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const audioAda1 = { type: "audio", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audio = [audioAda1]; const args = { id: "12", adaptations: { video, audio }, start: 50, duration: 12 }; @@ -643,17 +681,20 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const audioAda1 = { type: "audio", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audio = [audioAda1]; const args = { id: "12", adaptations: { video, audio }, start: 50, duration: 12 }; @@ -686,21 +727,25 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", 
id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audio = [audioAda1]; const args = { id: "12", adaptations: { video, audio }, start: 50, duration: 12 }; diff --git a/src/manifest/adaptation.ts b/src/manifest/adaptation.ts index 128a4a87a5..277df1e3bf 100644 --- a/src/manifest/adaptation.ts +++ b/src/manifest/adaptation.ts @@ -16,7 +16,12 @@ import log from "../log"; import { IParsedAdaptation } from "../parsers/manifest"; -import { IRepresentationFilter } from "../public_types"; +import { + IAudioTrack, + IRepresentationFilter, + ITextTrack, + IVideoTrack, +} from "../public_types"; import arrayFind from "../utils/array_find"; import isNullOrUndefined from "../utils/is_null_or_undefined"; import normalizeLanguage from "../utils/languages"; @@ -32,7 +37,7 @@ export const SUPPORTED_ADAPTATIONS_TYPE: IAdaptationType[] = [ "audio", /** * Normalized Adaptation structure. - * An Adaptation describes a single `Track`. For example a specific audio + * An `Adaptation` describes a single `Track`. For example a specific audio * track (in a given language) or a specific video track. * It istelf can be represented in different qualities, which we call here * `Representation`. @@ -216,4 +221,74 @@ export default class Adaptation { getRepresentation(wantedId : number|string) : Representation|undefined { return arrayFind(this.representations, ({ id }) => wantedId === id); } + + /** + * Format an `Adaptation`, generally of type `"audio"`, as an `IAudioTrack`. + * @returns {Object} + */ + public toAudioTrack() : IAudioTrack { + const formatted : IAudioTrack = { + language: this.language ?? "", + normalized: this.normalizedLanguage ?? "", + audioDescription: this.isAudioDescription === true, + id: this.id, + representations: this.representations.map(r => r.toAudioRepresentation()), + label: this.label, + }; + if (this.isDub === true) { + formatted.dub = true; + } + return formatted; + } + + /** + * Format an `Adaptation`, generally of type `"audio"`, as an `IAudioTrack`. + * @returns {Object} + */ + public toTextTrack() : ITextTrack { + return { + language: this.language ?? "", + normalized: this.normalizedLanguage ?? "", + closedCaption: this.isClosedCaption === true, + id: this.id, + label: this.label, + forced: this.isForcedSubtitles, + }; + } + + /** + * Format an `Adaptation`, generally of type `"video"`, as an `IAudioTrack`. + * @returns {Object} + */ + public toVideoTrack() : IVideoTrack { + const trickModeTracks = this.trickModeTracks !== undefined ? 
+ this.trickModeTracks.map((trickModeAdaptation) => { + const representations = trickModeAdaptation.representations + .map(r => r.toVideoRepresentation()); + const trickMode : IVideoTrack = { id: trickModeAdaptation.id, + representations, + isTrickModeTrack: true }; + if (trickModeAdaptation.isSignInterpreted === true) { + trickMode.signInterpreted = true; + } + return trickMode; + }) : + undefined; + + const videoTrack: IVideoTrack = { + id: this.id, + representations: this.representations.map(r => r.toVideoRepresentation()), + label: this.label, + }; + if (this.isSignInterpreted === true) { + videoTrack.signInterpreted = true; + } + if (this.isTrickModeTrack === true) { + videoTrack.isTrickModeTrack = true; + } + if (trickModeTracks !== undefined) { + videoTrack.trickModeTracks = trickModeTracks; + } + return videoTrack; + } } diff --git a/src/manifest/manifest.ts b/src/manifest/manifest.ts index 9619585fd7..11cd1344e7 100644 --- a/src/manifest/manifest.ts +++ b/src/manifest/manifest.ts @@ -632,7 +632,8 @@ export default class Manifest extends EventEmitter { if (newAdaptation.representations.length > 0 && !newAdaptation.isSupported) { const error = new MediaError("MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "An Adaptation contains only incompatible codecs."); + "An Adaptation contains only incompatible codecs.", + { adaptation: newAdaptation }); this.contentWarnings.push(error); } return newAdaptation; @@ -694,7 +695,8 @@ export default class Manifest extends EventEmitter { if (newAdaptation.representations.length > 0 && !newAdaptation.isSupported) { const error = new MediaError("MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "An Adaptation contains only incompatible codecs."); + "An Adaptation contains only incompatible codecs.", + { adaptation: newAdaptation }); this.contentWarnings.push(error); } return newAdaptation; @@ -711,7 +713,7 @@ export default class Manifest extends EventEmitter { /** * @param {Object} newManifest - * @param {number} type + * @param {number} updateType */ private _performUpdate( newManifest : Manifest, diff --git a/src/manifest/period.ts b/src/manifest/period.ts index d019e32958..ff11856760 100644 --- a/src/manifest/period.ts +++ b/src/manifest/period.ts @@ -89,7 +89,8 @@ export default class Period { if (newAdaptation.representations.length > 0 && !newAdaptation.isSupported) { const error = new MediaError("MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "An Adaptation contains only incompatible codecs."); + "An Adaptation contains only incompatible codecs.", + { adaptation: newAdaptation }); this.contentWarnings.push(error); } return newAdaptation; diff --git a/src/manifest/representation.ts b/src/manifest/representation.ts index ae89e915d3..3976c03ea9 100644 --- a/src/manifest/representation.ts +++ b/src/manifest/representation.ts @@ -21,7 +21,11 @@ import { IContentProtections, IParsedRepresentation, } from "../parsers/manifest"; -import { IHDRInformation } from "../public_types"; +import { + IAudioRepresentation, + IHDRInformation, + IVideoRepresentation, +} from "../public_types"; import areArraysOfNumbersEqual from "../utils/are_arrays_of_numbers_equal"; import { IRepresentationIndex } from "./representation_index"; import { @@ -334,6 +338,24 @@ class Representation { values: data }); return true; } + + /** + * Format Representation as an `IAudioRepresentation`. 
+ * @returns {Object} + */ + public toAudioRepresentation(): IAudioRepresentation { + const { id, bitrate, codec } = this; + return { id, bitrate, codec }; + } + + /** + * Format Representation as an `IVideoRepresentation`. + * @returns {Object} + */ + public toVideoRepresentation(): IVideoRepresentation { + const { id, bitrate, frameRate, width, height, codec, hdrInfo } = this; + return { id, bitrate, frameRate, width, height, codec, hdrInfo }; + } } /** Protection data as returned by a Representation. */ diff --git a/src/public_types.ts b/src/public_types.ts index bed8f24abf..1a8c79a5dc 100644 --- a/src/public_types.ts +++ b/src/public_types.ts @@ -24,6 +24,7 @@ import { } from "./core/decrypt"; import { IBufferType } from "./core/segment_buffers"; import { + IMediaErrorTrackContext, EncryptedMediaError, MediaError, NetworkError, @@ -33,6 +34,8 @@ import Manifest from "./manifest"; import { ILocalManifest } from "./parsers/manifest/local"; import { IMetaPlaylist } from "./parsers/manifest/metaplaylist/metaplaylist_parser"; +export { IMediaErrorTrackContext }; + /** * This file defines and exports types we want to expose to library users. * Those types are considered as part of the API. @@ -235,11 +238,13 @@ export interface IPeriod { image? : IAdaptation[]; }; } +export type IAdaptationType = "video" | "audio" | "text" | "image"; + /** Adaptation (represents a track), as documented in the API documentation. */ export interface IAdaptation { /** String identifying the Adaptation, unique per Period. */ id : string; - type : "video" | "audio" | "text" | "image"; + type : IAdaptationType; language? : string | undefined; normalizedLanguage? : string | undefined; isAudioDescription? : boolean | undefined; diff --git a/src/transports/smooth/isobmff/create_boxes.ts b/src/transports/smooth/isobmff/create_boxes.ts index 105353fb8e..483fb71ff1 100644 --- a/src/transports/smooth/isobmff/create_boxes.ts +++ b/src/transports/smooth/isobmff/create_boxes.ts @@ -33,7 +33,7 @@ import { * @param {Number} height * @param {Number} hRes - horizontal resolution, eg 72 * @param {Number} vRes - vertical resolution, eg 72 - * @param {string} encDepth + * @param {string} encName * @param {Number} colorDepth - eg 24 * @param {Uint8Array} avcc - Uint8Array representing the avcC atom * @returns {Uint8Array} @@ -68,7 +68,7 @@ function createAVC1Box( * @param {Number} height * @param {Number} hRes - horizontal resolution, eg 72 * @param {Number} vRes - vertical resolution, eg 72 - * @param {string} encDepth + * @param {string} encName * @param {Number} colorDepth - eg 24 * @param {Uint8Array} avcc - Uint8Array representing the avcC atom * @param {Uint8Array} sinf - Uint8Array representing the sinf atom @@ -108,8 +108,6 @@ function createENCVBox( * @param {Number} packetSize * @param {Number} sampleRate * @param {Uint8Array} esds - Uint8Array representing the esds atom - * @param {Uint8Array} [sinf] - Uint8Array representing the sinf atom, - * only if name == "enca" * @returns {Uint8Array} */ function createMP4ABox( @@ -164,7 +162,7 @@ function createENCABox( } /** - * @param {url} Uint8Array + * @param {Uint8Array} url * @returns {Uint8Array} */ function createDREFBox(url : Uint8Array) : Uint8Array { @@ -415,7 +413,7 @@ function createSMHDBox() : Uint8Array { } /** - * @param {Array.} representations - arrays of Uint8Array, + * @param {Array.} reps - arrays of Uint8Array, * typically [avc1] or [encv, avc1] * @returns {Uint8Array} */
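
Closing note on the `IMediaErrorTrackContext` union introduced in `src/errors/media_error.ts` above: splitting it into three interfaces discriminated by `type` lets TypeScript narrow the `track` property to the matching track shape. The sketch below uses simplified local copies of those interfaces (the real ones reference `IAudioTrack`, `IVideoTrack` and `ITextTrack` from `src/public_types.ts`); the property subsets and the helper are illustrative only.

```ts
// Simplified structural copies of the discriminated union, for illustration;
// the real interfaces live in src/errors/media_error.ts and src/public_types.ts.
interface IAudioTrackInfo { type: "audio"; track: { id: string | number; language: string } }
interface IVideoTrackInfo { type: "video"; track: { id: string; isTrickModeTrack?: boolean } }
interface ITextTrackInfo { type: "text"; track: { id: string; closedCaption: boolean } }
type ITrackInfo = IAudioTrackInfo | IVideoTrackInfo | ITextTrackInfo;

// Switching on the `type` discriminant narrows `track` in each branch, so the
// audio-only / video-only / text-only properties can be accessed safely.
function describeTrackInfo(trackInfo: ITrackInfo): string {
  switch (trackInfo.type) {
    case "audio":
      return `audio track "${trackInfo.track.id}" (language: ${trackInfo.track.language})`;
    case "video":
      return trackInfo.track.isTrickModeTrack === true
        ? `trick mode video track "${trackInfo.track.id}"`
        : `video track "${trackInfo.track.id}"`;
    case "text":
      return trackInfo.track.closedCaption
        ? `closed-caption text track "${trackInfo.track.id}"`
        : `text track "${trackInfo.track.id}"`;
  }
}

// Example: describeTrackInfo({ type: "text", track: { id: "fr", closedCaption: false } })
// returns: text track "fr"
```
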