Source: lib/util/stream_utils.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.util.StreamUtils');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.config.AutoShowText');
  9. goog.require('shaka.device.DeviceFactory');
  10. goog.require('shaka.device.IDevice');
  11. goog.require('shaka.lcevc.Dec');
  12. goog.require('shaka.log');
  13. goog.require('shaka.media.Capabilities');
  14. goog.require('shaka.text.TextEngine');
  15. goog.require('shaka.util.Functional');
  16. goog.require('shaka.util.LanguageUtils');
  17. goog.require('shaka.util.ManifestParserUtils');
  18. goog.require('shaka.util.MimeUtils');
  19. goog.require('shaka.util.MultiMap');
  20. goog.require('shaka.util.ObjectUtils');
  21. goog.requireType('shaka.drm.DrmEngine');
  22. /**
  23. * @summary A set of utility functions for dealing with Streams and Manifests.
  24. * @export
  25. */
  26. shaka.util.StreamUtils = class {
  27. /**
  28. * In case of multiple usable codecs, choose one based on lowest average
  29. * bandwidth and filter out the rest.
  30. * Also filters out variants that have too many audio channels.
  31. * @param {!shaka.extern.Manifest} manifest
  32. * @param {!Array<string>} preferredVideoCodecs
  33. * @param {!Array<string>} preferredAudioCodecs
  34. * @param {!Array<string>} preferredDecodingAttributes
  35. * @param {!Array<string>} preferredTextFormats
  36. */
  37. static chooseCodecsAndFilterManifest(manifest, preferredVideoCodecs,
  38. preferredAudioCodecs, preferredDecodingAttributes, preferredTextFormats) {
  39. const StreamUtils = shaka.util.StreamUtils;
  40. const MimeUtils = shaka.util.MimeUtils;
  41. if (preferredTextFormats.length) {
  42. let subset = manifest.textStreams;
  43. for (const textFormat of preferredTextFormats) {
  44. const filtered = subset.filter((textStream) => {
  45. if (textStream.codecs.startsWith(textFormat) ||
  46. textStream.mimeType.startsWith(textFormat)) {
  47. return true;
  48. }
  49. return false;
  50. });
  51. if (filtered.length) {
  52. subset = filtered;
  53. break;
  54. }
  55. }
  56. manifest.textStreams = subset;
  57. }
  58. let variants = manifest.variants;
  59. // To start, choose the codecs based on configured preferences if available.
  60. if (preferredVideoCodecs.length || preferredAudioCodecs.length) {
  61. variants = StreamUtils.choosePreferredCodecs(variants,
  62. preferredVideoCodecs, preferredAudioCodecs);
  63. }
  64. if (preferredDecodingAttributes.length) {
  65. // group variants by resolution and choose preferred variants only
  66. /** @type {!shaka.util.MultiMap<shaka.extern.Variant>} */
  67. const variantsByResolutionMap = new shaka.util.MultiMap();
  68. for (const variant of variants) {
  69. variantsByResolutionMap
  70. .push(String(variant.video.width || 0), variant);
  71. }
  72. const bestVariants = [];
  73. variantsByResolutionMap.forEach((width, variantsByResolution) => {
  74. let highestMatch = 0;
  75. let matchingVariants = [];
  76. for (const variant of variantsByResolution) {
  77. const matchCount = preferredDecodingAttributes.filter(
  78. (attribute) => variant.decodingInfos[0][attribute],
  79. ).length;
  80. if (matchCount > highestMatch) {
  81. highestMatch = matchCount;
  82. matchingVariants = [variant];
  83. } else if (matchCount == highestMatch) {
  84. matchingVariants.push(variant);
  85. }
  86. }
  87. bestVariants.push(...matchingVariants);
  88. });
  89. variants = bestVariants;
  90. }
  91. const audioStreamsSet = new Set();
  92. const videoStreamsSet = new Set();
  93. for (const variant of variants) {
  94. if (variant.audio) {
  95. audioStreamsSet.add(variant.audio);
  96. }
  97. if (variant.video) {
  98. videoStreamsSet.add(variant.video);
  99. }
  100. }
  101. const audioStreams = Array.from(audioStreamsSet).sort((v1, v2) => {
  102. return v1.bandwidth - v2.bandwidth;
  103. });
  104. const validAudioIds = [];
  105. const validAudioStreamsMap = new Map();
  106. const getAudioId = (stream) => {
  107. let id = stream.language + (stream.channelsCount || 0) +
  108. (stream.audioSamplingRate || 0) + stream.roles.join(',') +
  109. stream.label + stream.groupId + stream.fastSwitching;
  110. if (stream.dependencyStream) {
  111. id += stream.dependencyStream.baseOriginalId || '';
  112. }
  113. return id;
  114. };
  115. for (const stream of audioStreams) {
  116. const groupId = getAudioId(stream);
  117. const validAudioStreams = validAudioStreamsMap.get(groupId) || [];
  118. if (!validAudioStreams.length) {
  119. validAudioStreams.push(stream);
  120. validAudioIds.push(stream.id);
  121. } else {
  122. const previousStream = validAudioStreams[validAudioStreams.length - 1];
  123. const previousCodec =
  124. MimeUtils.getNormalizedCodec(previousStream.codecs);
  125. const currentCodec =
  126. MimeUtils.getNormalizedCodec(stream.codecs);
  127. if (previousCodec == currentCodec) {
  128. if (!stream.bandwidth || !previousStream.bandwidth ||
  129. stream.bandwidth > previousStream.bandwidth) {
  130. validAudioStreams.push(stream);
  131. validAudioIds.push(stream.id);
  132. }
  133. }
  134. }
  135. validAudioStreamsMap.set(groupId, validAudioStreams);
  136. }
137. // Keys based on MimeUtils.getNormalizedCodec. Lower is better.
  138. const videoCodecPreference = {
  139. 'vp8': 1,
  140. 'avc': 1,
  141. 'dovi-avc': 0.95,
  142. 'vp9': 0.9,
  143. 'vp09': 0.9,
  144. 'hevc': 0.85,
  145. 'dovi-hevc': 0.8,
  146. 'dovi-p5': 0.75,
  147. 'av01': 0.7,
  148. 'dovi-av1': 0.65,
  149. 'vvc': 0.6,
  150. };
  151. const videoStreams = Array.from(videoStreamsSet)
  152. .sort((v1, v2) => {
  153. if (!v1.bandwidth || !v2.bandwidth || v1.bandwidth == v2.bandwidth) {
  154. if (v1.codecs && v2.codecs && v1.codecs != v2.codecs &&
  155. v1.width == v2.width) {
  156. const v1Codecs = MimeUtils.getNormalizedCodec(v1.codecs);
  157. const v2Codecs = MimeUtils.getNormalizedCodec(v2.codecs);
  158. if (v1Codecs != v2Codecs) {
  159. const indexV1 = videoCodecPreference[v1Codecs] || 1;
  160. const indexV2 = videoCodecPreference[v2Codecs] || 1;
  161. return indexV1 - indexV2;
  162. }
  163. }
  164. return v1.width - v2.width;
  165. }
  166. return v1.bandwidth - v2.bandwidth;
  167. });
  168. const isChangeTypeSupported =
  169. shaka.media.Capabilities.isChangeTypeSupported();
  170. const validVideoIds = [];
  171. const validVideoStreamsMap = new Map();
  172. const getVideoGroupId = (stream) => {
  173. let id = String(stream.width || '') + String(stream.height || '') +
  174. String(Math.round(stream.frameRate || 0)) + (stream.hdr || '') +
  175. stream.fastSwitching;
  176. if (stream.dependencyStream) {
  177. id += stream.dependencyStream.baseOriginalId || '';
  178. }
  179. if (stream.roles) {
  180. id += stream.roles.sort().join('_');
  181. }
  182. return id;
  183. };
  184. for (const stream of videoStreams) {
  185. const groupId = getVideoGroupId(stream);
  186. const validVideoStreams = validVideoStreamsMap.get(groupId) || [];
  187. if (!validVideoStreams.length) {
  188. validVideoStreams.push(stream);
  189. validVideoIds.push(stream.id);
  190. } else {
  191. const previousStream = validVideoStreams[validVideoStreams.length - 1];
  192. if (!isChangeTypeSupported) {
  193. const previousCodec =
  194. MimeUtils.getNormalizedCodec(previousStream.codecs);
  195. const currentCodec =
  196. MimeUtils.getNormalizedCodec(stream.codecs);
  197. if (previousCodec !== currentCodec) {
  198. continue;
  199. }
  200. }
206. if (!stream.bandwidth || !previousStream.bandwidth ||
207. stream.bandwidth > previousStream.bandwidth) {
208. validVideoStreams.push(stream);
209. validVideoIds.push(stream.id);
210. }
  212. }
  213. validVideoStreamsMap.set(groupId, validVideoStreams);
  214. }
  215. // Filter out any variants that don't match, forcing AbrManager to choose
216. // from a single video codec and a single audio codec where possible.
  217. manifest.variants = manifest.variants.filter((variant) => {
  218. const audio = variant.audio;
  219. const video = variant.video;
  220. if (audio) {
  221. if (!validAudioIds.includes(audio.id)) {
  222. shaka.log.debug('Dropping Variant (better codec available)', variant);
  223. return false;
  224. }
  225. }
  226. if (video) {
  227. if (!validVideoIds.includes(video.id)) {
  228. shaka.log.debug('Dropping Variant (better codec available)', variant);
  229. return false;
  230. }
  231. }
  232. return true;
  233. });
  234. }
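// Usage sketch (illustrative only; assumes the standard player configuration
// field names, which are not defined in this file):
//
//   const config = player.getConfiguration();
//   shaka.util.StreamUtils.chooseCodecsAndFilterManifest(
//       manifest,
//       config.preferredVideoCodecs,
//       config.preferredAudioCodecs,
//       config.preferredDecodingAttributes,
//       config.preferredTextFormats);
//   // manifest.variants is reduced so AbrManager only has to choose among
//   // a single video codec family and a single audio codec family.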
  235. /**
236. * Choose the codecs based on the configured preferred audio and video codecs.
  237. *
  238. * @param {!Array<shaka.extern.Variant>} variants
  239. * @param {!Array<string>} preferredVideoCodecs
  240. * @param {!Array<string>} preferredAudioCodecs
  241. * @return {!Array<shaka.extern.Variant>}
  242. */
  243. static choosePreferredCodecs(variants, preferredVideoCodecs,
  244. preferredAudioCodecs) {
  245. let subset = variants;
  246. for (const videoCodec of preferredVideoCodecs) {
  247. const filtered = subset.filter((variant) => {
  248. return variant.video && variant.video.codecs.startsWith(videoCodec);
  249. });
  250. if (filtered.length) {
  251. subset = filtered;
  252. break;
  253. }
  254. }
  255. for (const audioCodec of preferredAudioCodecs) {
  256. const filtered = subset.filter((variant) => {
  257. return variant.audio && variant.audio.codecs.startsWith(audioCodec);
  258. });
  259. if (filtered.length) {
  260. subset = filtered;
  261. break;
  262. }
  263. }
  264. return subset;
  265. }
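// For illustration (hypothetical codec strings): with
// preferredVideoCodecs = ['hvc1', 'avc1'], variants whose video codecs start
// with 'hvc1' win if any exist; otherwise the 'avc1' prefix is tried, and if
// neither prefix matches, the variant list is returned unchanged.
//
//   const subset = shaka.util.StreamUtils.choosePreferredCodecs(
//       variants, ['hvc1', 'avc1'], ['ec-3', 'mp4a']);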
  266. /**
  267. * Filter the variants in |manifest| to only include the variants that meet
  268. * the given restrictions.
  269. *
  270. * @param {!shaka.extern.Manifest} manifest
  271. * @param {shaka.extern.Restrictions} restrictions
  272. * @param {shaka.extern.Resolution} maxHwResolution
  273. */
  274. static filterByRestrictions(manifest, restrictions, maxHwResolution) {
  275. manifest.variants = manifest.variants.filter((variant) => {
  276. return shaka.util.StreamUtils.meetsRestrictions(
  277. variant, restrictions, maxHwResolution);
  278. });
  279. }
  280. /**
  281. * @param {shaka.extern.Variant} variant
  282. * @param {shaka.extern.Restrictions} restrictions
  283. * Configured restrictions from the user.
  284. * @param {shaka.extern.Resolution} maxHwRes
  285. * The maximum resolution the hardware can handle.
  286. * This is applied separately from user restrictions because the setting
  287. * should not be easily replaced by the user's configuration.
  288. * @return {boolean}
  289. * @export
  290. */
  291. static meetsRestrictions(variant, restrictions, maxHwRes) {
  292. /** @type {function(number, number, number):boolean} */
  293. const inRange = (x, min, max) => {
  294. return x >= min && x <= max;
  295. };
  296. const video = variant.video;
  297. // |video.width| and |video.height| can be undefined, which breaks
  298. // the math, so make sure they are there first.
  299. if (video && video.width && video.height) {
  300. let videoWidth = video.width;
  301. let videoHeight = video.height;
  302. if (videoHeight > videoWidth) {
  303. // Vertical video.
  304. [videoWidth, videoHeight] = [videoHeight, videoWidth];
  305. }
  306. if (!inRange(videoWidth,
  307. restrictions.minWidth,
  308. Math.min(restrictions.maxWidth, maxHwRes.width))) {
  309. return false;
  310. }
  311. if (!inRange(videoHeight,
  312. restrictions.minHeight,
  313. Math.min(restrictions.maxHeight, maxHwRes.height))) {
  314. return false;
  315. }
  316. if (!inRange(video.width * video.height,
  317. restrictions.minPixels,
  318. restrictions.maxPixels)) {
  319. return false;
  320. }
  321. }
  322. // |variant.video.frameRate| can be undefined, which breaks
323. // the math, so make sure it is there first.
  324. if (variant && variant.video && variant.video.frameRate) {
  325. if (!inRange(variant.video.frameRate,
  326. restrictions.minFrameRate,
  327. restrictions.maxFrameRate)) {
  328. return false;
  329. }
  330. }
  331. // |variant.audio.channelsCount| can be undefined, which breaks
332. // the math, so make sure it is there first.
  333. if (variant && variant.audio && variant.audio.channelsCount) {
  334. if (!inRange(variant.audio.channelsCount,
  335. restrictions.minChannelsCount,
  336. restrictions.maxChannelsCount)) {
  337. return false;
  338. }
  339. }
  340. if (!inRange(variant.bandwidth,
  341. restrictions.minBandwidth,
  342. restrictions.maxBandwidth)) {
  343. return false;
  344. }
  345. return true;
  346. }
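// Minimal sketch of a restriction check (values are hypothetical):
//
//   const restrictions = player.getConfiguration().restrictions;
//   // Suppose restrictions.maxHeight == 1080 and the hardware imposes no
//   // limit of its own:
//   const maxHwRes = {width: Infinity, height: Infinity};
//   const ok = shaka.util.StreamUtils.meetsRestrictions(
//       variant, restrictions, maxHwRes);
//   // A 3840x2160 variant fails the height check and returns false; note
//   // that for vertical video the width/height are swapped before the
//   // comparison.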
  347. /**
  348. * @param {!Array<shaka.extern.Variant>} variants
  349. * @param {shaka.extern.Restrictions} restrictions
  350. * @param {shaka.extern.Resolution} maxHwRes
  351. * @return {boolean} Whether the tracks changed.
  352. */
  353. static applyRestrictions(variants, restrictions, maxHwRes) {
  354. let tracksChanged = false;
  355. for (const variant of variants) {
  356. const originalAllowed = variant.allowedByApplication;
  357. variant.allowedByApplication = shaka.util.StreamUtils.meetsRestrictions(
  358. variant, restrictions, maxHwRes);
  359. if (originalAllowed != variant.allowedByApplication) {
  360. tracksChanged = true;
  361. }
  362. }
  363. return tracksChanged;
  364. }
  365. /**
  366. * Alters the given Manifest to filter out any unplayable streams.
  367. *
  368. * @param {shaka.drm.DrmEngine} drmEngine
  369. * @param {shaka.extern.Manifest} manifest
  370. * @param {!Array<string>=} preferredKeySystems
  371. * @param {!Object<string, string>=} keySystemsMapping
  372. */
  373. static async filterManifest(drmEngine, manifest, preferredKeySystems = [],
  374. keySystemsMapping = {}) {
  375. await shaka.util.StreamUtils.filterManifestByMediaCapabilities(
  376. drmEngine, manifest, manifest.offlineSessionIds.length > 0,
  377. preferredKeySystems, keySystemsMapping);
  378. shaka.util.StreamUtils.filterTextStreams_(manifest);
  379. await shaka.util.StreamUtils.filterImageStreams_(manifest);
  380. }
  381. /**
  382. * Alters the given Manifest to filter out any streams unsupported by the
  383. * platform via MediaCapabilities.decodingInfo() API.
  384. *
  385. * @param {shaka.drm.DrmEngine} drmEngine
  386. * @param {shaka.extern.Manifest} manifest
  387. * @param {boolean} usePersistentLicenses
  388. * @param {!Array<string>} preferredKeySystems
  389. * @param {!Object<string, string>} keySystemsMapping
  390. */
  391. static async filterManifestByMediaCapabilities(
  392. drmEngine, manifest, usePersistentLicenses, preferredKeySystems,
  393. keySystemsMapping) {
  394. goog.asserts.assert(navigator.mediaCapabilities,
  395. 'MediaCapabilities should be valid.');
  396. if (shaka.device.DeviceFactory.getDevice()
  397. .shouldOverrideDolbyVisionCodecs()) {
  398. shaka.util.StreamUtils.overrideDolbyVisionCodecs(manifest.variants);
  399. }
  400. await shaka.util.StreamUtils.getDecodingInfosForVariants(
  401. manifest.variants, usePersistentLicenses, /* srcEquals= */ false,
  402. preferredKeySystems);
  403. let keySystem = null;
  404. if (drmEngine) {
  405. const drmInfo = drmEngine.getDrmInfo();
  406. if (drmInfo) {
  407. keySystem = drmInfo.keySystem;
  408. }
  409. }
  410. const StreamUtils = shaka.util.StreamUtils;
  411. manifest.variants = manifest.variants.filter((variant) => {
  412. const supported = StreamUtils.checkVariantSupported_(
  413. variant, keySystem, keySystemsMapping);
  414. // Filter out all unsupported variants.
  415. if (!supported) {
  416. shaka.log.debug('Dropping variant - not compatible with platform',
  417. StreamUtils.getVariantSummaryString_(variant));
  418. }
  419. return supported;
  420. });
  421. }
  422. /**
423. * Maps Dolby Vision codecs to their H.264, H.265 and H.266 equivalents as
424. * a workaround to make Dolby Vision playback work on some platforms.
  425. *
  426. * Mapping is done according to the relevant Dolby documentation found here:
  427. * https://professionalsupport.dolby.com/s/article/How-to-signal-Dolby-Vision-in-MPEG-DASH?language=en_US
  428. * @param {!Array<!shaka.extern.Variant>} variants
  429. */
  430. static overrideDolbyVisionCodecs(variants) {
  431. /** @type {!Map<string, string>} */
  432. const codecMap = new Map()
  433. .set('dvav', 'avc3')
  434. .set('dva1', 'avc1')
  435. .set('dvhe', 'hev1')
  436. .set('dvh1', 'hvc1')
  437. .set('dvc1', 'vvc1')
  438. .set('dvi1', 'vvi1');
  439. /** @type {!Set<!shaka.extern.Stream>} */
  440. const streams = new Set();
  441. for (const variant of variants) {
  442. if (variant.video) {
  443. streams.add(variant.video);
  444. }
  445. }
  446. for (const video of streams) {
  447. for (const [dvCodec, replacement] of codecMap) {
  448. if (video.codecs.includes(dvCodec)) {
  449. video.codecs = video.codecs.replace(dvCodec, replacement);
  450. break;
  451. }
  452. }
  453. }
  454. }
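// For example (hypothetical codec string): a video stream with
// codecs == 'dvhe.08.07' is rewritten in place to 'hev1.08.07', so the rest
// of the pipeline treats it as plain HEVC on platforms that need this
// workaround.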
  455. /**
  456. * @param {!shaka.extern.Variant} variant
  457. * @param {?string} keySystem
  458. * @param {!Object<string, string>} keySystemsMapping
  459. * @return {boolean}
  460. * @private
  461. */
  462. static checkVariantSupported_(variant, keySystem, keySystemsMapping) {
  463. const variantSupported = variant.decodingInfos.some((decodingInfo) => {
  464. if (!decodingInfo.supported) {
  465. return false;
  466. }
  467. if (keySystem) {
  468. const keySystemAccess = decodingInfo.keySystemAccess;
  469. if (keySystemAccess) {
  470. const currentKeySystem =
  471. keySystemsMapping[keySystemAccess.keySystem] ||
  472. keySystemAccess.keySystem;
  473. if (currentKeySystem != keySystem) {
  474. return false;
  475. }
  476. }
  477. }
  478. return true;
  479. });
  480. if (!variantSupported) {
  481. return false;
  482. }
  483. const device = shaka.device.DeviceFactory.getDevice();
  484. const isXboxOne = device.getDeviceName() === 'Xbox';
  485. const isFirefoxAndroid =
  486. device.getDeviceType() === shaka.device.IDevice.DeviceType.MOBILE &&
  487. device.getBrowserEngine() === shaka.device.IDevice.BrowserEngine.GECKO;
  488. // See: https://github.com/shaka-project/shaka-player/issues/3860
  489. const video = variant.video;
  490. const videoWidth = (video && video.width) || 0;
  491. const videoHeight = (video && video.height) || 0;
  492. // See: https://github.com/shaka-project/shaka-player/issues/3380
  493. // Note: it makes sense to drop early
  494. if (isXboxOne && video && (videoWidth > 1920 || videoHeight > 1080) &&
  495. (video.codecs.includes('avc1.') || video.codecs.includes('avc3.'))) {
  496. return false;
  497. }
  498. const videoDependencyStream = video && video.dependencyStream;
  499. if (videoDependencyStream &&
  500. !shaka.lcevc.Dec.isStreamSupported(videoDependencyStream)) {
  501. return false;
  502. }
  503. const audio = variant.audio;
  504. // See: https://github.com/shaka-project/shaka-player/issues/6111
  505. // It seems that Firefox Android reports that it supports
  506. // Opus + Widevine, but it is not actually supported.
  507. // It makes sense to drop early.
  508. if (isFirefoxAndroid && audio && audio.encrypted &&
  509. audio.codecs.toLowerCase().includes('opus')) {
  510. return false;
  511. }
  512. const audioDependencyStream = audio && audio.dependencyStream;
  513. if (audioDependencyStream) {
  514. return false;
  515. }
  516. return true;
  517. }
  518. /**
  519. * Queries mediaCapabilities for the decoding info for that decoding config,
  520. * and assigns it to the given variant.
521. * If that query has been done before, a cached result is used instead.
  522. * @param {!shaka.extern.Variant} variant
  523. * @param {!Array<!MediaDecodingConfiguration>} decodingConfigs
  524. * @private
  525. */
  526. static async getDecodingInfosForVariant_(variant, decodingConfigs) {
  527. /**
  528. * @param {?MediaCapabilitiesDecodingInfo} a
  529. * @param {!MediaCapabilitiesDecodingInfo} b
  530. * @return {!MediaCapabilitiesDecodingInfo}
  531. */
  532. const merge = (a, b) => {
  533. if (!a) {
  534. return b;
  535. } else {
  536. const res = shaka.util.ObjectUtils.shallowCloneObject(a);
  537. res.supported = a.supported && b.supported;
  538. res.powerEfficient = a.powerEfficient && b.powerEfficient;
  539. res.smooth = a.smooth && b.smooth;
  540. if (b.keySystemAccess && !res.keySystemAccess) {
  541. res.keySystemAccess = b.keySystemAccess;
  542. }
  543. return res;
  544. }
  545. };
  546. const StreamUtils = shaka.util.StreamUtils;
  547. /** @type {?MediaCapabilitiesDecodingInfo} */
  548. let finalResult = null;
  549. const promises = [];
  550. for (const decodingConfig of decodingConfigs) {
  551. const cacheKey =
  552. shaka.util.ObjectUtils.alphabeticalKeyOrderStringify(decodingConfig);
  553. const cache = StreamUtils.decodingConfigCache_;
  554. if (cache.has(cacheKey)) {
  555. shaka.log.v2('Using cached results of mediaCapabilities.decodingInfo',
  556. 'for key', cacheKey);
  557. finalResult = merge(finalResult, cache.get(cacheKey));
  558. } else {
  559. // Do a final pass-over of the decoding config: if a given stream has
  560. // multiple codecs, that suggests that it switches between those codecs
561. // at some point during playback.
  562. // mediaCapabilities by itself will report "not supported" when you
  563. // put in multiple different codecs, so each has to be checked
  564. // individually. So check each and take the worst result, to determine
  565. // overall variant compatibility.
  566. promises.push(StreamUtils
  567. .checkEachDecodingConfigCombination_(decodingConfig).then((res) => {
  568. /** @type {?MediaCapabilitiesDecodingInfo} */
  569. let acc = null;
  570. for (const result of (res || [])) {
  571. acc = merge(acc, result);
  572. }
  573. if (acc) {
  574. cache.set(cacheKey, acc);
  575. finalResult = merge(finalResult, acc);
  576. }
  577. }));
  578. }
  579. }
  580. await Promise.all(promises);
  581. if (finalResult) {
  582. variant.decodingInfos.push(finalResult);
  583. }
  584. }
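// Merge semantics, roughly (hypothetical results): the worst case wins for
// the boolean capability flags, and the first keySystemAccess seen is kept.
//
//   merge({supported: true, smooth: true,  powerEfficient: true},
//         {supported: true, smooth: false, powerEfficient: true})
//   // -> {supported: true, smooth: false, powerEfficient: true}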
  585. /**
  586. * @param {!MediaDecodingConfiguration} decodingConfig
  587. * @return {!Promise<?Array<!MediaCapabilitiesDecodingInfo>>}
  588. * @private
  589. */
  590. static checkEachDecodingConfigCombination_(decodingConfig) {
  591. let videoCodecs = [''];
  592. if (decodingConfig.video) {
  593. videoCodecs = shaka.util.MimeUtils.getCodecs(
  594. decodingConfig.video.contentType).split(',');
  595. }
  596. let audioCodecs = [''];
  597. if (decodingConfig.audio) {
  598. audioCodecs = shaka.util.MimeUtils.getCodecs(
  599. decodingConfig.audio.contentType).split(',');
  600. }
  601. const promises = [];
  602. for (const videoCodec of videoCodecs) {
  603. for (const audioCodec of audioCodecs) {
  604. const copy = shaka.util.ObjectUtils.cloneObject(decodingConfig);
  605. if (decodingConfig.video) {
  606. const mimeType = shaka.util.MimeUtils.getBasicType(
  607. copy.video.contentType);
  608. copy.video.contentType = shaka.util.MimeUtils.getFullType(
  609. mimeType, videoCodec);
  610. }
  611. if (decodingConfig.audio) {
  612. const mimeType = shaka.util.MimeUtils.getBasicType(
  613. copy.audio.contentType);
  614. copy.audio.contentType = shaka.util.MimeUtils.getFullType(
  615. mimeType, audioCodec);
  616. }
  617. promises.push(new Promise((resolve, reject) => {
  618. // On some (Android) WebView environments, decodingInfo will
  619. // not resolve or reject, at least if RESOURCE_PROTECTED_MEDIA_ID
  620. // is not set. This is a workaround for that issue.
  621. const TIMEOUT_FOR_DECODING_INFO_IN_SECONDS = 5;
  622. let promise;
  623. const device = shaka.device.DeviceFactory.getDevice();
  624. if (device.getDeviceType() ==
  625. shaka.device.IDevice.DeviceType.MOBILE) {
  626. promise = shaka.util.Functional.promiseWithTimeout(
  627. TIMEOUT_FOR_DECODING_INFO_IN_SECONDS,
  628. navigator.mediaCapabilities.decodingInfo(copy),
  629. );
  630. } else {
  631. promise = navigator.mediaCapabilities.decodingInfo(copy);
  632. }
  633. promise.then((res) => {
  634. resolve(res);
  635. }).catch(reject);
  636. }));
  637. }
  638. }
  639. return Promise.all(promises).catch((e) => {
  640. shaka.log.info('MediaCapabilities.decodingInfo() failed.',
  641. JSON.stringify(decodingConfig), e);
  642. return null;
  643. });
  644. }
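// Sketch of the expansion this performs (hypothetical contentType values):
// for
//   video.contentType = 'video/webm; codecs="vp09.00.41.08,vp8"'
//   audio.contentType = 'audio/webm; codecs="opus"'
// decodingInfo() is called once for vp09+opus and once for vp8+opus; the
// caller then merges the per-combination results.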
  645. /**
  646. * Get the decodingInfo results of the variants via MediaCapabilities.
  647. * This should be called after the DrmEngine is created and configured, and
  648. * before DrmEngine sets the mediaKeys.
  649. *
  650. * @param {!Array<shaka.extern.Variant>} variants
  651. * @param {boolean} usePersistentLicenses
  652. * @param {boolean} srcEquals
  653. * @param {!Array<string>} preferredKeySystems
  654. * @exportDoc
  655. */
  656. static async getDecodingInfosForVariants(variants, usePersistentLicenses,
  657. srcEquals, preferredKeySystems) {
  658. const gotDecodingInfo = variants.some((variant) =>
  659. variant.decodingInfos.length);
  660. if (gotDecodingInfo) {
  661. shaka.log.debug('Already got the variants\' decodingInfo.');
  662. return;
  663. }
  664. // Try to get preferred key systems first to avoid unneeded calls to CDM.
  665. for (const preferredKeySystem of preferredKeySystems) {
  666. let keySystemSatisfied = false;
  667. for (const variant of variants) {
  668. /** @type {!Array<!Array<!MediaDecodingConfiguration>>} */
  669. const decodingConfigs = shaka.util.StreamUtils.getDecodingConfigs_(
  670. variant, usePersistentLicenses, srcEquals)
  671. .filter((configs) => {
  672. // All configs in a batch will have the same keySystem.
  673. const config = configs[0];
  674. const keySystem = config.keySystemConfiguration &&
  675. config.keySystemConfiguration.keySystem;
  676. return keySystem === preferredKeySystem;
  677. });
  678. // The reason we are performing this await in a loop rather than
679. // batching into a `Promise.all` is performance related.
  680. // https://github.com/shaka-project/shaka-player/pull/4708#discussion_r1022581178
  681. for (const configs of decodingConfigs) {
  682. // eslint-disable-next-line no-await-in-loop
  683. await shaka.util.StreamUtils.getDecodingInfosForVariant_(
  684. variant, configs);
  685. }
  686. if (variant.decodingInfos.some((d) => d.supported)) {
  687. keySystemSatisfied = true;
  688. }
  689. } // for (const variant of variants)
  690. if (keySystemSatisfied) {
  691. // Return if any preferred key system is already satisfied.
  692. return;
  693. }
  694. } // for (const preferredKeySystem of preferredKeySystems)
  695. for (const variant of variants) {
  696. /** @type {!Array<!Array<!MediaDecodingConfiguration>>} */
  697. const decodingConfigs = shaka.util.StreamUtils.getDecodingConfigs_(
  698. variant, usePersistentLicenses, srcEquals)
  699. .filter((configs) => {
  700. // All configs in a batch will have the same keySystem.
  701. const config = configs[0];
  702. const keySystem = config.keySystemConfiguration &&
  703. config.keySystemConfiguration.keySystem;
  704. // Avoid checking preferred systems twice.
  705. return !keySystem || !preferredKeySystems.includes(keySystem);
  706. });
  707. // The reason we are performing this await in a loop rather than
708. // batching into a `Promise.all` is performance related.
  709. // https://github.com/shaka-project/shaka-player/pull/4708#discussion_r1022581178
  710. for (const configs of decodingConfigs) {
  711. // eslint-disable-next-line no-await-in-loop
  712. await shaka.util.StreamUtils.getDecodingInfosForVariant_(
  713. variant, configs);
  714. }
  715. }
  716. }
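// Usage sketch (hypothetical arguments): listing a preferred key system lets
// the method stop early once that key system is playable, avoiding
// decodingInfo() calls for the remaining key systems.
//
//   await shaka.util.StreamUtils.getDecodingInfosForVariants(
//       manifest.variants, /* usePersistentLicenses= */ false,
//       /* srcEquals= */ false, ['com.widevine.alpha']);
//   // Each variant.decodingInfos now holds merged
//   // MediaCapabilitiesDecodingInfo results.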
  717. /**
  718. * Generate a batch of MediaDecodingConfiguration objects to get the
  719. * decodingInfo results for each variant.
  720. * Each batch shares the same DRM information, and represents the various
  721. * fullMimeType combinations of the streams.
  722. * @param {!shaka.extern.Variant} variant
  723. * @param {boolean} usePersistentLicenses
  724. * @param {boolean} srcEquals
  725. * @return {!Array<!Array<!MediaDecodingConfiguration>>}
  726. * @private
  727. */
  728. static getDecodingConfigs_(variant, usePersistentLicenses, srcEquals) {
  729. const audio = variant.audio;
  730. const video = variant.video;
  731. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  732. const ManifestParserUtils = shaka.util.ManifestParserUtils;
  733. const MimeUtils = shaka.util.MimeUtils;
  734. const StreamUtils = shaka.util.StreamUtils;
  735. const videoConfigs = [];
  736. const audioConfigs = [];
  737. if (video) {
  738. for (const fullMimeType of video.fullMimeTypes) {
  739. let videoCodecs = MimeUtils.getCodecs(fullMimeType);
  740. // For multiplexed streams with audio+video codecs, the config should
  741. // have AudioConfiguration and VideoConfiguration.
  742. // We ignore the multiplexed audio when there is normal audio also.
  743. if (videoCodecs.includes(',') && !audio) {
  744. const allCodecs = videoCodecs.split(',');
  745. const baseMimeType = MimeUtils.getBasicType(fullMimeType);
  746. videoCodecs = ManifestParserUtils.guessCodecs(
  747. ContentType.VIDEO, allCodecs);
  748. let audioCodecs = ManifestParserUtils.guessCodecs(
  749. ContentType.AUDIO, allCodecs);
  750. audioCodecs = StreamUtils.getCorrectAudioCodecs(
  751. audioCodecs, baseMimeType);
  752. const audioFullType = MimeUtils.getFullOrConvertedType(
  753. baseMimeType, audioCodecs, ContentType.AUDIO);
  754. audioConfigs.push({
  755. contentType: audioFullType,
  756. channels: 2,
  757. bitrate: variant.bandwidth || 1,
  758. samplerate: 1,
  759. spatialRendering: false,
  760. });
  761. }
  762. videoCodecs = StreamUtils.getCorrectVideoCodecs(videoCodecs);
  763. const fullType = MimeUtils.getFullOrConvertedType(
  764. MimeUtils.getBasicType(fullMimeType), videoCodecs,
  765. ContentType.VIDEO);
  766. // VideoConfiguration
  767. const videoConfig = {
  768. contentType: fullType,
  769. // NOTE: Some decoders strictly check the width and height fields and
  770. // won't decode smaller than 64x64. So if we don't have this info (as
  771. // is the case in some of our simpler tests), assume a 64x64
  772. // resolution to fill in this required field for MediaCapabilities.
  773. //
  774. // This became an issue specifically on Firefox on M1 Macs.
  775. width: video.width || 64,
  776. height: video.height || 64,
  777. bitrate: video.bandwidth || variant.bandwidth || 1,
  778. // framerate must be greater than 0, otherwise the config is invalid.
  779. framerate: video.frameRate || 30,
  780. };
  781. if (video.hdr) {
782. // We assume that SDR uses srgb by default, so don't set it.
  783. switch (video.hdr) {
  784. case 'PQ':
  785. videoConfig.transferFunction = 'pq';
  786. break;
  787. case 'HLG':
  788. videoConfig.transferFunction = 'hlg';
  789. break;
  790. }
  791. }
  792. if (video.colorGamut) {
  793. videoConfig.colorGamut = video.colorGamut;
  794. }
  795. videoConfigs.push(videoConfig);
  796. }
  797. }
  798. if (audio) {
  799. for (const fullMimeType of audio.fullMimeTypes) {
  800. const baseMimeType = MimeUtils.getBasicType(fullMimeType);
  801. const codecs = StreamUtils.getCorrectAudioCodecs(
  802. MimeUtils.getCodecs(fullMimeType), baseMimeType);
  803. const fullType = MimeUtils.getFullOrConvertedType(
  804. baseMimeType, codecs, ContentType.AUDIO);
  805. // AudioConfiguration
  806. audioConfigs.push({
  807. contentType: fullType,
  808. channels: audio.channelsCount || 2,
  809. bitrate: audio.bandwidth || variant.bandwidth || 1,
  810. samplerate: audio.audioSamplingRate || 1,
  811. spatialRendering: audio.spatialAudio,
  812. });
  813. }
  814. }
  815. // Generate each combination of video and audio config as a separate
  816. // MediaDecodingConfiguration, inside the main "batch".
  817. /** @type {!Array<!MediaDecodingConfiguration>} */
  818. const mediaDecodingConfigBatch = [];
  819. if (videoConfigs.length == 0) {
  820. videoConfigs.push(null);
  821. }
  822. if (audioConfigs.length == 0) {
  823. audioConfigs.push(null);
  824. }
  825. for (const videoConfig of videoConfigs) {
  826. for (const audioConfig of audioConfigs) {
  827. /** @type {!MediaDecodingConfiguration} */
  828. const mediaDecodingConfig = {
  829. type: srcEquals ? 'file' : 'media-source',
  830. };
  831. if (videoConfig) {
  832. mediaDecodingConfig.video = videoConfig;
  833. }
  834. if (audioConfig) {
  835. mediaDecodingConfig.audio = audioConfig;
  836. }
  837. mediaDecodingConfigBatch.push(mediaDecodingConfig);
  838. }
  839. }
  840. const videoDrmInfos = variant.video ? variant.video.drmInfos : [];
  841. const audioDrmInfos = variant.audio ? variant.audio.drmInfos : [];
  842. const allDrmInfos = videoDrmInfos.concat(audioDrmInfos);
843. // Return a list containing the mediaDecodingConfig for an unencrypted variant.
  844. if (!allDrmInfos.length) {
  845. return [mediaDecodingConfigBatch];
  846. }
  847. // A list of MediaDecodingConfiguration objects created for the variant.
  848. const configs = [];
  849. // Get all the drm info so that we can avoid using nested loops when we
  850. // just need the drm info.
  851. const drmInfoByKeySystems = new Map();
  852. for (const info of allDrmInfos) {
  853. if (!drmInfoByKeySystems.get(info.keySystem)) {
  854. drmInfoByKeySystems.set(info.keySystem, []);
  855. }
  856. drmInfoByKeySystems.get(info.keySystem).push(info);
  857. }
  858. const persistentState =
  859. usePersistentLicenses ? 'required' : 'optional';
  860. const sessionTypes =
  861. usePersistentLicenses ? ['persistent-license'] : ['temporary'];
  862. for (const keySystem of drmInfoByKeySystems.keys()) {
  863. const drmInfos = drmInfoByKeySystems.get(keySystem);
  864. // Get all the robustness info so that we can avoid using nested
  865. // loops when we just need the robustness.
  866. const drmInfosByRobustness = new Map();
  867. for (const info of drmInfos) {
  868. const keyName = `${info.videoRobustness},${info.audioRobustness}`;
  869. if (!drmInfosByRobustness.get(keyName)) {
  870. drmInfosByRobustness.set(keyName, []);
  871. }
  872. drmInfosByRobustness.get(keyName).push(info);
  873. }
  874. for (const drmInfosRobustness of drmInfosByRobustness.values()) {
  875. const modifiedMediaDecodingConfigBatch = [];
  876. for (const base of mediaDecodingConfigBatch) {
  877. // Create a copy of the mediaDecodingConfig.
  878. const config = /** @type {!MediaDecodingConfiguration} */
  879. (Object.assign({}, base));
  880. /** @type {!MediaCapabilitiesKeySystemConfiguration} */
  881. const keySystemConfig = {
  882. keySystem: keySystem,
  883. initDataType: 'cenc',
  884. persistentState: persistentState,
  885. distinctiveIdentifier: 'optional',
  886. sessionTypes: sessionTypes,
  887. };
  888. for (const info of drmInfosRobustness) {
  889. if (info.initData && info.initData.length) {
  890. const initDataTypes = new Set();
  891. for (const initData of info.initData) {
  892. initDataTypes.add(initData.initDataType);
  893. }
  894. if (initDataTypes.size > 1) {
  895. shaka.log.v2('DrmInfo contains more than one initDataType,',
  896. 'and we use the initDataType of the first initData.',
  897. info);
  898. }
  899. keySystemConfig.initDataType = info.initData[0].initDataType;
  900. }
  901. if (info.distinctiveIdentifierRequired) {
  902. keySystemConfig.distinctiveIdentifier = 'required';
  903. }
  904. if (info.persistentStateRequired) {
  905. keySystemConfig.persistentState = 'required';
  906. }
  907. if (info.sessionType) {
  908. keySystemConfig.sessionTypes = [info.sessionType];
  909. }
  910. if (audio) {
  911. if (!keySystemConfig.audio) {
  912. // KeySystemTrackConfiguration
  913. keySystemConfig.audio = {
  914. robustness: info.audioRobustness,
  915. };
  916. if (info.encryptionScheme) {
  917. keySystemConfig.audio.encryptionScheme =
  918. info.encryptionScheme;
  919. }
  920. } else {
  921. if (info.encryptionScheme) {
  922. keySystemConfig.audio.encryptionScheme =
  923. keySystemConfig.audio.encryptionScheme ||
  924. info.encryptionScheme;
  925. }
  926. keySystemConfig.audio.robustness =
  927. keySystemConfig.audio.robustness ||
  928. info.audioRobustness;
  929. }
  930. // See: https://github.com/shaka-project/shaka-player/issues/4659
  931. if (keySystemConfig.audio.robustness == '') {
  932. delete keySystemConfig.audio.robustness;
  933. }
  934. }
  935. if (video) {
  936. if (!keySystemConfig.video) {
  937. // KeySystemTrackConfiguration
  938. keySystemConfig.video = {
  939. robustness: info.videoRobustness,
  940. };
  941. if (info.encryptionScheme) {
  942. keySystemConfig.video.encryptionScheme =
  943. info.encryptionScheme;
  944. }
  945. } else {
  946. if (info.encryptionScheme) {
  947. keySystemConfig.video.encryptionScheme =
  948. keySystemConfig.video.encryptionScheme ||
  949. info.encryptionScheme;
  950. }
  951. keySystemConfig.video.robustness =
  952. keySystemConfig.video.robustness ||
  953. info.videoRobustness;
  954. }
  955. // See: https://github.com/shaka-project/shaka-player/issues/4659
  956. if (keySystemConfig.video.robustness == '') {
  957. delete keySystemConfig.video.robustness;
  958. }
  959. }
  960. }
  961. config.keySystemConfiguration = keySystemConfig;
  962. modifiedMediaDecodingConfigBatch.push(config);
  963. }
  964. configs.push(modifiedMediaDecodingConfigBatch);
  965. }
  966. }
  967. return configs;
  968. }
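// Rough shape of one returned batch (all field values hypothetical):
//
//   [{
//     type: 'media-source',
//     video: {contentType: 'video/mp4; codecs="avc1.42E01E"',
//             width: 1280, height: 720, bitrate: 2000000, framerate: 30},
//     audio: {contentType: 'audio/mp4; codecs="mp4a.40.2"', channels: 2,
//             bitrate: 128000, samplerate: 48000, spatialRendering: false},
//     keySystemConfiguration: {keySystem: 'com.widevine.alpha',
//                              initDataType: 'cenc', ...},
//   }]
//
// Unencrypted variants produce a single batch with no
// keySystemConfiguration; encrypted variants produce one batch per
// keySystem/robustness combination.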
  969. /**
  970. * Generates the correct audio codec for MediaDecodingConfiguration and
  971. * for MediaSource.isTypeSupported.
  972. * @param {string} codecs
  973. * @param {string} mimeType
  974. * @return {string}
  975. */
  976. static getCorrectAudioCodecs(codecs, mimeType) {
  977. // According to RFC 6381 section 3.3, 'fLaC' is actually the correct
  978. // codec string. We still need to map it to 'flac', as some browsers
  979. // currently don't support 'fLaC', while 'flac' is supported by most
  980. // major browsers.
  981. // See https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
  982. const device = shaka.device.DeviceFactory.getDevice();
  983. const webkit = shaka.device.IDevice.BrowserEngine.WEBKIT;
  984. if (codecs.toLowerCase() == 'flac') {
  985. if (device.getBrowserEngine() != webkit) {
  986. return 'flac';
  987. } else {
  988. return 'fLaC';
  989. }
  990. }
  991. // The same is true for 'Opus'.
  992. if (codecs.toLowerCase() === 'opus') {
  993. if (device.getBrowserEngine() != webkit) {
  994. return 'opus';
  995. } else {
  996. if (shaka.util.MimeUtils.getContainerType(mimeType) == 'mp4') {
  997. return 'Opus';
  998. } else {
  999. return 'opus';
  1000. }
  1001. }
  1002. }
  1003. if (codecs.toLowerCase() == 'ac-3' && device.requiresEC3InitSegments()) {
  1004. return 'ec-3';
  1005. }
  1006. return codecs;
  1007. }
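// For example (engine checks as implemented above):
//   getCorrectAudioCodecs('flac', 'audio/mp4')  -> 'fLaC' on WebKit,
//                                                  'flac' elsewhere
//   getCorrectAudioCodecs('opus', 'audio/mp4')  -> 'Opus' on WebKit,
//                                                  'opus' elsewhere
//   getCorrectAudioCodecs('opus', 'audio/webm') -> 'opus' everywhere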
  1008. /**
  1009. * Generates the correct video codec for MediaDecodingConfiguration and
  1010. * for MediaSource.isTypeSupported.
  1011. * @param {string} codec
  1012. * @return {string}
  1013. */
  1014. static getCorrectVideoCodecs(codec) {
  1015. if (codec.includes('avc1')) {
  1016. // Convert avc1 codec string from RFC-4281 to RFC-6381 for
  1017. // MediaSource.isTypeSupported
  1018. // Example, convert avc1.66.30 to avc1.42001e (0x42 == 66 and 0x1e == 30)
  1019. const avcData = codec.split('.');
  1020. if (avcData.length == 3) {
  1021. let result = avcData.shift() + '.';
  1022. result += parseInt(avcData.shift(), 10).toString(16);
  1023. result +=
  1024. ('000' + parseInt(avcData.shift(), 10).toString(16)).slice(-4);
  1025. return result;
  1026. }
  1027. } else if (codec == 'vp9') {
  1028. // MediaCapabilities supports 'vp09...' codecs, but not 'vp9'. Translate
  1029. // vp9 codec strings into 'vp09...', to allow such content to play with
  1030. // mediaCapabilities enabled.
  1031. // This means profile 0, level 4.1, 8-bit color. This supports 1080p @
  1032. // 60Hz. See https://en.wikipedia.org/wiki/VP9#Levels
  1033. //
  1034. // If we don't have more detailed codec info, assume this profile and
  1035. // level because it's high enough to likely accommodate the parameters we
  1036. // do have, such as width and height. If an implementation is checking
  1037. // the profile and level very strictly, we want older VP9 content to
  1038. // still work to some degree. But we don't want to set a level so high
  1039. // that it is rejected by a hardware decoder that can't handle the
  1040. // maximum requirements of the level.
  1041. //
  1042. // This became an issue specifically on Firefox on M1 Macs.
  1043. return 'vp09.00.41.08';
  1044. }
  1045. return codec;
  1046. }
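// For example:
//   getCorrectVideoCodecs('avc1.66.30') -> 'avc1.42001e'
//   getCorrectVideoCodecs('vp9')        -> 'vp09.00.41.08'
//   getCorrectVideoCodecs('hvc1.1.6.L93.B0') -> returned unchanged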
  1047. /**
  1048. * Alters the given Manifest to filter out any streams incompatible with the
  1049. * current variant.
  1050. *
  1051. * @param {?shaka.extern.Variant} currentVariant
  1052. * @param {shaka.extern.Manifest} manifest
  1053. */
  1054. static filterManifestByCurrentVariant(currentVariant, manifest) {
  1055. const StreamUtils = shaka.util.StreamUtils;
  1056. manifest.variants = manifest.variants.filter((variant) => {
  1057. const audio = variant.audio;
  1058. const video = variant.video;
  1059. if (audio && currentVariant && currentVariant.audio) {
  1060. if (!StreamUtils.areStreamsCompatible_(audio, currentVariant.audio)) {
  1061. shaka.log.debug('Dropping variant - not compatible with active audio',
  1062. 'active audio',
  1063. StreamUtils.getStreamSummaryString_(currentVariant.audio),
  1064. 'variant.audio',
  1065. StreamUtils.getStreamSummaryString_(audio));
  1066. return false;
  1067. }
  1068. }
  1069. if (video && currentVariant && currentVariant.video) {
  1070. if (!StreamUtils.areStreamsCompatible_(video, currentVariant.video)) {
  1071. shaka.log.debug('Dropping variant - not compatible with active video',
  1072. 'active video',
  1073. StreamUtils.getStreamSummaryString_(currentVariant.video),
  1074. 'variant.video',
  1075. StreamUtils.getStreamSummaryString_(video));
  1076. return false;
  1077. }
  1078. }
  1079. return true;
  1080. });
  1081. }
  1082. /**
  1083. * Alters the given Manifest to filter out any unsupported text streams.
  1084. *
  1085. * @param {shaka.extern.Manifest} manifest
  1086. * @private
  1087. */
  1088. static filterTextStreams_(manifest) {
  1089. // Filter text streams.
  1090. manifest.textStreams = manifest.textStreams.filter((stream) => {
  1091. const fullMimeType = shaka.util.MimeUtils.getFullType(
  1092. stream.mimeType, stream.codecs);
  1093. const keep = shaka.text.TextEngine.isTypeSupported(fullMimeType);
  1094. if (!keep) {
  1095. shaka.log.debug('Dropping text stream. Is not supported by the ' +
  1096. 'platform.', stream);
  1097. }
  1098. return keep;
  1099. });
  1100. }
  1101. /**
  1102. * Alters the given Manifest to filter out any unsupported image streams.
  1103. *
  1104. * @param {shaka.extern.Manifest} manifest
  1105. * @private
  1106. */
  1107. static async filterImageStreams_(manifest) {
  1108. const imageStreams = [];
  1109. for (const stream of manifest.imageStreams) {
  1110. let mimeType = stream.mimeType;
  1111. if (mimeType == 'application/mp4' && stream.codecs == 'mjpg') {
  1112. mimeType = 'image/jpg';
  1113. }
  1114. if (!shaka.util.StreamUtils.supportedImageMimeTypes_.has(mimeType)) {
  1115. const minImage = shaka.util.StreamUtils.minImage_.get(mimeType);
  1116. if (minImage) {
  1117. // eslint-disable-next-line no-await-in-loop
  1118. const res = await shaka.util.StreamUtils.isImageSupported_(minImage);
  1119. shaka.util.StreamUtils.supportedImageMimeTypes_.set(mimeType, res);
  1120. } else {
  1121. shaka.util.StreamUtils.supportedImageMimeTypes_.set(mimeType, false);
  1122. }
  1123. }
  1124. const keep =
  1125. shaka.util.StreamUtils.supportedImageMimeTypes_.get(mimeType);
  1126. if (!keep) {
  1127. shaka.log.debug('Dropping image stream. Is not supported by the ' +
  1128. 'platform.', stream);
  1129. } else {
  1130. imageStreams.push(stream);
  1131. }
  1132. }
  1133. manifest.imageStreams = imageStreams;
  1134. }
  1135. /**
  1136. * @param {string} minImage
  1137. * @return {!Promise<boolean>}
  1138. * @private
  1139. */
  1140. static isImageSupported_(minImage) {
  1141. return new Promise((resolve) => {
  1142. const imageElement = /** @type {HTMLImageElement} */(new Image());
  1143. imageElement.src = minImage;
  1144. if ('decode' in imageElement) {
  1145. imageElement.decode().then(() => {
  1146. resolve(true);
  1147. }).catch(() => {
  1148. resolve(false);
  1149. });
  1150. } else {
  1151. imageElement.onload = imageElement.onerror = () => {
  1152. resolve(imageElement.height === 2);
  1153. };
  1154. }
  1155. });
  1156. }
  1157. /**
  1158. * @param {shaka.extern.Stream} s0
  1159. * @param {shaka.extern.Stream} s1
  1160. * @return {boolean}
  1161. * @private
  1162. */
  1163. static areStreamsCompatible_(s0, s1) {
  1164. // Basic mime types and basic codecs need to match.
  1165. // For example, we can't adapt between WebM and MP4,
  1166. // nor can we adapt between mp4a.* to ec-3.
  1167. // We can switch between text types on the fly,
  1168. // so don't run this check on text.
  1169. if (s0.mimeType != s1.mimeType) {
  1170. return false;
  1171. }
  1172. if (s0.codecs.split('.')[0] != s1.codecs.split('.')[0]) {
  1173. return false;
  1174. }
  1175. return true;
  1176. }
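// For example (hypothetical codec strings): 'mp4a.40.2' and 'mp4a.40.5'
// share the base 'mp4a' and are considered compatible; 'mp4a.40.2' and
// 'ec-3' are not, and neither are two streams whose mimeType differs
// (e.g. audio/webm vs audio/mp4).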
  1177. /**
  1178. * @param {shaka.extern.Variant} variant
  1179. * @return {shaka.extern.Track}
  1180. */
  1181. static variantToTrack(variant) {
  1182. const ManifestParserUtils = shaka.util.ManifestParserUtils;
  1183. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1184. /** @type {?shaka.extern.Stream} */
  1185. const audio = variant.audio;
  1186. /** @type {?shaka.extern.Stream} */
  1187. const video = variant.video;
  1188. /** @type {?string} */
  1189. const audioMimeType = audio ? audio.mimeType : null;
  1190. /** @type {?string} */
  1191. const videoMimeType = video ? video.mimeType : null;
  1192. /** @type {?string} */
  1193. const audioCodec = audio ? audio.codecs : null;
  1194. /** @type {?string} */
  1195. const videoCodec = video ? video.codecs : null;
  1196. /** @type {?string} */
  1197. const audioGroupId = audio ? audio.groupId : null;
  1198. /** @type {!Array<string>} */
  1199. const mimeTypes = [];
  1200. if (video) {
  1201. mimeTypes.push(video.mimeType);
  1202. }
  1203. if (audio) {
  1204. mimeTypes.push(audio.mimeType);
  1205. }
  1206. /** @type {?string} */
  1207. const mimeType = mimeTypes[0] || null;
  1208. /** @type {!Array<string>} */
  1209. const kinds = [];
  1210. if (audio) {
  1211. kinds.push(audio.kind);
  1212. }
  1213. if (video) {
  1214. kinds.push(video.kind);
  1215. }
  1216. /** @type {?string} */
  1217. const kind = kinds[0] || null;
  1218. /** @type {!Set<string>} */
  1219. const roles = new Set();
  1220. if (audio) {
  1221. for (const role of audio.roles) {
  1222. roles.add(role);
  1223. }
  1224. }
  1225. if (video) {
  1226. for (const role of video.roles) {
  1227. roles.add(role);
  1228. }
  1229. }
  1230. /** @type {shaka.extern.Track} */
  1231. const track = {
  1232. id: variant.id,
  1233. active: false,
  1234. type: 'variant',
  1235. bandwidth: variant.bandwidth,
  1236. language: variant.language,
  1237. label: null,
  1238. videoLabel: null,
  1239. kind: kind,
  1240. width: null,
  1241. height: null,
  1242. frameRate: null,
  1243. pixelAspectRatio: null,
  1244. hdr: null,
  1245. colorGamut: null,
  1246. videoLayout: null,
  1247. mimeType: mimeType,
  1248. audioMimeType: audioMimeType,
  1249. videoMimeType: videoMimeType,
  1250. codecs: '',
  1251. audioCodec: audioCodec,
  1252. videoCodec: videoCodec,
  1253. primary: variant.primary,
  1254. roles: Array.from(roles),
  1255. audioRoles: null,
  1256. videoRoles: null,
  1257. forced: false,
  1258. videoId: null,
  1259. audioId: null,
  1260. audioGroupId: audioGroupId,
  1261. channelsCount: null,
  1262. audioSamplingRate: null,
  1263. spatialAudio: false,
  1264. tilesLayout: null,
  1265. audioBandwidth: null,
  1266. videoBandwidth: null,
  1267. originalVideoId: null,
  1268. originalAudioId: null,
  1269. originalTextId: null,
  1270. originalImageId: null,
  1271. accessibilityPurpose: null,
  1272. originalLanguage: null,
  1273. };
  1274. if (video) {
  1275. track.videoId = video.id;
  1276. track.originalVideoId = video.originalId;
  1277. track.width = video.width || null;
  1278. track.height = video.height || null;
  1279. track.frameRate = video.frameRate || null;
  1280. track.pixelAspectRatio = video.pixelAspectRatio || null;
  1281. track.videoBandwidth = video.bandwidth || null;
  1282. track.hdr = video.hdr || null;
  1283. track.colorGamut = video.colorGamut || null;
  1284. track.videoLayout = video.videoLayout || null;
  1285. track.videoRoles = video.roles;
  1286. track.videoLabel = video.label;
  1287. const dependencyStream = video.dependencyStream;
  1288. if (dependencyStream) {
  1289. track.width = dependencyStream.width || track.width;
  1290. track.height = dependencyStream.height || track.height;
  1291. track.videoCodec = dependencyStream.codecs || track.videoCodec;
  1292. if (track.videoBandwidth && dependencyStream.bandwidth) {
  1293. track.videoBandwidth += dependencyStream.bandwidth;
  1294. }
  1295. }
  1296. if (videoCodec.includes(',')) {
  1297. track.channelsCount = video.channelsCount;
  1298. track.audioSamplingRate = video.audioSamplingRate;
  1299. track.spatialAudio = video.spatialAudio;
  1300. track.originalLanguage = video.originalLanguage;
  1301. track.audioMimeType = videoMimeType;
  1302. const allCodecs = videoCodec.split(',');
  1303. try {
  1304. track.videoCodec = ManifestParserUtils.guessCodecs(
  1305. ContentType.VIDEO, allCodecs);
  1306. track.audioCodec = ManifestParserUtils.guessCodecs(
  1307. ContentType.AUDIO, allCodecs);
  1308. } catch (e) {
  1309. // Ignore this error.
  1310. }
  1311. }
  1312. }
  1313. if (audio) {
  1314. track.audioId = audio.id;
  1315. track.originalAudioId = audio.originalId;
  1316. track.channelsCount = audio.channelsCount;
  1317. track.audioSamplingRate = audio.audioSamplingRate;
  1318. track.audioBandwidth = audio.bandwidth || null;
  1319. track.spatialAudio = audio.spatialAudio;
  1320. track.label = audio.label;
  1321. track.audioRoles = audio.roles;
  1322. track.accessibilityPurpose = audio.accessibilityPurpose;
  1323. track.originalLanguage = audio.originalLanguage;
  1324. const dependencyStream = audio.dependencyStream;
  1325. if (dependencyStream) {
  1326. track.audioCodec = dependencyStream.codecs || track.audioCodec;
  1327. if (track.audioBandwidth && dependencyStream.bandwidth) {
  1328. track.audioBandwidth += dependencyStream.bandwidth;
  1329. }
  1330. }
  1331. }
  1332. if (video && !track.videoBandwidth) {
  1333. if (audio) {
  1334. if (track.audioBandwidth) {
  1335. track.videoBandwidth = track.bandwidth - track.audioBandwidth;
  1336. }
  1337. } else {
  1338. track.videoBandwidth = track.bandwidth;
  1339. }
  1340. }
  1341. if (audio && !track.audioBandwidth) {
  1342. if (video) {
  1343. if (track.videoBandwidth) {
  1344. track.audioBandwidth = track.bandwidth - track.videoBandwidth;
  1345. }
  1346. } else {
  1347. track.audioBandwidth = track.bandwidth;
  1348. }
  1349. }
  1350. /** @type {!Array<string>} */
  1351. const codecs = [];
  1352. if (track.videoCodec) {
  1353. codecs.push(track.videoCodec);
  1354. }
  1355. if (track.audioCodec) {
  1356. codecs.push(track.audioCodec);
  1357. }
  1358. track.codecs = codecs.join(', ');
  1359. return track;
  1360. }
  1361. /**
  1362. * @param {shaka.extern.Stream} stream
  1363. * @return {shaka.extern.TextTrack}
  1364. */
  1365. static textStreamToTrack(stream) {
  1366. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1367. /** @type {shaka.extern.TextTrack} */
  1368. const track = {
  1369. id: stream.id,
  1370. active: false,
  1371. type: ContentType.TEXT,
  1372. bandwidth: stream.bandwidth || 0,
  1373. language: stream.language,
  1374. label: stream.label,
  1375. kind: stream.kind || null,
  1376. mimeType: stream.mimeType,
  1377. codecs: stream.codecs || null,
  1378. primary: stream.primary,
  1379. roles: stream.roles,
  1380. accessibilityPurpose: stream.accessibilityPurpose,
  1381. forced: stream.forced,
  1382. originalTextId: stream.originalId,
  1383. originalLanguage: stream.originalLanguage,
  1384. };
  1385. return track;
  1386. }
  1387. /**
  1388. * @param {shaka.extern.Stream} stream
  1389. * @return {shaka.extern.ImageTrack}
  1390. */
  1391. static imageStreamToTrack(stream) {
  1392. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1393. let width = stream.width || null;
  1394. let height = stream.height || null;
  1395. // The stream width and height represent the size of the entire thumbnail
  1396. // sheet, so divide by the layout.
  1397. let reference = null;
1398. // Note: segmentIndex is built by default for HLS but not for DASH; in
1399. // DASH this information comes at the stream level rather than at the
1400. // segment level.
  1401. if (stream.segmentIndex) {
  1402. reference = stream.segmentIndex.earliestReference();
  1403. }
  1404. let layout = stream.tilesLayout;
  1405. if (reference) {
  1406. layout = reference.getTilesLayout() || layout;
  1407. }
  1408. if (layout && width != null) {
  1409. width /= Number(layout.split('x')[0]);
  1410. }
  1411. if (layout && height != null) {
  1412. height /= Number(layout.split('x')[1]);
  1413. }
  1414. // TODO: What happens if there are multiple grids, with different
  1415. // layout sizes, inside this image stream?
  1416. /** @type {shaka.extern.ImageTrack} */
  1417. const track = {
  1418. id: stream.id,
  1419. type: ContentType.IMAGE,
  1420. bandwidth: stream.bandwidth || 0,
  1421. width,
  1422. height,
  1423. mimeType: stream.mimeType,
  1424. codecs: stream.codecs || null,
  1425. tilesLayout: layout || null,
  1426. originalImageId: stream.originalId,
  1427. };
  1428. return track;
  1429. }
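// For example (hypothetical sheet): a 640x360 thumbnail sheet with
// tilesLayout == '5x5' produces a track with width == 128 and height == 72,
// i.e. the size of a single tile.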
  1430. /**
  1431. * Generate and return an ID for this track, since the ID field is optional.
  1432. *
  1433. * @param {TextTrack|AudioTrack|VideoTrack} html5Track
  1434. * @return {number} The generated ID.
  1435. */
  1436. static html5TrackId(html5Track) {
  1437. if (!html5Track['__shaka_id']) {
  1438. html5Track['__shaka_id'] = shaka.util.StreamUtils.nextTrackId_++;
  1439. }
  1440. return html5Track['__shaka_id'];
  1441. }
  1442. /**
  1443. * @param {TextTrack} textTrack
  1444. * @return {shaka.extern.TextTrack}
  1445. */
  1446. static html5TextTrackToTrack(textTrack) {
  1447. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1448. /** @type {shaka.extern.TextTrack} */
  1449. const track = {
  1450. id: shaka.util.StreamUtils.html5TrackId(textTrack),
  1451. active: textTrack.mode != 'disabled',
  1452. type: ContentType.TEXT,
  1453. bandwidth: 0,
  1454. language: shaka.util.LanguageUtils.normalize(textTrack.language || 'und'),
  1455. label: textTrack.label,
  1456. kind: textTrack.kind,
  1457. mimeType: null,
  1458. codecs: null,
  1459. primary: false,
  1460. roles: [],
  1461. accessibilityPurpose: null,
  1462. forced: textTrack.kind == 'forced',
  1463. originalTextId: textTrack.id,
  1464. originalLanguage: textTrack.language,
  1465. };
  1466. if (textTrack.kind == 'captions') {
  1467. // See: https://github.com/shaka-project/shaka-player/issues/6233
  1468. track.mimeType = 'unknown';
  1469. }
  1470. if (textTrack.kind == 'subtitles') {
  1471. track.mimeType = 'text/vtt';
  1472. }
  1473. if (textTrack.kind) {
  1474. track.roles = [textTrack.kind];
  1475. }
  1476. return track;
  1477. }
  /**
   * @param {AudioTrack} audioTrack
   * @return {shaka.extern.AudioTrack}
   */
  static html5AudioTrackToTrack(audioTrack) {
    const language = audioTrack.language;
    /** @type {shaka.extern.AudioTrack} */
    const track = {
      active: audioTrack.enabled,
      language: shaka.util.LanguageUtils.normalize(language || 'und'),
      label: audioTrack.label,
      mimeType: null,
      codecs: null,
      primary: audioTrack.kind == 'main',
      roles: [],
      accessibilityPurpose: null,
      channelsCount: null,
      audioSamplingRate: null,
      spatialAudio: false,
      originalLanguage: language,
    };
    if (audioTrack.kind) {
      track.roles.push(audioTrack.kind);
    }
    if (audioTrack.configuration) {
      if (audioTrack.configuration.codec) {
        track.codecs = audioTrack.configuration.codec;
      }
      if (audioTrack.configuration.sampleRate) {
        track.audioSamplingRate = audioTrack.configuration.sampleRate;
      }
      if (audioTrack.configuration.numberOfChannels) {
        track.channelsCount = audioTrack.configuration.numberOfChannels;
      }
    }
    return track;
  }
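  // Illustrative sketch (hypothetical AudioTrack whose platform exposes a
  // `configuration` object; assumes a loaded <video> element named `video`):
  //
  //   const audio =
  //       shaka.util.StreamUtils.html5AudioTrackToTrack(video.audioTracks[0]);
  //   // configuration.codec, .sampleRate and .numberOfChannels, when set,
  //   // become audio.codecs, audio.audioSamplingRate and audio.channelsCount.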
  /**
   * @param {?AudioTrack} audioTrack
   * @param {?VideoTrack} videoTrack
   * @return {shaka.extern.Track}
   */
  static html5TrackToShakaTrack(audioTrack, videoTrack) {
    goog.asserts.assert(audioTrack || videoTrack,
        'There must be at least audioTrack or videoTrack.');
    const LanguageUtils = shaka.util.LanguageUtils;
    const language = audioTrack ? audioTrack.language : null;
    /** @type {shaka.extern.Track} */
    const track = {
      id: shaka.util.StreamUtils.html5TrackId(audioTrack || videoTrack),
      active: audioTrack ? audioTrack.enabled : videoTrack.selected,
      type: 'variant',
      bandwidth: 0,
      language: LanguageUtils.normalize(language || 'und'),
      label: audioTrack ? audioTrack.label : null,
      videoLabel: null,
      kind: audioTrack ? audioTrack.kind : null,
      width: null,
      height: null,
      frameRate: null,
      pixelAspectRatio: null,
      hdr: null,
      colorGamut: null,
      videoLayout: null,
      mimeType: null,
      audioMimeType: null,
      videoMimeType: null,
      codecs: null,
      audioCodec: null,
      videoCodec: null,
      primary: audioTrack ? audioTrack.kind == 'main' : false,
      roles: [],
      forced: false,
      audioRoles: null,
      videoRoles: null,
      videoId: null,
      audioId: null,
      audioGroupId: null,
      channelsCount: null,
      audioSamplingRate: null,
      spatialAudio: false,
      tilesLayout: null,
      audioBandwidth: null,
      videoBandwidth: null,
      originalVideoId: videoTrack ? videoTrack.id : null,
      originalAudioId: audioTrack ? audioTrack.id : null,
      originalTextId: null,
      originalImageId: null,
      accessibilityPurpose: null,
      originalLanguage: language,
    };
    if (audioTrack && audioTrack.kind) {
      track.roles = [audioTrack.kind];
      track.audioRoles = [audioTrack.kind];
    }
    if (audioTrack && audioTrack.configuration) {
      if (audioTrack.configuration.codec) {
        track.audioCodec = audioTrack.configuration.codec;
        track.codecs = track.audioCodec;
      }
      if (audioTrack.configuration.bitrate) {
        track.audioBandwidth = audioTrack.configuration.bitrate;
        track.bandwidth += track.audioBandwidth;
      }
      if (audioTrack.configuration.sampleRate) {
        track.audioSamplingRate = audioTrack.configuration.sampleRate;
      }
      if (audioTrack.configuration.numberOfChannels) {
        track.channelsCount = audioTrack.configuration.numberOfChannels;
      }
    }
    if (videoTrack && videoTrack.configuration) {
      if (videoTrack.configuration.codec) {
        track.videoCodec = videoTrack.configuration.codec;
        if (track.codecs) {
          track.codecs += ',' + track.videoCodec;
        } else {
          track.codecs = track.videoCodec;
        }
      }
      if (videoTrack.configuration.bitrate) {
        track.videoBandwidth = videoTrack.configuration.bitrate;
        track.bandwidth += track.videoBandwidth;
      }
      if (videoTrack.configuration.framerate) {
        track.frameRate = videoTrack.configuration.framerate;
      }
      if (videoTrack.configuration.width) {
        track.width = videoTrack.configuration.width;
      }
      if (videoTrack.configuration.height) {
        track.height = videoTrack.configuration.height;
      }
      if (videoTrack.configuration.colorSpace &&
          videoTrack.configuration.colorSpace.transfer) {
        switch (videoTrack.configuration.colorSpace.transfer) {
          case 'pq':
            track.hdr = 'PQ';
            break;
          case 'hlg':
            track.hdr = 'HLG';
            break;
          case 'bt709':
            track.hdr = 'SDR';
            break;
        }
      }
    }
    return track;
  }
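  // Illustrative sketch of how the combined variant track is assembled
  // (hypothetical configuration values):
  //
  //   // audioTrack.configuration: {codec: 'mp4a.40.2', bitrate: 128000}
  //   // videoTrack.configuration: {codec: 'avc1.42E01E', bitrate: 3000000}
  //   const t = shaka.util.StreamUtils.html5TrackToShakaTrack(
  //       audioTrack, videoTrack);
  //   // t.codecs == 'mp4a.40.2,avc1.42E01E'
  //   // t.bandwidth == 3128000  (audio bitrate + video bitrate)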
  /**
   * Determines if the given variant is playable.
   * @param {!shaka.extern.Variant} variant
   * @return {boolean}
   */
  static isPlayable(variant) {
    return variant.allowedByApplication &&
        variant.allowedByKeySystem &&
        variant.disabledUntilTime == 0;
  }
  /**
   * Filters out unplayable variants.
   * @param {!Array<!shaka.extern.Variant>} variants
   * @return {!Array<!shaka.extern.Variant>}
   */
  static getPlayableVariants(variants) {
    return variants.filter((variant) => {
      return shaka.util.StreamUtils.isPlayable(variant);
    });
  }
  /**
   * Chooses streams according to the given config.
   * Works both for Stream and Track types due to their similarities.
   *
   * @param {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} streams
   * @param {string} preferredLanguage
   * @param {string} preferredRole
   * @param {boolean} preferredForced
   * @return {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>}
   */
  static filterStreamsByLanguageAndRole(
      streams, preferredLanguage, preferredRole, preferredForced) {
    const LanguageUtils = shaka.util.LanguageUtils;
    /** @type {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} */
    let chosen = streams;
    // Start with the set of primary streams.
    /** @type {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} */
    const primary = streams.filter((stream) => {
      return stream.primary;
    });
    if (primary.length) {
      chosen = primary;
    }
    // Now reduce the set to one language. This covers both arbitrary language
    // choice and the reduction of the "primary" stream set to one language.
    const firstLanguage = chosen.length ? chosen[0].language : '';
    chosen = chosen.filter((stream) => {
      return stream.language == firstLanguage;
    });
    // Find the streams that best match our language preference. This will
    // override previous selections.
    if (preferredLanguage) {
      const closestLocale = LanguageUtils.findClosestLocale(
          LanguageUtils.normalize(preferredLanguage),
          streams.map((stream) => stream.language));
      // Only replace |chosen| if we found a locale that is close to our
      // preference.
      if (closestLocale) {
        chosen = streams.filter((stream) => {
          const locale = LanguageUtils.normalize(stream.language);
          return locale == closestLocale;
        });
      }
    }
    // Filter by forced preference
    chosen = chosen.filter((stream) => {
      return stream.forced == preferredForced;
    });
    // Now refine the choice based on role preference.
    if (preferredRole) {
      const roleMatches = shaka.util.StreamUtils.filterStreamsByRole_(
          chosen, preferredRole);
      if (roleMatches.length) {
        return roleMatches;
      } else {
        shaka.log.warning('No exact match for the text role could be found.');
      }
    } else {
      // Prefer text streams with no roles, if they exist.
      const noRoleMatches = chosen.filter((stream) => {
        return stream.roles.length == 0;
      });
      if (noRoleMatches.length) {
        return noRoleMatches;
      }
    }
    // Either there was no role preference, or it could not be satisfied.
    // Choose an arbitrary role, if there are any, and filter out any other
    // roles. This ensures we never adapt between roles.
    const allRoles = chosen.map((stream) => {
      return stream.roles;
    }).reduce(shaka.util.Functional.collapseArrays, []);
    if (!allRoles.length) {
      return chosen;
    }
    return shaka.util.StreamUtils.filterStreamsByRole_(chosen, allRoles[0]);
  }
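  // Illustrative walkthrough of the selection order above (hypothetical
  // `textStreams`): given French text streams marked primary plus non-primary
  // English streams, and a preferredLanguage of 'en', the primary (French)
  // set is selected first, but the language preference then overrides it;
  // the forced flag and role filters are applied afterwards.
  //
  //   const chosen = shaka.util.StreamUtils.filterStreamsByLanguageAndRole(
  //       textStreams, 'en', '', /* preferredForced= */ false);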
  /**
   * Filter Streams by role.
   * Works both for Stream and Track types due to their similarities.
   *
   * @param {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} streams
   * @param {string} preferredRole
   * @return {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>}
   * @private
   */
  static filterStreamsByRole_(streams, preferredRole) {
    return streams.filter((stream) => {
      return stream.roles.includes(preferredRole);
    });
  }
  /**
   * Checks if the given stream is an audio stream.
   *
   * @param {shaka.extern.Stream} stream
   * @return {boolean}
   */
  static isAudio(stream) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    return stream.type == ContentType.AUDIO;
  }
  /**
   * Checks if the given stream is a video stream.
   *
   * @param {shaka.extern.Stream} stream
   * @return {boolean}
   */
  static isVideo(stream) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    return stream.type == ContentType.VIDEO;
  }
  /**
   * Get all non-null streams in the variant as an array.
   *
   * @param {shaka.extern.Variant} variant
   * @return {!Array<shaka.extern.Stream>}
   */
  static getVariantStreams(variant) {
    const streams = [];
    if (variant.audio) {
      streams.push(variant.audio);
    }
    if (variant.video) {
      streams.push(variant.video);
    }
    return streams;
  }
  /**
   * Indicates whether any of the variant's streams are fastSwitching.
   *
   * @param {shaka.extern.Variant} variant
   * @return {boolean}
   */
  static isFastSwitching(variant) {
    if (variant.audio && variant.audio.fastSwitching) {
      return true;
    }
    if (variant.video && variant.video.fastSwitching) {
      return true;
    }
    return false;
  }
  /**
   * Sets the best matching i-frame stream on the original stream as its
   * trick-mode video.
   *
   * @param {!shaka.extern.Stream} stream
   * @param {!Array<!shaka.extern.Stream>} iFrameStreams
   */
  static setBetterIFrameStream(stream, iFrameStreams) {
    if (!iFrameStreams.length) {
      return;
    }
    const validStreams = iFrameStreams.filter((iFrameStream) =>
      shaka.util.MimeUtils.getNormalizedCodec(stream.codecs) ==
        shaka.util.MimeUtils.getNormalizedCodec(iFrameStream.codecs))
        .sort((a, b) => {
          if (!a.bandwidth || !b.bandwidth || a.bandwidth == b.bandwidth) {
            return (a.width || 0) - (b.width || 0);
          }
          return a.bandwidth - b.bandwidth;
        });
    stream.trickModeVideo = validStreams[0];
    if (validStreams.length > 1) {
      const sameResolutionStream = validStreams.find((iFrameStream) =>
        stream.width == iFrameStream.width &&
        stream.height == iFrameStream.height);
      if (sameResolutionStream) {
        stream.trickModeVideo = sameResolutionStream;
      }
    }
  }
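  // Illustrative sketch (hypothetical i-frame streams): for a 1920x1080 'avc1'
  // stream and codec-compatible i-frame candidates at 640x360, 1280x720 and
  // 1920x1080, the lowest-bandwidth candidate is picked first and then
  // replaced by the 1920x1080 candidate because its resolution matches the
  // stream exactly.
  //
  //   shaka.util.StreamUtils.setBetterIFrameStream(stream, iFrameStreams);
  //   // stream.trickModeVideo now references the chosen i-frame stream.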
  /**
   * Returns a string of a variant, with the attribute values of its audio
   * and/or video streams for log printing.
   * @param {shaka.extern.Variant} variant
   * @return {string}
   * @private
   */
  static getVariantSummaryString_(variant) {
    const summaries = [];
    if (variant.audio) {
      summaries.push(shaka.util.StreamUtils.getStreamSummaryString_(
          variant.audio));
    }
    if (variant.video) {
      summaries.push(shaka.util.StreamUtils.getStreamSummaryString_(
          variant.video));
    }
    return summaries.join(', ');
  }
  /**
   * Returns a string of an audio or video stream for log printing.
   * @param {shaka.extern.Stream} stream
   * @return {string}
   * @private
   */
  static getStreamSummaryString_(stream) {
    // Accepted parameters for Chromecast can be found (internally) at
    // go/cast-mime-params
    if (shaka.util.StreamUtils.isAudio(stream)) {
      return 'type=audio' +
             ' codecs=' + stream.codecs +
             ' bandwidth=' + stream.bandwidth +
             ' channelsCount=' + stream.channelsCount +
             ' audioSamplingRate=' + stream.audioSamplingRate;
    }
    if (shaka.util.StreamUtils.isVideo(stream)) {
      return 'type=video' +
             ' codecs=' + stream.codecs +
             ' bandwidth=' + stream.bandwidth +
             ' frameRate=' + stream.frameRate +
             ' width=' + stream.width +
             ' height=' + stream.height;
    }
    return 'unexpected stream type';
  }
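  // Example of the summary format produced above (hypothetical values):
  //
  //   'type=video codecs=avc1.42E01E bandwidth=3000000 frameRate=30' +
  //   ' width=1280 height=720'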
  /**
   * Clears underlying decoding config cache.
   */
  static clearDecodingConfigCache() {
    shaka.util.StreamUtils.decodingConfigCache_.clear();
  }
  /**
   * Check if we should show text on screen automatically.
   *
   * @param {?shaka.extern.Stream} audioStream
   * @param {shaka.extern.Stream} textStream
   * @param {!shaka.extern.PlayerConfiguration} config
   * @return {boolean}
   */
  static shouldInitiallyShowText(audioStream, textStream, config) {
    const AutoShowText = shaka.config.AutoShowText;
    if (config.autoShowText == AutoShowText.NEVER) {
      return false;
    }
    if (config.autoShowText == AutoShowText.ALWAYS) {
      return true;
    }
    const LanguageUtils = shaka.util.LanguageUtils;
    /** @type {string} */
    const preferredTextLocale =
        LanguageUtils.normalize(config.preferredTextLanguage);
    /** @type {string} */
    const textLocale = LanguageUtils.normalize(textStream.language);
    if (config.autoShowText == AutoShowText.IF_PREFERRED_TEXT_LANGUAGE) {
      // Only the text language match matters.
      return LanguageUtils.areLanguageCompatible(
          textLocale,
          preferredTextLocale);
    }
    if (config.autoShowText == AutoShowText.IF_SUBTITLES_MAY_BE_NEEDED) {
      if (!audioStream) {
        return false;
      }
      /* The text should automatically be shown if the text is
       * language-compatible with the user's text language preference, but not
       * compatible with the audio. These are cases where we deduce that
       * subtitles may be needed.
       *
       * For example:
       *   preferred | chosen | chosen |
       *   text      | text   | audio  | show
       *   -----------------------------------
       *   en-CA     | en     | jp     | true
       *   en        | en-US  | fr     | true
       *   fr-CA     | en-US  | jp     | false
       *   en-CA     | en-US  | en-US  | false
       *
       */
      /** @type {string} */
      const audioLocale = LanguageUtils.normalize(audioStream.language);
      return (
        LanguageUtils.areLanguageCompatible(textLocale, preferredTextLocale) &&
        !LanguageUtils.areLanguageCompatible(audioLocale, textLocale));
    }
    shaka.log.alwaysWarn('Invalid autoShowText setting!');
    return false;
  }
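  // Illustrative sketch (hypothetical streams and config): with autoShowText
  // set to AutoShowText.IF_SUBTITLES_MAY_BE_NEEDED, preferredTextLanguage
  // 'en', an 'en-US' text stream and a 'ja' audio stream, this returns true,
  // because the text matches the preference while the audio does not match
  // the text.
  //
  //   const show = shaka.util.StreamUtils.shouldInitiallyShowText(
  //       audioStream, textStream, config);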
  /**
   * @param {!Array<string>} mimeTypes
   * @return {!shaka.extern.Variant}
   */
  static createEmptyVariant(mimeTypes) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    /** @type {shaka.extern.Variant} */
    const variant = {
      id: 0,
      language: 'und',
      disabledUntilTime: 0,
      primary: false,
      audio: null,
      video: null,
      bandwidth: 100,
      allowedByApplication: true,
      allowedByKeySystem: true,
      decodingInfos: [],
    };
    for (const mimeType of mimeTypes) {
      const stream = {
        id: 0,
        originalId: null,
        groupId: null,
        createSegmentIndex: () => Promise.resolve(),
        segmentIndex: null,
        mimeType: mimeType ? shaka.util.MimeUtils.getBasicType(mimeType) : '',
        codecs: mimeType ? shaka.util.MimeUtils.getCodecs(mimeType) : '',
        encrypted: true,
        drmInfos: [],
        keyIds: new Set(),
        language: 'und',
        originalLanguage: null,
        label: null,
        type: ContentType.VIDEO,
        primary: false,
        trickModeVideo: null,
        dependencyStream: null,
        emsgSchemeIdUris: null,
        roles: [],
        forced: false,
        channelsCount: null,
        audioSamplingRate: null,
        spatialAudio: false,
        closedCaptions: null,
        accessibilityPurpose: null,
        external: false,
        fastSwitching: false,
        fullMimeTypes: new Set(),
        isAudioMuxedInVideo: false,
        baseOriginalId: null,
      };
      stream.fullMimeTypes.add(shaka.util.MimeUtils.getFullType(
          stream.mimeType, stream.codecs));
      if (mimeType.startsWith('audio/')) {
        stream.type = ContentType.AUDIO;
        variant.audio = stream;
      } else {
        variant.video = stream;
      }
    }
    return variant;
  }
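  // Illustrative usage sketch (hypothetical MIME types):
  //
  //   const variant = shaka.util.StreamUtils.createEmptyVariant([
  //     'audio/mp4; codecs="mp4a.40.2"',
  //     'video/mp4; codecs="avc1.42E01E"',
  //   ]);
  //   // variant.audio.mimeType == 'audio/mp4' and variant.video.mimeType ==
  //   // 'video/mp4'; each stream's codecs value comes from the codecs= param.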
};
/**
 * A cache of results from mediaCapabilities.decodingInfo, indexed by the
 * (stringified) decodingConfig.
 *
 * @type {Map<string, !MediaCapabilitiesDecodingInfo>}
 * @private
 */
shaka.util.StreamUtils.decodingConfigCache_ = new Map();
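// Illustrative sketch of the cache key described above (the exact key
// construction lives elsewhere in this file; the shape shown is an
// assumption): the stringified MediaDecodingConfiguration, e.g.
//   JSON.stringify({type: 'media-source', video: {contentType: ...}, ...})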
/** @private {number} */
shaka.util.StreamUtils.nextTrackId_ = 0;
/**
 * @enum {string}
 */
shaka.util.StreamUtils.DecodingAttributes = {
  SMOOTH: 'smooth',
  POWER: 'powerEfficient',
};
/**
 * @private {!Map<string, boolean>}
 */
shaka.util.StreamUtils.supportedImageMimeTypes_ = new Map()
    .set('image/svg+xml', true)
    .set('image/png', true)
    .set('image/jpeg', true)
    .set('image/jpg', true);
/**
 * @const {string}
 * @private
 */
// cspell: disable-next-line
shaka.util.StreamUtils.minWebPImage_ = 'data:image/webp;base64,UklGRjoAAABXRU' +
    'JQVlA4IC4AAACyAgCdASoCAAIALmk0mk0iIiIiIgBoSygABc6WWgAA/veff/0PP8bA//LwY' +
    'AAA';
/**
 * @const {string}
 * @private
 */
// cspell: disable-next-line
shaka.util.StreamUtils.minAvifImage_ = 'data:image/avif;base64,AAAAIGZ0eXBhdm' +
    'lmAAAAAGF2aWZtaWYxbWlhZk1BMUIAAADybWV0YQAAAAAAAAAoaGRscgAAAAAAAAAAcGljd' +
    'AAAAAAAAAAAAAAAAGxpYmF2aWYAAAAADnBpdG0AAAAAAAEAAAAeaWxvYwAAAABEAAABAAEA' +
    'AAABAAABGgAAAB0AAAAoaWluZgAAAAAAAQAAABppbmZlAgAAAAABAABhdjAxQ29sb3IAAAA' +
    'AamlwcnAAAABLaXBjbwAAABRpc3BlAAAAAAAAAAIAAAACAAAAEHBpeGkAAAAAAwgICAAAAA' +
    'xhdjFDgQ0MAAAAABNjb2xybmNseAACAAIAAYAAAAAXaXBtYQAAAAAAAAABAAEEAQKDBAAAA' +
    'CVtZGF0EgAKCBgANogQEAwgMg8f8D///8WfhwB8+ErK42A=';
/**
 * @const {!Map<string, string>}
 * @private
 */
shaka.util.StreamUtils.minImage_ = new Map()
    .set('image/webp', shaka.util.StreamUtils.minWebPImage_)
    .set('image/avif', shaka.util.StreamUtils.minAvifImage_);