Source: lib/hls/hls_parser.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.hls.HlsParser');
  7. goog.require('goog.Uri');
  8. goog.require('goog.asserts');
  9. goog.require('shaka.abr.Ewma');
  10. goog.require('shaka.hls.ManifestTextParser');
  11. goog.require('shaka.hls.Playlist');
  12. goog.require('shaka.hls.PlaylistType');
  13. goog.require('shaka.hls.Tag');
  14. goog.require('shaka.hls.Utils');
  15. goog.require('shaka.log');
  16. goog.require('shaka.media.InitSegmentReference');
  17. goog.require('shaka.media.ManifestParser');
  18. goog.require('shaka.media.PresentationTimeline');
  19. goog.require('shaka.media.QualityObserver');
  20. goog.require('shaka.media.SegmentIndex');
  21. goog.require('shaka.media.SegmentReference');
  22. goog.require('shaka.net.DataUriPlugin');
  23. goog.require('shaka.net.NetworkingEngine');
  24. goog.require('shaka.net.NetworkingEngine.PendingRequest');
  25. goog.require('shaka.util.ArrayUtils');
  26. goog.require('shaka.util.BufferUtils');
  27. goog.require('shaka.util.DrmUtils');
  28. goog.require('shaka.util.ContentSteeringManager');
  29. goog.require('shaka.util.Error');
  30. goog.require('shaka.util.EventManager');
  31. goog.require('shaka.util.FakeEvent');
  32. goog.require('shaka.util.LanguageUtils');
  33. goog.require('shaka.util.ManifestParserUtils');
  34. goog.require('shaka.util.MimeUtils');
  35. goog.require('shaka.util.Networking');
  36. goog.require('shaka.util.OperationManager');
  37. goog.require('shaka.util.Pssh');
  38. goog.require('shaka.media.SegmentUtils');
  39. goog.require('shaka.util.Timer');
  40. goog.require('shaka.util.TsParser');
  41. goog.require('shaka.util.TXml');
  42. goog.require('shaka.util.Platform');
  43. goog.require('shaka.util.StreamUtils');
  44. goog.require('shaka.util.Uint8ArrayUtils');
  45. goog.requireType('shaka.hls.Segment');
  46. /**
  47. * HLS parser.
  48. *
  49. * @implements {shaka.extern.ManifestParser}
  50. * @export
  51. */
  52. shaka.hls.HlsParser = class {
  53. /**
54. * Creates an HLS parser object.
  55. */
  56. constructor() {
  57. /** @private {?shaka.extern.ManifestParser.PlayerInterface} */
  58. this.playerInterface_ = null;
  59. /** @private {?shaka.extern.ManifestConfiguration} */
  60. this.config_ = null;
  61. /** @private {number} */
  62. this.globalId_ = 1;
  63. /** @private {!Map.<string, string>} */
  64. this.globalVariables_ = new Map();
  65. /**
  66. * A map from group id to stream infos created from the media tags.
  67. * @private {!Map.<string, !Array.<?shaka.hls.HlsParser.StreamInfo>>}
  68. */
  69. this.groupIdToStreamInfosMap_ = new Map();
  70. /**
  71. * For media playlist lazy-loading to work in livestreams, we have to assume
72. * that each stream of a type (video, audio, etc.) has the same mappings of
  73. * sequence number to start time.
  74. * This map stores those relationships.
  75. * Only used during livestreams; we do not assume that VOD content is
  76. * aligned in that way.
  77. * @private {!Map.<string, !Map.<number, number>>}
  78. */
  79. this.mediaSequenceToStartTimeByType_ = new Map();
  80. // Set initial maps.
  81. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  82. this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map());
  83. this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map());
  84. this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map());
  85. this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map());
  86. /**
  87. * The values are strings of the form "<VIDEO URI> - <AUDIO URI>",
  88. * where the URIs are the verbatim media playlist URIs as they appeared in
  89. * the master playlist.
  90. *
  91. * Used to avoid duplicates that vary only in their text stream.
  92. *
  93. * @private {!Set.<string>}
  94. */
  95. this.variantUriSet_ = new Set();
  96. /**
  97. * A map from (verbatim) media playlist URI to stream infos representing the
  98. * playlists.
  99. *
  100. * On update, used to iterate through and update from media playlists.
  101. *
  102. * On initial parse, used to iterate through and determine minimum
  103. * timestamps, offsets, and to handle TS rollover.
  104. *
  105. * During parsing, used to avoid duplicates in the async methods
  106. * createStreamInfoFromMediaTags_, createStreamInfoFromImageTag_ and
  107. * createStreamInfoFromVariantTags_.
  108. *
  109. * @private {!Map.<string, shaka.hls.HlsParser.StreamInfo>}
  110. */
  111. this.uriToStreamInfosMap_ = new Map();
  112. /** @private {?shaka.media.PresentationTimeline} */
  113. this.presentationTimeline_ = null;
  114. /**
  115. * The master playlist URI, after redirects.
  116. *
  117. * @private {string}
  118. */
  119. this.masterPlaylistUri_ = '';
  120. /** @private {shaka.hls.ManifestTextParser} */
  121. this.manifestTextParser_ = new shaka.hls.ManifestTextParser();
  122. /**
  123. * The minimum sequence number for generated segments, when ignoring
  124. * EXT-X-PROGRAM-DATE-TIME.
  125. *
  126. * @private {number}
  127. */
  128. this.minSequenceNumber_ = -1;
  129. /**
  130. * The lowest time value for any of the streams, as defined by the
  131. * EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970.
  132. *
  133. * @private {number}
  134. */
  135. this.lowestSyncTime_ = Infinity;
  136. /**
  137. * Flag to indicate if any of the media playlists use
  138. * EXT-X-PROGRAM-DATE-TIME.
  139. *
  140. * @private {boolean}
  141. */
  142. this.usesProgramDateTime_ = false;
  143. /**
  144. * Whether the streams have previously been "finalized"; that is to say,
  145. * whether we have loaded enough streams to know information about the asset
  146. * such as timing information, live status, etc.
  147. *
  148. * @private {boolean}
  149. */
  150. this.streamsFinalized_ = false;
  151. /**
  152. * Whether the manifest informs about the codec to use.
  153. *
  154. * @private
  155. */
  156. this.codecInfoInManifest_ = false;
  157. /**
  158. * This timer is used to trigger the start of a manifest update. A manifest
  159. * update is async. Once the update is finished, the timer will be restarted
  160. * to trigger the next update. The timer will only be started if the content
  161. * is live content.
  162. *
  163. * @private {shaka.util.Timer}
  164. */
  165. this.updatePlaylistTimer_ = new shaka.util.Timer(() => {
  166. if (this.mediaElement_ && !this.config_.continueLoadingWhenPaused) {
  167. this.eventManager_.unlisten(this.mediaElement_, 'timeupdate');
  168. if (this.mediaElement_.paused) {
  169. this.eventManager_.listenOnce(
  170. this.mediaElement_, 'timeupdate', () => this.onUpdate_());
  171. return;
  172. }
  173. }
  174. this.onUpdate_();
  175. });
  176. /** @private {shaka.hls.HlsParser.PresentationType_} */
  177. this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD;
  178. /** @private {?shaka.extern.Manifest} */
  179. this.manifest_ = null;
  180. /** @private {number} */
  181. this.maxTargetDuration_ = 0;
  182. /** @private {number} */
  183. this.lastTargetDuration_ = Infinity;
  184. /** Partial segments target duration.
  185. * @private {number}
  186. */
  187. this.partialTargetDuration_ = 0;
  188. /** @private {number} */
  189. this.presentationDelay_ = 0;
  190. /** @private {number} */
  191. this.lowLatencyPresentationDelay_ = 0;
  192. /** @private {shaka.util.OperationManager} */
  193. this.operationManager_ = new shaka.util.OperationManager();
  194. /** A map from closed captions' group id, to a map of closed captions info.
  195. * {group id -> {closed captions channel id -> language}}
  196. * @private {Map.<string, Map.<string, string>>}
  197. */
  198. this.groupIdToClosedCaptionsMap_ = new Map();
  199. /** @private {Map.<string, string>} */
  200. this.groupIdToCodecsMap_ = new Map();
  201. /** A cache mapping EXT-X-MAP tag info to the InitSegmentReference created
  202. * from the tag.
  203. * The key is a string combining the EXT-X-MAP tag's absolute uri, and
  204. * its BYTERANGE if available.
  205. * {!Map.<string, !shaka.media.InitSegmentReference>} */
  206. this.mapTagToInitSegmentRefMap_ = new Map();
  207. /** @private {Map.<string, !shaka.extern.aesKey>} */
  208. this.aesKeyInfoMap_ = new Map();
  209. /** @private {Map.<string, !Promise.<shaka.extern.Response>>} */
  210. this.aesKeyMap_ = new Map();
  211. /** @private {Map.<string, !Promise.<shaka.extern.Response>>} */
  212. this.identityKeyMap_ = new Map();
  213. /** @private {Map.<!shaka.media.InitSegmentReference, ?string>} */
  214. this.identityKidMap_ = new Map();
  215. /** @private {boolean} */
  216. this.lowLatencyMode_ = false;
  217. /** @private {boolean} */
  218. this.lowLatencyByterangeOptimization_ = false;
  219. /**
  220. * An ewma that tracks how long updates take.
  221. * This is to mitigate issues caused by slow parsing on embedded devices.
  222. * @private {!shaka.abr.Ewma}
  223. */
  224. this.averageUpdateDuration_ = new shaka.abr.Ewma(5);
  225. /** @private {?shaka.util.ContentSteeringManager} */
  226. this.contentSteeringManager_ = null;
  227. /** @private {boolean} */
  228. this.needsClosedCaptionsDetection_ = true;
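// Tracks EXT-X-DATERANGE IDs that have already been emitted, so repeated
// playlist updates do not produce duplicate events.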
  229. /** @private {Set.<string>} */
  230. this.dateRangeIdsEmitted_ = new Set();
  231. /** @private {shaka.util.EventManager} */
  232. this.eventManager_ = new shaka.util.EventManager();
  233. /** @private {HTMLMediaElement} */
  234. this.mediaElement_ = null;
  235. /** @private {?number} */
  236. this.startTime_ = null;
  237. /** @private {function():boolean} */
  238. this.isPreloadFn_ = () => false;
  239. }
  240. /**
  241. * @param {shaka.extern.ManifestConfiguration} config
  242. * @param {(function():boolean)=} isPreloadFn
  243. * @override
  244. * @exportInterface
  245. */
  246. configure(config, isPreloadFn) {
  247. this.config_ = config;
  248. if (isPreloadFn) {
  249. this.isPreloadFn_ = isPreloadFn;
  250. }
  251. if (this.contentSteeringManager_) {
  252. this.contentSteeringManager_.configure(this.config_);
  253. }
  254. }
  255. /**
  256. * @override
  257. * @exportInterface
  258. */
  259. async start(uri, playerInterface) {
  260. goog.asserts.assert(this.config_, 'Must call configure() before start()!');
  261. this.playerInterface_ = playerInterface;
  262. this.lowLatencyMode_ = playerInterface.isLowLatencyMode();
  263. const response = await this.requestManifest_([uri]).promise;
  264. // Record the master playlist URI after redirects.
  265. this.masterPlaylistUri_ = response.uri;
  266. goog.asserts.assert(response.data, 'Response data should be non-null!');
  267. await this.parseManifest_(response.data, uri);
  268. goog.asserts.assert(this.manifest_, 'Manifest should be non-null');
  269. return this.manifest_;
  270. }
  271. /**
  272. * @override
  273. * @exportInterface
  274. */
  275. stop() {
  276. // Make sure we don't update the manifest again. Even if the timer is not
  277. // running, this is safe to call.
  278. if (this.updatePlaylistTimer_) {
  279. this.updatePlaylistTimer_.stop();
  280. this.updatePlaylistTimer_ = null;
  281. }
  282. /** @type {!Array.<!Promise>} */
  283. const pending = [];
  284. if (this.operationManager_) {
  285. pending.push(this.operationManager_.destroy());
  286. this.operationManager_ = null;
  287. }
  288. this.playerInterface_ = null;
  289. this.config_ = null;
  290. this.variantUriSet_.clear();
  291. this.manifest_ = null;
  292. this.uriToStreamInfosMap_.clear();
  293. this.groupIdToStreamInfosMap_.clear();
  294. this.groupIdToCodecsMap_.clear();
  295. this.globalVariables_.clear();
  296. this.mapTagToInitSegmentRefMap_.clear();
  297. this.aesKeyInfoMap_.clear();
  298. this.aesKeyMap_.clear();
  299. this.identityKeyMap_.clear();
  300. this.identityKidMap_.clear();
  301. this.dateRangeIdsEmitted_.clear();
  302. if (this.contentSteeringManager_) {
  303. this.contentSteeringManager_.destroy();
  304. }
  305. if (this.eventManager_) {
  306. this.eventManager_.release();
  307. this.eventManager_ = null;
  308. }
  309. return Promise.all(pending);
  310. }
  311. /**
  312. * @override
  313. * @exportInterface
  314. */
  315. async update() {
  316. if (!this.isLive_()) {
  317. return;
  318. }
  319. /** @type {!Array.<!Promise>} */
  320. const updates = [];
  321. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
322. // This is necessary to correctly calculate the update time.
  323. this.lastTargetDuration_ = Infinity;
  324. this.manifest_.gapCount = 0;
  325. // Only update active streams.
  326. const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex);
  327. for (const streamInfo of activeStreamInfos) {
  328. updates.push(this.updateStream_(streamInfo));
  329. }
  330. await Promise.all(updates);
  331. // Now that streams have been updated, notify the presentation timeline.
  332. this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream));
  333. // If any hasEndList is false, the stream is still live.
  334. const stillLive = activeStreamInfos.some((s) => s.hasEndList == false);
  335. if (activeStreamInfos.length && !stillLive) {
  336. // Convert the presentation to VOD and set the duration.
  337. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  338. this.setPresentationType_(PresentationType.VOD);
  339. // The duration is the minimum of the end times of all active streams.
  340. // Non-active streams are not guaranteed to have useful maxTimestamp
  341. // values, due to the lazy-loading system, so they are ignored.
  342. const maxTimestamps = activeStreamInfos.map((s) => s.maxTimestamp);
  344. this.presentationTimeline_.setDuration(Math.min(...maxTimestamps));
  345. this.playerInterface_.updateDuration();
  346. }
  347. if (stillLive) {
  348. this.determineDuration_();
  349. }
  350. // Check if any playlist does not have the first reference (due to a
  351. // problem in the live encoder for example), and disable the stream if
  352. // necessary.
  353. for (const streamInfo of activeStreamInfos) {
  354. if (streamInfo.stream.segmentIndex &&
  355. !streamInfo.stream.segmentIndex.earliestReference()) {
  356. this.playerInterface_.disableStream(streamInfo.stream);
  357. }
  358. }
  359. }
  360. /**
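* Returns the media-sequence-number-to-start-time map that applies to the
* given stream: the shared per-content-type map for live content, or the
* stream's own map for VOD.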
  361. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  362. * @return {!Map.<number, number>}
  363. * @private
  364. */
  365. getMediaSequenceToStartTimeFor_(streamInfo) {
  366. if (this.isLive_()) {
  367. return this.mediaSequenceToStartTimeByType_.get(streamInfo.type);
  368. } else {
  369. return streamInfo.mediaSequenceToStartTime;
  370. }
  371. }
  372. /**
  373. * Updates a stream.
  374. *
  375. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  376. * @return {!Promise}
  377. * @private
  378. */
  379. async updateStream_(streamInfo) {
  380. const manifestUris = [];
  381. for (const uri of streamInfo.getUris()) {
  382. const uriObj = new goog.Uri(uri);
  383. const queryData = uriObj.getQueryData();
  384. if (streamInfo.canBlockReload) {
  385. if (streamInfo.nextMediaSequence >= 0) {
  386. // Indicates that the server must hold the request until a Playlist
387. // contains a Media Segment with that Media Sequence Number or later.
  388. queryData.add('_HLS_msn', String(streamInfo.nextMediaSequence));
  389. }
  390. if (streamInfo.nextPart >= 0) {
  391. // Indicates, in combination with _HLS_msn, that the server must hold
  392. // the request until a Playlist contains Partial Segment N of Media
  393. // Sequence Number M or later.
  394. queryData.add('_HLS_part', String(streamInfo.nextPart));
  395. }
  396. }
  397. if (streamInfo.canSkipSegments) {
398. // Enable delta updates. The server will replace older segments with an
399. // 'EXT-X-SKIP' tag in the returned media playlist.
  400. queryData.add('_HLS_skip', 'YES');
  401. }
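// A resulting low-latency (blocking reload) request URI might look like,
// for example (hypothetical):
// https://example.com/media.m3u8?_HLS_msn=273&_HLS_part=2&_HLS_skip=YES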
  402. if (queryData.getCount()) {
  403. uriObj.setQueryData(queryData);
  404. }
  405. manifestUris.push(uriObj.toString());
  406. }
  407. let response;
  408. try {
  409. response = await this.requestManifest_(
  410. manifestUris, /* isPlaylist= */ true).promise;
  411. } catch (e) {
  412. if (this.playerInterface_) {
  413. this.playerInterface_.disableStream(streamInfo.stream);
  414. }
  415. throw e;
  416. }
  417. if (!streamInfo.stream.segmentIndex) {
  418. // The stream was closed since the update was first requested.
  419. return;
  420. }
  421. /** @type {shaka.hls.Playlist} */
  422. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  423. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  424. throw new shaka.util.Error(
  425. shaka.util.Error.Severity.CRITICAL,
  426. shaka.util.Error.Category.MANIFEST,
  427. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  428. }
  429. // Record the final URI after redirects.
  430. const responseUri = response.uri;
  431. if (responseUri != response.originalUri &&
  432. !streamInfo.getUris().includes(responseUri)) {
  433. streamInfo.redirectUris.push(responseUri);
  434. }
  435. /** @type {!Array.<!shaka.hls.Tag>} */
  436. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  437. 'EXT-X-DEFINE');
  438. const mediaVariables = this.parseMediaVariables_(
  439. variablesTags, responseUri);
  440. const stream = streamInfo.stream;
  441. const mediaSequenceToStartTime =
  442. this.getMediaSequenceToStartTimeFor_(streamInfo);
  443. const {keyIds, drmInfos} = await this.parseDrmInfo_(
  444. playlist, stream.mimeType, streamInfo.getUris, mediaVariables);
  445. const keysAreEqual =
  446. (a, b) => a.size === b.size && [...a].every((value) => b.has(value));
  447. if (!keysAreEqual(stream.keyIds, keyIds)) {
  448. stream.keyIds = keyIds;
  449. stream.drmInfos = drmInfos;
  450. this.playerInterface_.newDrmInfo(stream);
  451. }
  452. const {segments, bandwidth} = this.createSegments_(
  453. playlist, mediaSequenceToStartTime, mediaVariables,
  454. streamInfo.getUris, streamInfo.type);
  455. if (bandwidth) {
  456. stream.bandwidth = bandwidth;
  457. }
  458. const qualityInfo =
  459. shaka.media.QualityObserver.createQualityInfo(stream);
  460. for (const segment of segments) {
  461. if (segment.initSegmentReference) {
  462. segment.initSegmentReference.mediaQuality = qualityInfo;
  463. }
  464. }
  465. stream.segmentIndex.mergeAndEvict(
  466. segments, this.presentationTimeline_.getSegmentAvailabilityStart());
  467. if (segments.length) {
  468. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  469. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  470. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  471. playlist.tags, 'EXT-X-SKIP');
  472. const skippedSegments =
  473. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  474. const {nextMediaSequence, nextPart} =
  475. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  476. streamInfo.nextMediaSequence = nextMediaSequence + skippedSegments;
  477. streamInfo.nextPart = nextPart;
  478. const playlistStartTime = mediaSequenceToStartTime.get(
  479. mediaSequenceNumber);
  480. stream.segmentIndex.evict(playlistStartTime);
  481. }
  482. const oldSegment = stream.segmentIndex.earliestReference();
  483. if (oldSegment) {
  484. streamInfo.minTimestamp = oldSegment.startTime;
  485. const newestSegment = segments[segments.length - 1];
  486. goog.asserts.assert(newestSegment, 'Should have segments!');
  487. streamInfo.maxTimestamp = newestSegment.endTime;
  488. }
  489. // Once the last segment has been added to the playlist,
  490. // #EXT-X-ENDLIST tag will be appended.
  491. // If that happened, treat the rest of the EVENT presentation as VOD.
  492. const endListTag =
  493. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  494. if (endListTag) {
  495. // Flag this for later. We don't convert the whole presentation into VOD
  496. // until we've seen the ENDLIST tag for all active playlists.
  497. streamInfo.hasEndList = true;
  498. }
  499. this.determineLastTargetDuration_(playlist);
  500. this.processDateRangeTags_(
  501. playlist.tags, stream.type, mediaVariables, streamInfo.getUris);
  502. }
  503. /**
  504. * @override
  505. * @exportInterface
  506. */
  507. onExpirationUpdated(sessionId, expiration) {
  508. // No-op
  509. }
  510. /**
  511. * @override
  512. * @exportInterface
  513. */
  514. onInitialVariantChosen(variant) {
  515. // No-op
  516. }
  517. /**
  518. * @override
  519. * @exportInterface
  520. */
  521. banLocation(uri) {
  522. if (this.contentSteeringManager_) {
  523. this.contentSteeringManager_.banLocation(uri);
  524. }
  525. }
  526. /**
  527. * @override
  528. * @exportInterface
  529. */
  530. setMediaElement(mediaElement) {
  531. this.mediaElement_ = mediaElement;
  532. }
  533. /**
  534. * Align the streams by sequence number by dropping early segments. Then
  535. * offset the streams to begin at presentation time 0.
  536. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  537. * @param {boolean=} force
  538. * @private
  539. */
  540. syncStreamsWithSequenceNumber_(streamInfos, force = false) {
  541. // We assume that, when this is first called, we have enough info to
  542. // determine how to use the program date times (e.g. we have both a video
  543. // and an audio, and all other videos and audios match those).
  544. // Thus, we only need to calculate this once.
  545. const updateMinSequenceNumber = this.minSequenceNumber_ == -1;
  546. // Sync using media sequence number. Find the highest starting sequence
  547. // number among all streams. Later, we will drop any references to
  548. // earlier segments in other streams, then offset everything back to 0.
  549. for (const streamInfo of streamInfos) {
  550. const segmentIndex = streamInfo.stream.segmentIndex;
  551. goog.asserts.assert(segmentIndex,
  552. 'Only loaded streams should be synced');
  553. const mediaSequenceToStartTime =
  554. this.getMediaSequenceToStartTimeFor_(streamInfo);
  555. const segment0 = segmentIndex.earliestReference();
  556. if (segment0) {
  557. // This looks inefficient, but iteration order is insertion order.
  558. // So the very first entry should be the one we want.
559. // We check this in debug builds and log a warning if it changes. We
560. // still do a loop, though, so
  561. // that the code functions correctly in production no matter what.
  562. if (goog.DEBUG) {
  563. const firstSequenceStartTime =
  564. mediaSequenceToStartTime.values().next().value;
  565. if (firstSequenceStartTime != segment0.startTime) {
  566. shaka.log.warning(
  567. 'Sequence number map is not ordered as expected!');
  568. }
  569. }
  570. for (const [sequence, start] of mediaSequenceToStartTime) {
  571. if (start == segment0.startTime) {
  572. if (updateMinSequenceNumber) {
  573. this.minSequenceNumber_ = Math.max(
  574. this.minSequenceNumber_, sequence);
  575. }
  576. // Even if we already have decided on a value for
  577. // |this.minSequenceNumber_|, we still need to determine the first
  578. // sequence number for the stream, to offset it in the code below.
  579. streamInfo.firstSequenceNumber = sequence;
  580. break;
  581. }
  582. }
  583. }
  584. }
  585. if (this.minSequenceNumber_ < 0) {
  586. // Nothing to sync.
  587. return;
  588. }
  589. shaka.log.debug('Syncing HLS streams against base sequence number:',
  590. this.minSequenceNumber_);
  591. for (const streamInfo of streamInfos) {
  592. if (!this.ignoreManifestProgramDateTimeFor_(streamInfo.type) && !force) {
  593. continue;
  594. }
  595. const segmentIndex = streamInfo.stream.segmentIndex;
  596. if (segmentIndex) {
  597. // Drop any earlier references.
  598. const numSegmentsToDrop =
  599. this.minSequenceNumber_ - streamInfo.firstSequenceNumber;
  600. if (numSegmentsToDrop > 0) {
  601. segmentIndex.dropFirstReferences(numSegmentsToDrop);
  602. // Now adjust timestamps back to begin at 0.
  603. const segmentN = segmentIndex.earliestReference();
  604. if (segmentN) {
  605. const streamOffset = -segmentN.startTime;
  606. // Modify all SegmentReferences equally.
  607. streamInfo.stream.segmentIndex.offset(streamOffset);
  608. // Update other parts of streamInfo the same way.
  609. this.offsetStreamInfo_(streamInfo, streamOffset);
  610. }
  611. }
  612. }
  613. }
  614. }
  615. /**
  616. * Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their
  617. * segments. Also normalizes segment times so that the earliest segment in
  618. * any stream is at time 0.
  619. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  620. * @private
  621. */
  622. syncStreamsWithProgramDateTime_(streamInfos) {
  623. // We assume that, when this is first called, we have enough info to
  624. // determine how to use the program date times (e.g. we have both a video
  625. // and an audio, and all other videos and audios match those).
  626. // Thus, we only need to calculate this once.
  627. if (this.lowestSyncTime_ == Infinity) {
  628. for (const streamInfo of streamInfos) {
  629. const segmentIndex = streamInfo.stream.segmentIndex;
  630. goog.asserts.assert(segmentIndex,
  631. 'Only loaded streams should be synced');
  632. const segment0 = segmentIndex.earliestReference();
  633. if (segment0 != null && segment0.syncTime != null) {
  634. this.lowestSyncTime_ =
  635. Math.min(this.lowestSyncTime_, segment0.syncTime);
  636. }
  637. }
  638. }
  639. const lowestSyncTime = this.lowestSyncTime_;
  640. if (lowestSyncTime == Infinity) {
  641. // Nothing to sync.
  642. return;
  643. }
  644. shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime);
  645. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  646. if (this.ignoreManifestProgramDateTimeFor_(streamInfo.type)) {
  647. continue;
  648. }
  649. const segmentIndex = streamInfo.stream.segmentIndex;
  650. if (segmentIndex != null) {
  651. // A segment's startTime should be based on its syncTime vs the lowest
  652. // syncTime across all streams. The earliest segment sync time from
  653. // any stream will become presentation time 0. If two streams start
  654. // e.g. 6 seconds apart in syncTime, then their first segments will
  655. // also start 6 seconds apart in presentation time.
  656. const segment0 = segmentIndex.earliestReference();
  657. if (!segment0) {
  658. continue;
  659. }
  660. if (segment0.syncTime == null) {
  661. shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream',
  662. streamInfo.getUris(),
  663. 'Expect AV sync issues!');
  664. } else {
  665. // Stream metadata are offset by a fixed amount based on the
  666. // first segment.
  667. const segment0TargetTime = segment0.syncTime - lowestSyncTime;
  668. const streamOffset = segment0TargetTime - segment0.startTime;
  669. this.offsetStreamInfo_(streamInfo, streamOffset);
  670. // This is computed across all segments separately to manage
  671. // accumulated drift in durations.
  672. for (const segment of segmentIndex) {
  673. segment.syncAgainst(lowestSyncTime);
  674. }
  675. }
  676. }
  677. }
  678. }
  679. /**
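* Shifts a stream's timing metadata by the given offset: its minimum and
* maximum timestamps and its media-sequence-to-start-time map.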
  680. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  681. * @param {number} offset
  682. * @private
  683. */
  684. offsetStreamInfo_(streamInfo, offset) {
  685. // Adjust our accounting of the minimum timestamp.
  686. streamInfo.minTimestamp += offset;
  687. // Adjust our accounting of the maximum timestamp.
  688. streamInfo.maxTimestamp += offset;
  689. goog.asserts.assert(streamInfo.maxTimestamp >= 0,
  690. 'Negative maxTimestamp after adjustment!');
  691. // Update our map from sequence number to start time.
  692. const mediaSequenceToStartTime =
  693. this.getMediaSequenceToStartTimeFor_(streamInfo);
  694. for (const [key, value] of mediaSequenceToStartTime) {
  695. mediaSequenceToStartTime.set(key, value + offset);
  696. }
  697. shaka.log.debug('Offset', offset, 'applied to',
  698. streamInfo.getUris());
  699. }
  700. /**
  701. * Parses the manifest.
  702. *
  703. * @param {BufferSource} data
  704. * @param {string} uri
  705. * @return {!Promise}
  706. * @private
  707. */
  708. async parseManifest_(data, uri) {
  709. const Utils = shaka.hls.Utils;
  710. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  711. goog.asserts.assert(this.masterPlaylistUri_,
  712. 'Master playlist URI must be set before calling parseManifest_!');
  713. const playlist = this.manifestTextParser_.parsePlaylist(data);
  714. /** @type {!Array.<!shaka.hls.Tag>} */
  715. const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE');
  716. /** @type {!Array.<!shaka.extern.Variant>} */
  717. let variants = [];
  718. /** @type {!Array.<!shaka.extern.Stream>} */
  719. let textStreams = [];
  720. /** @type {!Array.<!shaka.extern.Stream>} */
  721. let imageStreams = [];
  722. // This assert is our own sanity check.
  723. goog.asserts.assert(this.presentationTimeline_ == null,
  724. 'Presentation timeline created early!');
  725. // We don't know if the presentation is VOD or live until we parse at least
  726. // one media playlist, so make a VOD-style presentation timeline for now
  727. // and change the type later if we discover this is live.
  728. // Since the player will load the first variant chosen early in the process,
  729. // there isn't a window during playback where the live-ness is unknown.
  730. this.presentationTimeline_ = new shaka.media.PresentationTimeline(
  731. /* presentationStartTime= */ null, /* delay= */ 0);
  732. this.presentationTimeline_.setStatic(true);
  733. const getUris = () => {
  734. return [uri];
  735. };
  736. /** @type {?string} */
  737. let mediaPlaylistType = null;
  738. /** @type {!Map.<string, string>} */
  739. let mediaVariables = new Map();
  740. // Parsing a media playlist results in a single-variant stream.
  741. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  742. this.needsClosedCaptionsDetection_ = false;
  743. /** @type {!Array.<!shaka.hls.Tag>} */
  744. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  745. 'EXT-X-DEFINE');
  746. mediaVariables = this.parseMediaVariables_(
  747. variablesTags, this.masterPlaylistUri_);
748. // By default we assume it is video, but the correct type is determined
749. // in a later step.
  750. mediaPlaylistType = ContentType.VIDEO;
  751. // These values can be obtained later so these default values are good.
  752. const codecs = '';
  753. const languageValue = '';
  754. const channelsCount = null;
  755. const sampleRate = null;
  756. const closedCaptions = new Map();
  757. const spatialAudio = false;
  758. const characteristics = null;
  759. const forced = false; // Only relevant for text.
  760. const primary = true; // This is the only stream!
  761. const name = 'Media Playlist';
  762. // Make the stream info, with those values.
  763. const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  764. this.globalId_++, mediaVariables, playlist, getUris, uri, codecs,
  765. mediaPlaylistType, languageValue, primary, name, channelsCount,
  766. closedCaptions, characteristics, forced, sampleRate, spatialAudio);
  767. this.uriToStreamInfosMap_.set(uri, streamInfo);
  768. if (streamInfo.stream) {
  769. const qualityInfo =
  770. shaka.media.QualityObserver.createQualityInfo(streamInfo.stream);
  771. streamInfo.stream.segmentIndex.forEachTopLevelReference(
  772. (reference) => {
  773. if (reference.initSegmentReference) {
  774. reference.initSegmentReference.mediaQuality = qualityInfo;
  775. }
  776. });
  777. }
  778. mediaPlaylistType = streamInfo.stream.type;
  779. // Wrap the stream from that stream info with a variant.
  780. variants.push({
  781. id: 0,
  782. language: this.getLanguage_(languageValue),
  783. disabledUntilTime: 0,
  784. primary: true,
  785. audio: mediaPlaylistType == 'audio' ? streamInfo.stream : null,
  786. video: mediaPlaylistType == 'video' ? streamInfo.stream : null,
  787. bandwidth: streamInfo.stream.bandwidth || 0,
  788. allowedByApplication: true,
  789. allowedByKeySystem: true,
  790. decodingInfos: [],
  791. });
  792. } else {
  793. this.parseMasterVariables_(variablesTags);
  794. /** @type {!Array.<!shaka.hls.Tag>} */
  795. const mediaTags = Utils.filterTagsByName(
  796. playlist.tags, 'EXT-X-MEDIA');
  797. /** @type {!Array.<!shaka.hls.Tag>} */
  798. const variantTags = Utils.filterTagsByName(
  799. playlist.tags, 'EXT-X-STREAM-INF');
  800. /** @type {!Array.<!shaka.hls.Tag>} */
  801. const imageTags = Utils.filterTagsByName(
  802. playlist.tags, 'EXT-X-IMAGE-STREAM-INF');
  803. /** @type {!Array.<!shaka.hls.Tag>} */
  804. const iFrameTags = Utils.filterTagsByName(
  805. playlist.tags, 'EXT-X-I-FRAME-STREAM-INF');
  806. /** @type {!Array.<!shaka.hls.Tag>} */
  807. const sessionKeyTags = Utils.filterTagsByName(
  808. playlist.tags, 'EXT-X-SESSION-KEY');
  809. /** @type {!Array.<!shaka.hls.Tag>} */
  810. const sessionDataTags = Utils.filterTagsByName(
  811. playlist.tags, 'EXT-X-SESSION-DATA');
  812. /** @type {!Array.<!shaka.hls.Tag>} */
  813. const contentSteeringTags = Utils.filterTagsByName(
  814. playlist.tags, 'EXT-X-CONTENT-STEERING');
  815. this.processSessionData_(sessionDataTags);
  816. await this.processContentSteering_(contentSteeringTags);
  817. this.parseCodecs_(variantTags);
  818. this.parseClosedCaptions_(mediaTags);
  819. const iFrameStreams = this.parseIFrames_(iFrameTags);
  820. variants = await this.createVariantsForTags_(
  821. variantTags, sessionKeyTags, mediaTags, getUris,
  822. this.globalVariables_, iFrameStreams);
  823. textStreams = this.parseTexts_(mediaTags);
  824. imageStreams = await this.parseImages_(imageTags, iFrameTags);
  825. }
  826. // Make sure that the parser has not been destroyed.
  827. if (!this.playerInterface_) {
  828. throw new shaka.util.Error(
  829. shaka.util.Error.Severity.CRITICAL,
  830. shaka.util.Error.Category.PLAYER,
  831. shaka.util.Error.Code.OPERATION_ABORTED);
  832. }
  833. this.determineStartTime_(playlist);
  834. // Single-variant streams aren't lazy-loaded, so for them we already have
  835. // enough info here to determine the presentation type and duration.
  836. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  837. if (this.isLive_()) {
  838. this.changePresentationTimelineToLive_(playlist);
  839. const delay = this.getUpdatePlaylistDelay_();
  840. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  841. }
  842. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  843. this.finalizeStreams_(streamInfos);
  844. this.determineDuration_();
  845. goog.asserts.assert(mediaPlaylistType,
  846. 'mediaPlaylistType should be non-null');
  847. this.processDateRangeTags_(
  848. playlist.tags, mediaPlaylistType, mediaVariables, getUris);
  849. }
  850. this.manifest_ = {
  851. presentationTimeline: this.presentationTimeline_,
  852. variants,
  853. textStreams,
  854. imageStreams,
  855. offlineSessionIds: [],
  856. minBufferTime: 0,
  857. sequenceMode: this.config_.hls.sequenceMode,
  858. ignoreManifestTimestampsInSegmentsMode:
  859. this.config_.hls.ignoreManifestTimestampsInSegmentsMode,
  860. type: shaka.media.ManifestParser.HLS,
  861. serviceDescription: null,
  862. nextUrl: null,
  863. periodCount: 1,
  864. gapCount: 0,
  865. isLowLatency: false,
  866. startTime: this.startTime_,
  867. };
  868. // If there is no 'CODECS' attribute in the manifest and codec guessing is
  869. // disabled, we need to create the segment indexes now so that missing info
  870. // can be parsed from the media data and added to the stream objects.
  871. if (!this.codecInfoInManifest_ && this.config_.hls.disableCodecGuessing) {
  872. const createIndexes = [];
  873. for (const variant of this.manifest_.variants) {
  874. if (variant.audio && variant.audio.codecs === '') {
  875. createIndexes.push(variant.audio.createSegmentIndex());
  876. }
  877. if (variant.video && variant.video.codecs === '') {
  878. createIndexes.push(variant.video.createSegmentIndex());
  879. }
  880. }
  881. await Promise.all(createIndexes);
  882. }
  883. this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_);
  884. if (variants.length == 1) {
  885. const createSegmentIndexPromises = [];
  886. const variant = variants[0];
  887. for (const stream of [variant.video, variant.audio]) {
  888. if (stream && !stream.segmentIndex) {
  889. createSegmentIndexPromises.push(stream.createSegmentIndex());
  890. }
  891. }
  892. if (createSegmentIndexPromises.length > 0) {
  893. await Promise.all(createSegmentIndexPromises);
  894. }
  895. }
  896. }
  897. /**
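* Determines basic stream info (such as MIME type and codecs) by downloading
* and probing one of the given segments (and its init segment, if any) when
* the playlist itself does not provide enough information.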
  898. * @param {!Array.<!shaka.media.SegmentReference>} segments
  899. * @return {!Promise.<shaka.media.SegmentUtils.BasicInfo>}
  900. * @private
  901. */
  902. async getBasicInfoFromSegments_(segments) {
  903. const HlsParser = shaka.hls.HlsParser;
  904. const defaultBasicInfo = shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  905. this.config_.hls.mediaPlaylistFullMimeType);
  906. if (!segments.length) {
  907. return defaultBasicInfo;
  908. }
  909. const {segment, segmentIndex} = this.getAvailableSegment_(segments);
  910. const segmentUris = segment.getUris();
  911. const segmentUri = segmentUris[0];
  912. const parsedUri = new goog.Uri(segmentUri);
  913. const extension = parsedUri.getPath().split('.').pop();
  914. const rawMimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  915. if (rawMimeType) {
  916. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  917. rawMimeType);
  918. }
  919. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  920. let initData = null;
  921. let initMimeType = null;
  922. const initSegmentRef = segment.initSegmentReference;
  923. if (initSegmentRef) {
  924. const initSegmentRequest = shaka.util.Networking.createSegmentRequest(
  925. initSegmentRef.getUris(), initSegmentRef.getStartByte(),
  926. initSegmentRef.getEndByte(), this.config_.retryParameters);
  927. const initType =
  928. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  929. const initResponse = await this.makeNetworkRequest_(
  930. initSegmentRequest, requestType, {type: initType}).promise;
  931. initData = initResponse.data;
  932. if (initSegmentRef.aesKey) {
  933. initData = await shaka.media.SegmentUtils.aesDecrypt(
  934. initData, initSegmentRef.aesKey, 0);
  935. }
  936. initMimeType = initResponse.headers['content-type'];
  937. if (initMimeType) {
  938. // Split the MIME type in case the server sent additional parameters.
  939. initMimeType = initMimeType.split(';')[0].toLowerCase();
  940. }
  941. }
  942. const segmentRequest = shaka.util.Networking.createSegmentRequest(
  943. segment.getUris(), segment.getStartByte(), segment.getEndByte(),
  944. this.config_.retryParameters);
  945. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  946. const response = await this.makeNetworkRequest_(
  947. segmentRequest, requestType, {type}).promise;
  948. let data = response.data;
  949. if (segment.aesKey) {
  950. data = await shaka.media.SegmentUtils.aesDecrypt(
  951. data, segment.aesKey, segmentIndex);
  952. }
  953. let contentMimeType = response.headers['content-type'];
  954. if (contentMimeType) {
  955. // Split the MIME type in case the server sent additional parameters.
  956. contentMimeType = contentMimeType.split(';')[0].toLowerCase();
  957. }
  958. const validMp4Extensions = [
  959. 'mp4',
  960. 'mp4a',
  961. 'm4s',
  962. 'm4i',
  963. 'm4a',
  964. 'm4f',
  965. 'cmfa',
  966. 'mp4v',
  967. 'm4v',
  968. 'cmfv',
  969. 'fmp4',
  970. ];
  971. const validMp4MimeType = [
  972. 'audio/mp4',
  973. 'video/mp4',
  974. 'video/iso.segment',
  975. ];
  976. if (shaka.util.TsParser.probe(
  977. shaka.util.BufferUtils.toUint8(data))) {
  978. const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromTs(
  979. data, this.config_.disableAudio, this.config_.disableVideo,
  980. this.config_.disableText);
  981. if (basicInfo) {
  982. return basicInfo;
  983. }
  984. } else if (validMp4Extensions.includes(extension) ||
  985. validMp4MimeType.includes(contentMimeType) ||
  986. (initMimeType && validMp4MimeType.includes(initMimeType))) {
  987. const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromMp4(
  988. initData, data, this.config_.disableText);
  989. if (basicInfo) {
  990. return basicInfo;
  991. }
  992. }
  993. if (contentMimeType) {
  994. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  995. contentMimeType);
  996. }
  997. if (initMimeType) {
  998. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  999. initMimeType);
  1000. }
  1001. return defaultBasicInfo;
  1002. }
  1003. /** @private */
  1004. determineDuration_() {
  1005. goog.asserts.assert(this.presentationTimeline_,
  1006. 'Presentation timeline not created!');
  1007. if (this.isLive_()) {
  1008. // The spec says nothing much about seeking in live content, but Safari's
  1009. // built-in HLS implementation does not allow it. Therefore we will set
  1010. // the availability window equal to the presentation delay. The player
  1011. // will be able to buffer ahead three segments, but the seek window will
  1012. // be zero-sized.
  1013. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  1014. if (this.presentationType_ == PresentationType.LIVE) {
  1015. let segmentAvailabilityDuration = this.getLiveDuration_() || 0;
  1016. // The app can override that with a longer duration, to allow seeking.
  1017. if (!isNaN(this.config_.availabilityWindowOverride)) {
  1018. segmentAvailabilityDuration = this.config_.availabilityWindowOverride;
  1019. }
  1020. this.presentationTimeline_.setSegmentAvailabilityDuration(
  1021. segmentAvailabilityDuration);
  1022. }
  1023. } else {
  1024. // Use the minimum duration as the presentation duration.
  1025. this.presentationTimeline_.setDuration(this.getMinDuration_());
  1026. }
  1027. if (!this.presentationTimeline_.isStartTimeLocked()) {
  1028. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  1029. if (!streamInfo.stream.segmentIndex) {
  1030. continue; // Not active.
  1031. }
  1032. if (streamInfo.type != 'audio' && streamInfo.type != 'video') {
  1033. continue;
  1034. }
  1035. const firstReference =
  1036. streamInfo.stream.segmentIndex.earliestReference();
  1037. if (firstReference && firstReference.syncTime) {
  1038. const syncTime = firstReference.syncTime;
  1039. this.presentationTimeline_.setInitialProgramDateTime(syncTime);
  1040. }
  1041. }
  1042. }
  1043. // This is the first point where we have a meaningful presentation start
  1044. // time, and we need to tell PresentationTimeline that so that it can
  1045. // maintain consistency from here on.
  1046. this.presentationTimeline_.lockStartTime();
  1047. // This asserts that the live edge is being calculated from segment times.
  1048. // For VOD and event streams, this check should still pass.
  1049. goog.asserts.assert(
  1050. !this.presentationTimeline_.usingPresentationStartTime(),
  1051. 'We should not be using the presentation start time in HLS!');
  1052. }
  1053. /**
1054. * Get the variables defined by EXT-X-DEFINE tags and store them in a map.
1055. * @param {!Array.<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
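* For example (hypothetical), #EXT-X-DEFINE:NAME="token",VALUE="1234" makes
* "{$token}" in later URIs expand to "1234".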
  1056. * @private
  1057. */
  1058. parseMasterVariables_(tags) {
  1059. const queryParams = new goog.Uri(this.masterPlaylistUri_).getQueryData();
  1060. for (const variableTag of tags) {
  1061. const name = variableTag.getAttributeValue('NAME');
  1062. const value = variableTag.getAttributeValue('VALUE');
  1063. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  1064. if (name && value) {
  1065. if (!this.globalVariables_.has(name)) {
  1066. this.globalVariables_.set(name, value);
  1067. }
  1068. }
  1069. if (queryParam) {
  1070. const queryParamValue = queryParams.get(queryParam)[0];
1071. if (queryParamValue && !this.globalVariables_.has(queryParam)) {
  1072. this.globalVariables_.set(queryParam, queryParamValue);
  1073. }
  1074. }
  1075. }
  1076. }
  1077. /**
1078. * Get the variables defined or imported by EXT-X-DEFINE tags, and store them in a map.
1079. * @param {!Array.<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the media playlist.
  1080. * @param {string} uri Media playlist URI.
  1081. * @return {!Map.<string, string>}
  1082. * @private
  1083. */
  1084. parseMediaVariables_(tags, uri) {
  1085. const queryParams = new goog.Uri(uri).getQueryData();
  1086. const mediaVariables = new Map();
  1087. for (const variableTag of tags) {
  1088. const name = variableTag.getAttributeValue('NAME');
  1089. const value = variableTag.getAttributeValue('VALUE');
  1090. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  1091. const mediaImport = variableTag.getAttributeValue('IMPORT');
  1092. if (name && value) {
  1093. if (!mediaVariables.has(name)) {
  1094. mediaVariables.set(name, value);
  1095. }
  1096. }
  1097. if (queryParam) {
  1098. const queryParamValue = queryParams.get(queryParam)[0];
1099. if (queryParamValue && !mediaVariables.has(queryParam)) {
  1100. mediaVariables.set(queryParam, queryParamValue);
  1101. }
  1102. }
  1103. if (mediaImport) {
  1104. const globalValue = this.globalVariables_.get(mediaImport);
  1105. if (globalValue) {
  1106. mediaVariables.set(mediaImport, globalValue);
  1107. }
  1108. }
  1109. }
  1110. return mediaVariables;
  1111. }
  1112. /**
1113. * Get the codecs of each variant tag, and store them in a map from
1114. * audio/video/subtitle group id to the codecs string.
  1115. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1116. * @private
  1117. */
  1118. parseCodecs_(tags) {
  1119. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1120. for (const variantTag of tags) {
  1121. const audioGroupId = variantTag.getAttributeValue('AUDIO');
  1122. const videoGroupId = variantTag.getAttributeValue('VIDEO');
  1123. const subGroupId = variantTag.getAttributeValue('SUBTITLES');
  1124. const allCodecs = this.getCodecsForVariantTag_(variantTag);
  1125. if (subGroupId) {
  1126. const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1127. ContentType.TEXT, allCodecs);
  1128. goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.');
  1129. this.groupIdToCodecsMap_.set(subGroupId, textCodecs);
  1130. shaka.util.ArrayUtils.remove(allCodecs, textCodecs);
  1131. }
  1132. if (audioGroupId) {
  1133. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1134. ContentType.AUDIO, allCodecs);
  1135. if (!codecs) {
  1136. codecs = this.config_.hls.defaultAudioCodec;
  1137. }
  1138. this.groupIdToCodecsMap_.set(audioGroupId, codecs);
  1139. }
  1140. if (videoGroupId) {
  1141. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1142. ContentType.VIDEO, allCodecs);
  1143. if (!codecs) {
  1144. codecs = this.config_.hls.defaultVideoCodec;
  1145. }
  1146. this.groupIdToCodecsMap_.set(videoGroupId, codecs);
  1147. }
  1148. }
  1149. }
  1150. /**
  1151. * Process EXT-X-SESSION-DATA tags.
  1152. *
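* For example (hypothetical), a tag such as
* #EXT-X-SESSION-DATA:DATA-ID="com.example.title",VALUE="Example Movie"
* is surfaced to the application as a 'sessiondata' event.
*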
  1153. * @param {!Array.<!shaka.hls.Tag>} tags
  1154. * @private
  1155. */
  1156. processSessionData_(tags) {
  1157. for (const tag of tags) {
  1158. const id = tag.getAttributeValue('DATA-ID');
  1159. const uri = tag.getAttributeValue('URI');
  1160. const language = tag.getAttributeValue('LANGUAGE');
  1161. const value = tag.getAttributeValue('VALUE');
  1162. const data = (new Map()).set('id', id);
  1163. if (uri) {
  1164. data.set('uri', shaka.hls.Utils.constructSegmentUris(
  1165. [this.masterPlaylistUri_], uri, this.globalVariables_)[0]);
  1166. }
  1167. if (language) {
  1168. data.set('language', language);
  1169. }
  1170. if (value) {
  1171. data.set('value', value);
  1172. }
  1173. const event = new shaka.util.FakeEvent('sessiondata', data);
  1174. if (this.playerInterface_) {
  1175. this.playerInterface_.onEvent(event);
  1176. }
  1177. }
  1178. }
  1179. /**
  1180. * Process EXT-X-CONTENT-STEERING tags.
  1181. *
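* For example (hypothetical), steering is enabled by a tag such as
* #EXT-X-CONTENT-STEERING:SERVER-URI="https://steering.example.com/manifest.json",PATHWAY-ID="CDN-A"
*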
  1182. * @param {!Array.<!shaka.hls.Tag>} tags
  1183. * @return {!Promise}
  1184. * @private
  1185. */
  1186. async processContentSteering_(tags) {
  1187. if (!this.playerInterface_ || !this.config_) {
  1188. return;
  1189. }
  1190. let contentSteeringPromise;
  1191. for (const tag of tags) {
  1192. const defaultPathwayId = tag.getAttributeValue('PATHWAY-ID');
  1193. const uri = tag.getAttributeValue('SERVER-URI');
  1194. if (!defaultPathwayId || !uri) {
  1195. continue;
  1196. }
  1197. this.contentSteeringManager_ =
  1198. new shaka.util.ContentSteeringManager(this.playerInterface_);
  1199. this.contentSteeringManager_.configure(this.config_);
  1200. this.contentSteeringManager_.setBaseUris([this.masterPlaylistUri_]);
  1201. this.contentSteeringManager_.setManifestType(
  1202. shaka.media.ManifestParser.HLS);
  1203. this.contentSteeringManager_.setDefaultPathwayId(defaultPathwayId);
  1204. contentSteeringPromise =
  1205. this.contentSteeringManager_.requestInfo(uri);
  1206. break;
  1207. }
  1208. await contentSteeringPromise;
  1209. }
  1210. /**
  1211. * Parse Subtitles and Closed Captions from 'EXT-X-MEDIA' tags.
  1212. * Create text streams for Subtitles, but not Closed Captions.
  1213. *
  1214. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1215. * @return {!Array.<!shaka.extern.Stream>}
  1216. * @private
  1217. */
  1218. parseTexts_(mediaTags) {
  1219. // Create text stream for each Subtitle media tag.
  1220. const subtitleTags =
  1221. shaka.hls.Utils.filterTagsByType(mediaTags, 'SUBTITLES');
  1222. const textStreams = subtitleTags.map((tag) => {
  1223. const disableText = this.config_.disableText;
  1224. if (disableText) {
  1225. return null;
  1226. }
  1227. try {
  1228. return this.createStreamInfoFromMediaTags_([tag], new Map()).stream;
  1229. } catch (e) {
  1230. if (this.config_.hls.ignoreTextStreamFailures) {
  1231. return null;
  1232. }
  1233. throw e;
  1234. }
  1235. });
  1236. const type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1237. // Set the codecs for text streams.
  1238. for (const tag of subtitleTags) {
  1239. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1240. const codecs = this.groupIdToCodecsMap_.get(groupId);
  1241. if (codecs) {
  1242. const textStreamInfos = this.groupIdToStreamInfosMap_.get(groupId);
  1243. if (textStreamInfos) {
  1244. for (const textStreamInfo of textStreamInfos) {
  1245. textStreamInfo.stream.codecs = codecs;
  1246. textStreamInfo.stream.mimeType =
  1247. this.guessMimeTypeBeforeLoading_(type, codecs) ||
  1248. this.guessMimeTypeFallback_(type);
  1249. this.setFullTypeForStream_(textStreamInfo.stream);
  1250. }
  1251. }
  1252. }
  1253. }
  1254. // Do not create text streams for Closed captions.
  1255. return textStreams.filter((s) => s);
  1256. }
  1257. /**
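* Populates stream.fullMimeTypes with every MIME type + codecs combination
* seen for the stream, including per-reference MIME type overrides from its
* segment index.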
  1258. * @param {!shaka.extern.Stream} stream
  1259. * @private
  1260. */
  1261. setFullTypeForStream_(stream) {
  1262. const combinations = new Set([shaka.util.MimeUtils.getFullType(
  1263. stream.mimeType, stream.codecs)]);
  1264. if (stream.segmentIndex) {
  1265. stream.segmentIndex.forEachTopLevelReference((reference) => {
  1266. if (reference.mimeType) {
  1267. combinations.add(shaka.util.MimeUtils.getFullType(
  1268. reference.mimeType, stream.codecs));
  1269. }
  1270. });
  1271. }
  1272. stream.fullMimeTypes = combinations;
  1273. }
  1274. /**
  1275. * @param {!Array.<!shaka.hls.Tag>} imageTags from the playlist.
  1276. * @param {!Array.<!shaka.hls.Tag>} iFrameTags from the playlist.
  1277. * @return {!Promise.<!Array.<!shaka.extern.Stream>>}
  1278. * @private
  1279. */
  1280. async parseImages_(imageTags, iFrameTags) {
  1281. // Create image stream for each image tag.
  1282. const imageStreamPromises = imageTags.map(async (tag) => {
  1283. const disableThumbnails = this.config_.disableThumbnails;
  1284. if (disableThumbnails) {
  1285. return null;
  1286. }
  1287. try {
  1288. const streamInfo = await this.createStreamInfoFromImageTag_(tag);
  1289. return streamInfo.stream;
  1290. } catch (e) {
  1291. if (this.config_.hls.ignoreImageStreamFailures) {
  1292. return null;
  1293. }
  1294. throw e;
  1295. }
  1296. }).concat(iFrameTags.map((tag) => {
  1297. const disableThumbnails = this.config_.disableThumbnails;
  1298. if (disableThumbnails) {
  1299. return null;
  1300. }
  1301. try {
  1302. const streamInfo = this.createStreamInfoFromIframeTag_(tag);
  1303. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1304. if (streamInfo.stream.type !== ContentType.IMAGE) {
  1305. return null;
  1306. }
  1307. return streamInfo.stream;
  1308. } catch (e) {
  1309. if (this.config_.hls.ignoreImageStreamFailures) {
  1310. return null;
  1311. }
  1312. throw e;
  1313. }
  1314. }));
  1315. const imageStreams = await Promise.all(imageStreamPromises);
  1316. return imageStreams.filter((s) => s);
  1317. }
  1318. /**
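* Creates stream infos from audio/video EXT-X-MEDIA tags, grouping tags that
* share the same key. Subtitle tags and tags without a URI are skipped.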
  1319. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1320. * @param {!Map.<string, string>} groupIdPathwayIdMapping
  1321. * @private
  1322. */
  1323. createStreamInfosFromMediaTags_(mediaTags, groupIdPathwayIdMapping) {
  1324. // Filter out subtitles and media tags without uri.
  1325. mediaTags = mediaTags.filter((tag) => {
  1326. const uri = tag.getAttributeValue('URI') || '';
  1327. const type = tag.getAttributeValue('TYPE');
  1328. return type != 'SUBTITLES' && uri != '';
  1329. });
  1330. const groupedTags = {};
  1331. for (const tag of mediaTags) {
  1332. const key = tag.getTagKey(!this.contentSteeringManager_);
  1333. if (!groupedTags[key]) {
  1334. groupedTags[key] = [tag];
  1335. } else {
  1336. groupedTags[key].push(tag);
  1337. }
  1338. }
  1339. for (const key in groupedTags) {
  1340. // Create stream info for each audio / video media grouped tag.
  1341. this.createStreamInfoFromMediaTags_(
  1342. groupedTags[key], groupIdPathwayIdMapping);
  1343. }
  1344. }
  1345. /**
  1346. * @param {!Array.<!shaka.hls.Tag>} iFrameTags from the playlist.
  1347. * @return {!Array.<!shaka.extern.Stream>}
  1348. * @private
  1349. */
  1350. parseIFrames_(iFrameTags) {
  1351. // Create iFrame stream for each iFrame tag.
  1352. const iFrameStreams = iFrameTags.map((tag) => {
  1353. const streamInfo = this.createStreamInfoFromIframeTag_(tag);
  1354. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1355. if (streamInfo.stream.type !== ContentType.VIDEO) {
  1356. return null;
  1357. }
  1358. return streamInfo.stream;
  1359. });
1360. // Filter out the null entries for MJPEG I-frame streams; those are handled as image streams instead.
  1361. return iFrameStreams.filter((s) => s);
  1362. }
  1363. /**
  1364. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1365. * @param {!Array.<!shaka.hls.Tag>} sessionKeyTags EXT-X-SESSION-KEY tags
  1366. * from the playlist.
  1367. * @param {!Array.<!shaka.hls.Tag>} mediaTags EXT-X-MEDIA tags from the
  1368. * playlist.
  1369. * @param {function():!Array.<string>} getUris
  1370. * @param {?Map.<string, string>} variables
  1371. * @param {!Array.<!shaka.extern.Stream>} iFrameStreams
  1372. * @return {!Promise.<!Array.<!shaka.extern.Variant>>}
  1373. * @private
  1374. */
  1375. async createVariantsForTags_(tags, sessionKeyTags, mediaTags, getUris,
  1376. variables, iFrameStreams) {
  1377. // EXT-X-SESSION-KEY processing
  1378. const drmInfos = [];
  1379. const keyIds = new Set();
  1380. if (sessionKeyTags.length > 0) {
  1381. for (const drmTag of sessionKeyTags) {
  1382. const method = drmTag.getRequiredAttrValue('METHOD');
  1383. // According to the HLS spec, KEYFORMAT is optional and implicitly
  1384. // defaults to "identity".
  1385. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  1386. const keyFormat =
  1387. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  1388. let drmInfo = null;
  1389. if (method == 'NONE') {
  1390. continue;
  1391. } else if (this.isAesMethod_(method)) {
  1392. const keyUris = shaka.hls.Utils.constructSegmentUris(
  1393. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  1394. const keyMapKey = keyUris.sort().join('');
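// Cache the AES key request by its sorted set of URIs so a key referenced by
// multiple tags is only fetched once.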
  1395. if (!this.aesKeyMap_.has(keyMapKey)) {
  1396. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  1397. const request = shaka.net.NetworkingEngine.makeRequest(
  1398. keyUris, this.config_.retryParameters);
  1399. const keyResponse = this.makeNetworkRequest_(request, requestType)
  1400. .promise;
  1401. this.aesKeyMap_.set(keyMapKey, keyResponse);
  1402. }
  1403. continue;
  1404. } else if (keyFormat == 'identity') {
  1405. // eslint-disable-next-line no-await-in-loop
  1406. drmInfo = await this.identityDrmParser_(
  1407. drmTag, /* mimeType= */ '', getUris,
  1408. /* initSegmentRef= */ null, variables);
  1409. } else {
  1410. const drmParser =
  1411. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  1412. drmInfo = drmParser ?
  1413. drmParser(drmTag, /* mimeType= */ '') : null;
  1414. }
  1415. if (drmInfo) {
  1416. if (drmInfo.keyIds) {
  1417. for (const keyId of drmInfo.keyIds) {
  1418. keyIds.add(keyId);
  1419. }
  1420. }
  1421. drmInfos.push(drmInfo);
  1422. } else {
  1423. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  1424. }
  1425. }
  1426. }
  1427. const groupedTags = {};
  1428. for (const tag of tags) {
  1429. const key = tag.getTagKey(!this.contentSteeringManager_);
  1430. if (!groupedTags[key]) {
  1431. groupedTags[key] = [tag];
  1432. } else {
  1433. groupedTags[key].push(tag);
  1434. }
  1435. }
  1436. const allVariants = [];
1437. // Create variants for each group of variant tags.
  1438. for (const key in groupedTags) {
  1439. const tags = groupedTags[key];
  1440. const firstTag = tags[0];
  1441. const frameRate = firstTag.getAttributeValue('FRAME-RATE');
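// Prefer AVERAGE-BANDWIDTH when present; otherwise fall back to the required
// BANDWIDTH attribute.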
  1442. const bandwidth =
  1443. Number(firstTag.getAttributeValue('AVERAGE-BANDWIDTH')) ||
  1444. Number(firstTag.getRequiredAttrValue('BANDWIDTH'));
  1445. const resolution = firstTag.getAttributeValue('RESOLUTION');
  1446. const [width, height] = resolution ? resolution.split('x') : [null, null];
  1447. const videoRange = firstTag.getAttributeValue('VIDEO-RANGE');
  1448. let videoLayout = firstTag.getAttributeValue('REQ-VIDEO-LAYOUT');
  1449. if (videoLayout && videoLayout.includes(',')) {
  1450. // If multiple video layout strings are present, pick the first valid
  1451. // one.
  1452. const layoutStrings = videoLayout.split(',').filter((layoutString) => {
  1453. return layoutString == 'CH-STEREO' || layoutString == 'CH-MONO';
  1454. });
  1455. videoLayout = layoutStrings[0];
  1456. }
  1457. // According to the HLS spec:
  1458. // By default a video variant is monoscopic, so an attribute
  1459. // consisting entirely of REQ-VIDEO-LAYOUT="CH-MONO" is unnecessary
  1460. // and SHOULD NOT be present.
  1461. videoLayout = videoLayout || 'CH-MONO';
  1462. const streamInfos = this.createStreamInfosForVariantTags_(tags,
  1463. mediaTags, resolution, frameRate);
  1464. goog.asserts.assert(streamInfos.audio.length ||
  1465. streamInfos.video.length, 'We should have created a stream!');
  1466. allVariants.push(...this.createVariants_(
  1467. streamInfos.audio,
  1468. streamInfos.video,
  1469. bandwidth,
  1470. width,
  1471. height,
  1472. frameRate,
  1473. videoRange,
  1474. videoLayout,
  1475. drmInfos,
  1476. keyIds,
  1477. iFrameStreams));
  1478. }
  1479. return allVariants.filter((variant) => variant != null);
  1480. }
  1481. /**
  1482. * Create audio and video streamInfos from an 'EXT-X-STREAM-INF' tag and its
  1483. * related media tags.
  1484. *
  1485. * @param {!Array.<!shaka.hls.Tag>} tags
  1486. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1487. * @param {?string} resolution
  1488. * @param {?string} frameRate
  1489. * @return {!shaka.hls.HlsParser.StreamInfos}
  1490. * @private
  1491. */
  1492. createStreamInfosForVariantTags_(tags, mediaTags, resolution, frameRate) {
  1493. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1494. /** @type {shaka.hls.HlsParser.StreamInfos} */
  1495. const res = {
  1496. audio: [],
  1497. video: [],
  1498. };
  1499. const groupIdPathwayIdMapping = new Map();
  1500. const globalGroupIds = [];
  1501. let isAudioGroup = false;
  1502. let isVideoGroup = false;
  1503. for (const tag of tags) {
  1504. const audioGroupId = tag.getAttributeValue('AUDIO');
  1505. const videoGroupId = tag.getAttributeValue('VIDEO');
  1506. goog.asserts.assert(audioGroupId == null || videoGroupId == null,
  1507. 'Unexpected: both video and audio described by media tags!');
  1508. const groupId = audioGroupId || videoGroupId;
  1509. if (!groupId) {
  1510. continue;
  1511. }
  1512. if (!globalGroupIds.includes(groupId)) {
  1513. globalGroupIds.push(groupId);
  1514. }
  1515. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  1516. if (pathwayId) {
  1517. groupIdPathwayIdMapping.set(groupId, pathwayId);
  1518. }
  1519. if (audioGroupId) {
  1520. isAudioGroup = true;
  1521. } else if (videoGroupId) {
  1522. isVideoGroup = true;
  1523. }
  1524. // Make an educated guess about the stream type.
  1525. shaka.log.debug('Guessing stream type for', tag.toString());
  1526. }
  1527. if (globalGroupIds.length && mediaTags.length) {
  1528. const mediaTagsForVariant = mediaTags.filter((tag) => {
  1529. return globalGroupIds.includes(tag.getRequiredAttrValue('GROUP-ID'));
  1530. });
  1531. this.createStreamInfosFromMediaTags_(
  1532. mediaTagsForVariant, groupIdPathwayIdMapping);
  1533. }
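// Sort and join the group ids so the composite id is stable regardless of the
// order in which the tags appeared.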
  1534. const globalGroupId = globalGroupIds.sort().join(',');
  1535. const streamInfos =
  1536. (globalGroupId && this.groupIdToStreamInfosMap_.has(globalGroupId)) ?
  1537. this.groupIdToStreamInfosMap_.get(globalGroupId) : [];
  1538. if (isAudioGroup) {
  1539. res.audio.push(...streamInfos);
  1540. } else if (isVideoGroup) {
  1541. res.video.push(...streamInfos);
  1542. }
  1543. let type;
  1544. let ignoreStream = false;
  1545. // The Microsoft HLS manifest generators will make audio-only variants
  1546. // that link to their URI both directly and through an audio tag.
  1547. // In that case, ignore the local URI and use the version in the
  1548. // AUDIO tag, so you inherit its language.
  1549. // As an example, see the manifest linked in issue #860.
  1550. const allStreamUris = tags.map((tag) => tag.getRequiredAttrValue('URI'));
  1551. const hasSameUri = res.audio.find((audio) => {
  1552. return audio && audio.getUris().find((uri) => {
  1553. return allStreamUris.includes(uri);
  1554. });
  1555. });
  1556. /** @type {!Array.<string>} */
  1557. let allCodecs = this.getCodecsForVariantTag_(tags[0]);
  1558. const videoCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1559. ContentType.VIDEO, allCodecs);
  1560. const audioCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1561. ContentType.AUDIO, allCodecs);
  1562. if (audioCodecs && !videoCodecs) {
1563. // There are no associated media tags, and there is only an audio codec
1564. // (no video codec), so it should be audio.
  1565. type = ContentType.AUDIO;
  1566. shaka.log.debug('Guessing audio-only.');
  1567. ignoreStream = res.audio.length > 0;
  1568. } else if (!res.audio.length && !res.video.length &&
  1569. audioCodecs && videoCodecs) {
  1570. // There are both audio and video codecs, so assume multiplexed content.
1571. // Note that the defaults used when CODECS is missing assume both audio
1572. // and video codecs (and therefore multiplexed content).
  1573. // Recombine the codec strings into one so that MediaSource isn't
  1574. // lied to later. (That would trigger an error in Chrome.)
  1575. shaka.log.debug('Guessing multiplexed audio+video.');
  1576. type = ContentType.VIDEO;
  1577. allCodecs = [[videoCodecs, audioCodecs].join(',')];
  1578. } else if (res.audio.length && hasSameUri) {
  1579. shaka.log.debug('Guessing audio-only.');
  1580. type = ContentType.AUDIO;
  1581. ignoreStream = true;
  1582. } else if (res.video.length && !res.audio.length) {
  1583. // There are associated video streams. Assume this is audio.
  1584. shaka.log.debug('Guessing audio-only.');
  1585. type = ContentType.AUDIO;
  1586. } else {
  1587. shaka.log.debug('Guessing video-only.');
  1588. type = ContentType.VIDEO;
  1589. }
  1590. if (!ignoreStream) {
  1591. let language = null;
  1592. let name = null;
  1593. let channelsCount = null;
  1594. let spatialAudio = false;
  1595. let characteristics = null;
  1596. let sampleRate = null;
  1597. if (!streamInfos.length) {
  1598. const mediaTag = mediaTags.find((tag) => {
  1599. const uri = tag.getAttributeValue('URI') || '';
  1600. const type = tag.getAttributeValue('TYPE');
  1601. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1602. return type != 'SUBTITLES' && uri == '' &&
  1603. globalGroupIds.includes(groupId);
  1604. });
  1605. if (mediaTag) {
  1606. language = mediaTag.getAttributeValue('LANGUAGE');
  1607. name = mediaTag.getAttributeValue('NAME');
  1608. channelsCount = this.getChannelsCount_(mediaTag);
  1609. spatialAudio = this.isSpatialAudio_(mediaTag);
  1610. characteristics = mediaTag.getAttributeValue('CHARACTERISTICS');
  1611. sampleRate = this.getSampleRate_(mediaTag);
  1612. }
  1613. }
  1614. const streamInfo = this.createStreamInfoFromVariantTags_(
  1615. tags, allCodecs, type, language, name, channelsCount,
  1616. characteristics, sampleRate, spatialAudio);
  1617. if (globalGroupId) {
  1618. streamInfo.stream.groupId = globalGroupId;
  1619. }
  1620. res[streamInfo.stream.type] = [streamInfo];
  1621. }
  1622. return res;
  1623. }
  1624. /**
  1625. * Get the codecs from the 'EXT-X-STREAM-INF' tag.
  1626. *
  1627. * @param {!shaka.hls.Tag} tag
  1628. * @return {!Array.<string>} codecs
  1629. * @private
  1630. */
  1631. getCodecsForVariantTag_(tag) {
  1632. let codecsString = tag.getAttributeValue('CODECS') || '';
  1633. const supplementalCodecsString =
  1634. tag.getAttributeValue('SUPPLEMENTAL-CODECS');
  1635. this.codecInfoInManifest_ = codecsString.length > 0;
  1636. if (!this.codecInfoInManifest_ && !this.config_.hls.disableCodecGuessing) {
  1637. // These are the default codecs to assume if none are specified.
  1638. const defaultCodecsArray = [];
  1639. if (!this.config_.disableVideo) {
  1640. defaultCodecsArray.push(this.config_.hls.defaultVideoCodec);
  1641. }
  1642. if (!this.config_.disableAudio) {
  1643. defaultCodecsArray.push(this.config_.hls.defaultAudioCodec);
  1644. }
  1645. codecsString = defaultCodecsArray.join(',');
  1646. }
  1647. // Strip out internal whitespace while splitting on commas:
  1648. /** @type {!Array.<string>} */
  1649. const codecs = codecsString.split(/\s*,\s*/);
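// A SUPPLEMENTAL-CODECS entry may include a compatibility brand after a
// slash; only the codec portion before the slash is kept.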
  1650. if (supplementalCodecsString) {
  1651. const supplementalCodecs = supplementalCodecsString.split(/\s*,\s*/)
  1652. .map((codec) => {
  1653. return codec.split('/')[0];
  1654. });
  1655. codecs.push(...supplementalCodecs);
  1656. }
  1657. return shaka.media.SegmentUtils.codecsFiltering(codecs);
  1658. }
  1659. /**
  1660. * Get the channel count information for an HLS audio track.
  1661. * CHANNELS specifies an ordered, "/" separated list of parameters.
  1662. * If the type is audio, the first parameter will be a decimal integer
  1663. * specifying the number of independent, simultaneous audio channels.
1664. * No other channel parameters are currently defined.
  1665. *
  1666. * @param {!shaka.hls.Tag} tag
  1667. * @return {?number}
  1668. * @private
  1669. */
  1670. getChannelsCount_(tag) {
  1671. const channels = tag.getAttributeValue('CHANNELS');
  1672. if (!channels) {
  1673. return null;
  1674. }
1675. const channelCountString = channels.split('/')[0];
1676. const count = parseInt(channelCountString, 10);
  1677. return count;
  1678. }
  1679. /**
  1680. * Get the sample rate information for an HLS audio track.
  1681. *
  1682. * @param {!shaka.hls.Tag} tag
  1683. * @return {?number}
  1684. * @private
  1685. */
  1686. getSampleRate_(tag) {
  1687. const sampleRate = tag.getAttributeValue('SAMPLE-RATE');
  1688. if (!sampleRate) {
  1689. return null;
  1690. }
  1691. return parseInt(sampleRate, 10);
  1692. }
  1693. /**
  1694. * Get the spatial audio information for an HLS audio track.
  1695. * In HLS the channels field indicates the number of audio channels that the
  1696. * stream has (eg: 2). In the case of Dolby Atmos, the complexity is
  1697. * expressed with the number of channels followed by the word JOC
  1698. * (eg: 16/JOC), so 16 would be the number of channels (eg: 7.3.6 layout),
  1699. * and JOC indicates that the stream has spatial audio.
  1700. * @see https://developer.apple.com/documentation/http_live_streaming/hls_authoring_specification_for_apple_devices/hls_authoring_specification_for_apple_devices_appendixes
  1701. *
  1702. * @param {!shaka.hls.Tag} tag
  1703. * @return {boolean}
  1704. * @private
  1705. */
  1706. isSpatialAudio_(tag) {
  1707. const channels = tag.getAttributeValue('CHANNELS');
  1708. if (!channels) {
  1709. return false;
  1710. }
  1711. return channels.includes('/JOC');
  1712. }
  1713. /**
  1714. * Get the closed captions map information for the EXT-X-STREAM-INF tag, to
  1715. * create the stream info.
  1716. * @param {!shaka.hls.Tag} tag
  1717. * @param {string} type
  1718. * @return {Map.<string, string>} closedCaptions
  1719. * @private
  1720. */
  1721. getClosedCaptions_(tag, type) {
  1722. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1723. // The attribute of closed captions is optional, and the value may be
  1724. // 'NONE'.
  1725. const closedCaptionsAttr = tag.getAttributeValue('CLOSED-CAPTIONS');
  1726. // EXT-X-STREAM-INF tags may have CLOSED-CAPTIONS attributes.
  1727. // The value can be either a quoted-string or an enumerated-string with
  1728. // the value NONE. If the value is a quoted-string, it MUST match the
  1729. // value of the GROUP-ID attribute of an EXT-X-MEDIA tag elsewhere in the
  1730. // Playlist whose TYPE attribute is CLOSED-CAPTIONS.
1731. if (type == ContentType.VIDEO) {
  1732. if (this.config_.disableText) {
  1733. this.needsClosedCaptionsDetection_ = false;
  1734. return null;
  1735. }
  1736. if (closedCaptionsAttr) {
  1737. if (closedCaptionsAttr != 'NONE') {
  1738. return this.groupIdToClosedCaptionsMap_.get(closedCaptionsAttr);
  1739. }
  1740. this.needsClosedCaptionsDetection_ = false;
  1741. } else if (!closedCaptionsAttr && this.groupIdToClosedCaptionsMap_.size) {
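// No CLOSED-CAPTIONS attribute was given, but closed-captions groups exist
// in the playlist, so fall back to the first available group.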
  1742. for (const key of this.groupIdToClosedCaptionsMap_.keys()) {
  1743. return this.groupIdToClosedCaptionsMap_.get(key);
  1744. }
  1745. }
  1746. }
  1747. return null;
  1748. }
  1749. /**
  1750. * Get the normalized language value.
  1751. *
  1752. * @param {?string} languageValue
  1753. * @return {string}
  1754. * @private
  1755. */
  1756. getLanguage_(languageValue) {
  1757. const LanguageUtils = shaka.util.LanguageUtils;
  1758. return LanguageUtils.normalize(languageValue || 'und');
  1759. }
  1760. /**
  1761. * Get the type value.
  1762. * Shaka recognizes the content types 'audio', 'video', 'text', and 'image'.
  1763. * The HLS 'subtitles' type needs to be mapped to 'text'.
  1764. * @param {!shaka.hls.Tag} tag
  1765. * @return {string}
  1766. * @private
  1767. */
  1768. getType_(tag) {
  1769. let type = tag.getRequiredAttrValue('TYPE').toLowerCase();
  1770. if (type == 'subtitles') {
  1771. type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1772. }
  1773. return type;
  1774. }
  1775. /**
  1776. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} audioInfos
  1777. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} videoInfos
  1778. * @param {number} bandwidth
  1779. * @param {?string} width
  1780. * @param {?string} height
  1781. * @param {?string} frameRate
  1782. * @param {?string} videoRange
  1783. * @param {?string} videoLayout
  1784. * @param {!Array.<shaka.extern.DrmInfo>} drmInfos
  1785. * @param {!Set.<string>} keyIds
  1786. * @param {!Array.<!shaka.extern.Stream>} iFrameStreams
  1787. * @return {!Array.<!shaka.extern.Variant>}
  1788. * @private
  1789. */
  1790. createVariants_(
  1791. audioInfos, videoInfos, bandwidth, width, height, frameRate, videoRange,
  1792. videoLayout, drmInfos, keyIds, iFrameStreams) {
  1793. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1794. const DrmUtils = shaka.util.DrmUtils;
  1795. for (const info of videoInfos) {
  1796. this.addVideoAttributes_(
  1797. info.stream, width, height, frameRate, videoRange, videoLayout,
1798. /* colorGamut= */ null);
  1799. }
  1800. // In case of audio-only or video-only content or the audio/video is
  1801. // disabled by the config, we create an array of one item containing
  1802. // a null. This way, the double-loop works for all kinds of content.
  1803. // NOTE: we currently don't have support for audio-only content.
  1804. const disableAudio = this.config_.disableAudio;
  1805. if (!audioInfos.length || disableAudio) {
  1806. audioInfos = [null];
  1807. }
  1808. const disableVideo = this.config_.disableVideo;
  1809. if (!videoInfos.length || disableVideo) {
  1810. videoInfos = [null];
  1811. }
  1812. const variants = [];
  1813. for (const audioInfo of audioInfos) {
  1814. for (const videoInfo of videoInfos) {
  1815. const audioStream = audioInfo ? audioInfo.stream : null;
  1816. if (audioStream) {
  1817. audioStream.drmInfos = drmInfos;
  1818. audioStream.keyIds = keyIds;
  1819. }
  1820. const videoStream = videoInfo ? videoInfo.stream : null;
  1821. if (videoStream) {
  1822. videoStream.drmInfos = drmInfos;
  1823. videoStream.keyIds = keyIds;
  1824. shaka.util.StreamUtils.setBetterIFrameStream(
  1825. videoStream, iFrameStreams);
  1826. }
  1827. if (videoStream && !audioStream) {
  1828. videoStream.bandwidth = bandwidth;
  1829. }
  1830. if (!videoStream && audioStream) {
  1831. audioStream.bandwidth = bandwidth;
  1832. }
  1833. const audioDrmInfos = audioInfo ? audioInfo.stream.drmInfos : null;
  1834. const videoDrmInfos = videoInfo ? videoInfo.stream.drmInfos : null;
  1835. const videoStreamUri =
  1836. videoInfo ? videoInfo.getUris().sort().join(',') : '';
  1837. const audioStreamUri =
  1838. audioInfo ? audioInfo.getUris().sort().join(',') : '';
  1839. const variantUriKey = videoStreamUri + ' - ' + audioStreamUri;
  1840. if (audioStream && videoStream) {
  1841. if (!DrmUtils.areDrmCompatible(audioDrmInfos, videoDrmInfos)) {
  1842. shaka.log.warning(
  1843. 'Incompatible DRM info in HLS variant. Skipping.');
  1844. continue;
  1845. }
  1846. }
  1847. if (this.variantUriSet_.has(variantUriKey)) {
  1848. // This happens when two variants only differ in their text streams.
  1849. shaka.log.debug(
  1850. 'Skipping variant which only differs in text streams.');
  1851. continue;
  1852. }
  1853. // Since both audio and video are of the same type, this assertion will
  1854. // catch certain mistakes at runtime that the compiler would miss.
  1855. goog.asserts.assert(!audioStream ||
  1856. audioStream.type == ContentType.AUDIO, 'Audio parameter mismatch!');
  1857. goog.asserts.assert(!videoStream ||
  1858. videoStream.type == ContentType.VIDEO, 'Video parameter mismatch!');
  1859. const variant = {
  1860. id: this.globalId_++,
  1861. language: audioStream ? audioStream.language : 'und',
  1862. disabledUntilTime: 0,
  1863. primary: (!!audioStream && audioStream.primary) ||
  1864. (!!videoStream && videoStream.primary),
  1865. audio: audioStream,
  1866. video: videoStream,
  1867. bandwidth,
  1868. allowedByApplication: true,
  1869. allowedByKeySystem: true,
  1870. decodingInfos: [],
  1871. };
  1872. variants.push(variant);
  1873. this.variantUriSet_.add(variantUriKey);
  1874. }
  1875. }
  1876. return variants;
  1877. }
  1878. /**
  1879. * Parses an array of EXT-X-MEDIA tags, then stores the values of all tags
  1880. * with TYPE="CLOSED-CAPTIONS" into a map of group id to closed captions.
  1881. *
  1882. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1883. * @private
  1884. */
  1885. parseClosedCaptions_(mediaTags) {
  1886. const closedCaptionsTags =
  1887. shaka.hls.Utils.filterTagsByType(mediaTags, 'CLOSED-CAPTIONS');
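// If the playlist declares no CLOSED-CAPTIONS renditions, we may still need
// to probe media segments later to detect embedded captions (unless that
// detection is disabled in the config).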
  1888. this.needsClosedCaptionsDetection_ = closedCaptionsTags.length == 0;
  1889. for (const tag of closedCaptionsTags) {
  1890. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1891. 'Should only be called on media tags!');
  1892. const languageValue = tag.getAttributeValue('LANGUAGE');
  1893. let language = this.getLanguage_(languageValue);
  1894. if (!languageValue) {
  1895. const nameValue = tag.getAttributeValue('NAME');
  1896. if (nameValue) {
  1897. language = nameValue;
  1898. }
  1899. }
  1900. // The GROUP-ID value is a quoted-string that specifies the group to which
  1901. // the Rendition belongs.
  1902. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1903. // The value of INSTREAM-ID is a quoted-string that specifies a Rendition
  1904. // within the segments in the Media Playlist. This attribute is REQUIRED
  1905. // if the TYPE attribute is CLOSED-CAPTIONS.
1906. // We need to replace the SERVICE string with our internal svc string.
  1907. const instreamId = tag.getRequiredAttrValue('INSTREAM-ID')
  1908. .replace('SERVICE', 'svc');
  1909. if (!this.groupIdToClosedCaptionsMap_.get(groupId)) {
  1910. this.groupIdToClosedCaptionsMap_.set(groupId, new Map());
  1911. }
  1912. this.groupIdToClosedCaptionsMap_.get(groupId).set(instreamId, language);
  1913. }
  1914. }
  1915. /**
  1916. * Parse EXT-X-MEDIA media tag into a Stream object.
  1917. *
  1918. * @param {!Array.<!shaka.hls.Tag>} tags
  1919. * @param {!Map.<string, string>} groupIdPathwayIdMapping
  1920. * @return {!shaka.hls.HlsParser.StreamInfo}
  1921. * @private
  1922. */
  1923. createStreamInfoFromMediaTags_(tags, groupIdPathwayIdMapping) {
  1924. const verbatimMediaPlaylistUris = [];
  1925. const globalGroupIds = [];
1926. const groupIdUriMapping = new Map();
  1927. for (const tag of tags) {
  1928. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1929. 'Should only be called on media tags!');
  1930. const uri = tag.getRequiredAttrValue('URI');
  1931. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1932. verbatimMediaPlaylistUris.push(uri);
  1933. globalGroupIds.push(groupId);
1934. groupIdUriMapping.set(groupId, uri);
  1935. }
  1936. const globalGroupId = globalGroupIds.sort().join(',');
  1937. const firstTag = tags[0];
  1938. let codecs = '';
  1939. /** @type {string} */
  1940. const type = this.getType_(firstTag);
  1941. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  1942. codecs = firstTag.getAttributeValue('CODECS') || '';
  1943. } else {
  1944. for (const groupId of globalGroupIds) {
  1945. if (this.groupIdToCodecsMap_.has(groupId)) {
  1946. codecs = this.groupIdToCodecsMap_.get(groupId);
  1947. break;
  1948. }
  1949. }
  1950. }
  1951. // Check if the stream has already been created as part of another Variant
  1952. // and return it if it has.
  1953. const key = verbatimMediaPlaylistUris.sort().join(',');
  1954. if (this.uriToStreamInfosMap_.has(key)) {
  1955. return this.uriToStreamInfosMap_.get(key);
  1956. }
  1957. const streamId = this.globalId_++;
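// When Content Steering is in use, register each group's URI under its
// pathway so the steering manager can choose between locations for this
// stream.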
  1958. if (this.contentSteeringManager_) {
1959. for (const [groupId, uri] of groupIdUriMapping) {
  1960. const pathwayId = groupIdPathwayIdMapping.get(groupId);
  1961. if (pathwayId) {
  1962. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  1963. }
  1964. }
  1965. }
  1966. const language = firstTag.getAttributeValue('LANGUAGE');
  1967. const name = firstTag.getAttributeValue('NAME');
  1968. // NOTE: According to the HLS spec, "DEFAULT=YES" requires "AUTOSELECT=YES".
  1969. // However, we don't bother to validate "AUTOSELECT", since we don't
  1970. // actually use it in our streaming model, and we treat everything as
  1971. // "AUTOSELECT=YES". A value of "AUTOSELECT=NO" would imply that it may
  1972. // only be selected explicitly by the user, and we don't have a way to
  1973. // represent that in our model.
  1974. const defaultAttrValue = firstTag.getAttributeValue('DEFAULT');
  1975. const primary = defaultAttrValue == 'YES';
  1976. const channelsCount =
  1977. type == 'audio' ? this.getChannelsCount_(firstTag) : null;
  1978. const spatialAudio =
  1979. type == 'audio' ? this.isSpatialAudio_(firstTag) : false;
  1980. const characteristics = firstTag.getAttributeValue('CHARACTERISTICS');
  1981. const forcedAttrValue = firstTag.getAttributeValue('FORCED');
  1982. const forced = forcedAttrValue == 'YES';
  1983. const sampleRate = type == 'audio' ? this.getSampleRate_(firstTag) : null;
  1984. // TODO: Should we take into account some of the currently ignored
  1985. // attributes: INSTREAM-ID, Attribute descriptions: https://bit.ly/2lpjOhj
  1986. const streamInfo = this.createStreamInfo_(
  1987. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  1988. primary, name, channelsCount, /* closedCaptions= */ null,
  1989. characteristics, forced, sampleRate, spatialAudio);
  1990. if (streamInfo.stream) {
  1991. streamInfo.stream.groupId = globalGroupId;
  1992. }
  1993. if (this.groupIdToStreamInfosMap_.has(globalGroupId)) {
  1994. this.groupIdToStreamInfosMap_.get(globalGroupId).push(streamInfo);
  1995. } else {
  1996. this.groupIdToStreamInfosMap_.set(globalGroupId, [streamInfo]);
  1997. }
  1998. this.uriToStreamInfosMap_.set(key, streamInfo);
  1999. return streamInfo;
  2000. }
  2001. /**
  2002. * Parse EXT-X-IMAGE-STREAM-INF media tag into a Stream object.
  2003. *
  2004. * @param {shaka.hls.Tag} tag
  2005. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  2006. * @private
  2007. */
  2008. async createStreamInfoFromImageTag_(tag) {
  2009. goog.asserts.assert(tag.name == 'EXT-X-IMAGE-STREAM-INF',
  2010. 'Should only be called on image tags!');
  2011. /** @type {string} */
  2012. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  2013. const verbatimImagePlaylistUri = tag.getRequiredAttrValue('URI');
  2014. const codecs = tag.getAttributeValue('CODECS', 'jpeg') || '';
  2015. // Check if the stream has already been created as part of another Variant
  2016. // and return it if it has.
  2017. if (this.uriToStreamInfosMap_.has(verbatimImagePlaylistUri)) {
  2018. return this.uriToStreamInfosMap_.get(verbatimImagePlaylistUri);
  2019. }
  2020. const language = tag.getAttributeValue('LANGUAGE');
  2021. const name = tag.getAttributeValue('NAME');
  2022. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  2023. const streamInfo = this.createStreamInfo_(
  2024. this.globalId_++, [verbatimImagePlaylistUri], codecs, type, language,
  2025. /* primary= */ false, name, /* channelsCount= */ null,
  2026. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  2027. /* sampleRate= */ null, /* spatialAudio= */ false);
  2028. // Parse misc attributes.
  2029. const resolution = tag.getAttributeValue('RESOLUTION');
  2030. if (resolution) {
2031. // The RESOLUTION attribute gives the resolution of a single thumbnail, not
2032. // of the entire sheet at once (as we expect in the output).
  2033. // So multiply by the layout size.
  2034. // Since we need to have generated the segment index for this, we can't
  2035. // lazy-load in this situation.
  2036. await streamInfo.stream.createSegmentIndex();
  2037. const reference = streamInfo.stream.segmentIndex.earliestReference();
  2038. const layout = reference.getTilesLayout();
  2039. if (layout) {
  2040. streamInfo.stream.width =
  2041. Number(resolution.split('x')[0]) * Number(layout.split('x')[0]);
  2042. streamInfo.stream.height =
  2043. Number(resolution.split('x')[1]) * Number(layout.split('x')[1]);
  2044. // TODO: What happens if there are multiple grids, with different
  2045. // layout sizes, inside this image stream?
  2046. }
  2047. }
  2048. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  2049. if (bandwidth) {
  2050. streamInfo.stream.bandwidth = Number(bandwidth);
  2051. }
  2052. this.uriToStreamInfosMap_.set(verbatimImagePlaylistUri, streamInfo);
  2053. return streamInfo;
  2054. }
  2055. /**
  2056. * Parse EXT-X-I-FRAME-STREAM-INF media tag into a Stream object.
  2057. *
  2058. * @param {shaka.hls.Tag} tag
  2059. * @return {!shaka.hls.HlsParser.StreamInfo}
  2060. * @private
  2061. */
  2062. createStreamInfoFromIframeTag_(tag) {
  2063. goog.asserts.assert(tag.name == 'EXT-X-I-FRAME-STREAM-INF',
  2064. 'Should only be called on iframe tags!');
  2065. /** @type {string} */
  2066. let type = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  2067. const verbatimIFramePlaylistUri = tag.getRequiredAttrValue('URI');
  2068. const codecs = tag.getAttributeValue('CODECS') || '';
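// An I-frame playlist whose codec is 'mjpg' is really a thumbnail track, so
// treat it as an image stream rather than a video stream.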
  2069. if (codecs == 'mjpg') {
  2070. type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  2071. }
  2072. // Check if the stream has already been created as part of another Variant
  2073. // and return it if it has.
  2074. if (this.uriToStreamInfosMap_.has(verbatimIFramePlaylistUri)) {
  2075. return this.uriToStreamInfosMap_.get(verbatimIFramePlaylistUri);
  2076. }
  2077. const language = tag.getAttributeValue('LANGUAGE');
  2078. const name = tag.getAttributeValue('NAME');
  2079. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  2080. const streamInfo = this.createStreamInfo_(
  2081. this.globalId_++, [verbatimIFramePlaylistUri], codecs, type, language,
  2082. /* primary= */ false, name, /* channelsCount= */ null,
  2083. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  2084. /* sampleRate= */ null, /* spatialAudio= */ false);
  2085. // Parse misc attributes.
  2086. const resolution = tag.getAttributeValue('RESOLUTION');
  2087. const [width, height] = resolution ? resolution.split('x') : [null, null];
  2088. streamInfo.stream.width = Number(width) || undefined;
  2089. streamInfo.stream.height = Number(height) || undefined;
  2090. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  2091. if (bandwidth) {
  2092. streamInfo.stream.bandwidth = Number(bandwidth);
  2093. }
  2094. this.uriToStreamInfosMap_.set(verbatimIFramePlaylistUri, streamInfo);
  2095. return streamInfo;
  2096. }
  2097. /**
  2098. * Parse an EXT-X-STREAM-INF media tag into a Stream object.
  2099. *
  2100. * @param {!Array.<!shaka.hls.Tag>} tags
  2101. * @param {!Array.<string>} allCodecs
  2102. * @param {string} type
  2103. * @param {?string} language
  2104. * @param {?string} name
  2105. * @param {?number} channelsCount
  2106. * @param {?string} characteristics
  2107. * @param {?number} sampleRate
  2108. * @param {boolean} spatialAudio
  2109. * @return {!shaka.hls.HlsParser.StreamInfo}
  2110. * @private
  2111. */
  2112. createStreamInfoFromVariantTags_(tags, allCodecs, type, language, name,
  2113. channelsCount, characteristics, sampleRate, spatialAudio) {
  2114. const streamId = this.globalId_++;
  2115. const verbatimMediaPlaylistUris = [];
  2116. for (const tag of tags) {
  2117. goog.asserts.assert(tag.name == 'EXT-X-STREAM-INF',
  2118. 'Should only be called on variant tags!');
  2119. const uri = tag.getRequiredAttrValue('URI');
  2120. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  2121. if (this.contentSteeringManager_ && pathwayId) {
  2122. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  2123. }
  2124. verbatimMediaPlaylistUris.push(uri);
  2125. }
  2126. const key = verbatimMediaPlaylistUris.sort().join(',');
  2127. if (this.uriToStreamInfosMap_.has(key)) {
  2128. return this.uriToStreamInfosMap_.get(key);
  2129. }
  2130. const closedCaptions = this.getClosedCaptions_(tags[0], type);
  2131. const codecs = shaka.util.ManifestParserUtils.guessCodecs(type, allCodecs);
  2132. const streamInfo = this.createStreamInfo_(
  2133. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  2134. /* primary= */ false, name, channelsCount, closedCaptions,
  2135. characteristics, /* forced= */ false, sampleRate,
  2136. /* spatialAudio= */ false);
  2137. this.uriToStreamInfosMap_.set(key, streamInfo);
  2138. return streamInfo;
  2139. }
  2140. /**
  2141. * @param {number} streamId
  2142. * @param {!Array.<string>} verbatimMediaPlaylistUris
  2143. * @param {string} codecs
  2144. * @param {string} type
  2145. * @param {?string} languageValue
  2146. * @param {boolean} primary
  2147. * @param {?string} name
  2148. * @param {?number} channelsCount
  2149. * @param {Map.<string, string>} closedCaptions
  2150. * @param {?string} characteristics
  2151. * @param {boolean} forced
  2152. * @param {?number} sampleRate
  2153. * @param {boolean} spatialAudio
  2154. * @return {!shaka.hls.HlsParser.StreamInfo}
  2155. * @private
  2156. */
  2157. createStreamInfo_(streamId, verbatimMediaPlaylistUris, codecs, type,
  2158. languageValue, primary, name, channelsCount, closedCaptions,
  2159. characteristics, forced, sampleRate, spatialAudio) {
  2160. // TODO: Refactor, too many parameters
  2161. // This stream is lazy-loaded inside the createSegmentIndex function.
  2162. // So we start out with a stream object that does not contain the actual
  2163. // segment index, then download when createSegmentIndex is called.
  2164. const stream = this.makeStreamObject_(streamId, codecs, type,
  2165. languageValue, primary, name, channelsCount, closedCaptions,
  2166. characteristics, forced, sampleRate, spatialAudio);
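// Final URIs discovered via HTTP redirects are recorded here so that later
// requests for this playlist go directly to the redirected location.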
  2167. const redirectUris = [];
  2168. const getUris = () => {
  2169. if (this.contentSteeringManager_ &&
  2170. verbatimMediaPlaylistUris.length > 1) {
  2171. return this.contentSteeringManager_.getLocations(streamId);
  2172. }
  2173. return redirectUris.concat(shaka.hls.Utils.constructUris(
  2174. [this.masterPlaylistUri_], verbatimMediaPlaylistUris,
  2175. this.globalVariables_));
  2176. };
  2177. const streamInfo = {
  2178. stream,
  2179. type,
  2180. redirectUris,
  2181. getUris,
  2182. // These values are filled out or updated after lazy-loading:
  2183. minTimestamp: 0,
  2184. maxTimestamp: 0,
  2185. mediaSequenceToStartTime: new Map(),
  2186. canSkipSegments: false,
  2187. canBlockReload: false,
  2188. hasEndList: false,
  2189. firstSequenceNumber: -1,
  2190. nextMediaSequence: -1,
  2191. nextPart: -1,
  2192. loadedOnce: false,
  2193. };
  2194. /** @param {!shaka.net.NetworkingEngine.PendingRequest} pendingRequest */
  2195. const downloadSegmentIndex = async (pendingRequest) => {
  2196. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2197. try {
  2198. const uris = streamInfo.getUris();
  2199. // Download the actual manifest.
  2200. const response = await pendingRequest.promise;
  2201. if (pendingRequest.aborted) {
  2202. return;
  2203. }
  2204. // Record the final URI after redirects.
  2205. const responseUri = response.uri;
  2206. if (responseUri != response.originalUri &&
  2207. !uris.includes(responseUri)) {
  2208. redirectUris.push(responseUri);
  2209. }
  2210. // Record the redirected, final URI of this media playlist when we parse
  2211. // it.
  2212. /** @type {!shaka.hls.Playlist} */
  2213. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  2214. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  2215. throw new shaka.util.Error(
  2216. shaka.util.Error.Severity.CRITICAL,
  2217. shaka.util.Error.Category.MANIFEST,
  2218. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  2219. }
  2220. /** @type {!Array.<!shaka.hls.Tag>} */
  2221. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  2222. 'EXT-X-DEFINE');
  2223. const mediaVariables =
  2224. this.parseMediaVariables_(variablesTags, responseUri);
  2225. const mimeType = undefined;
  2226. let requestBasicInfo = false;
2227. // If no codec info was provided in the manifest and codec guessing is
2228. // disabled, we try to get the necessary info from the media data.
  2229. if ((!this.codecInfoInManifest_ &&
  2230. this.config_.hls.disableCodecGuessing) ||
  2231. (this.needsClosedCaptionsDetection_ && type == ContentType.VIDEO &&
  2232. !this.config_.hls.disableClosedCaptionsDetection)) {
  2233. if (playlist.segments.length > 0) {
  2234. this.needsClosedCaptionsDetection_ = false;
  2235. requestBasicInfo = true;
  2236. }
  2237. }
  2238. const allowOverrideMimeType = !this.codecInfoInManifest_ &&
  2239. this.config_.hls.disableCodecGuessing;
  2240. const wasLive = this.isLive_();
  2241. const realStreamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  2242. streamId, mediaVariables, playlist, getUris, responseUri, codecs,
  2243. type, languageValue, primary, name, channelsCount, closedCaptions,
  2244. characteristics, forced, sampleRate, spatialAudio, mimeType,
  2245. requestBasicInfo, allowOverrideMimeType);
  2246. if (pendingRequest.aborted) {
  2247. return;
  2248. }
  2249. const realStream = realStreamInfo.stream;
  2250. this.determineStartTime_(playlist);
  2251. if (this.isLive_() && !wasLive) {
  2252. // Now that we know that the presentation is live, convert the
  2253. // timeline to live.
  2254. this.changePresentationTimelineToLive_(playlist);
  2255. }
  2256. // Copy values from the real stream info to our initial one.
  2257. streamInfo.minTimestamp = realStreamInfo.minTimestamp;
  2258. streamInfo.maxTimestamp = realStreamInfo.maxTimestamp;
  2259. streamInfo.canSkipSegments = realStreamInfo.canSkipSegments;
  2260. streamInfo.canBlockReload = realStreamInfo.canBlockReload;
  2261. streamInfo.hasEndList = realStreamInfo.hasEndList;
  2262. streamInfo.mediaSequenceToStartTime =
  2263. realStreamInfo.mediaSequenceToStartTime;
  2264. streamInfo.nextMediaSequence = realStreamInfo.nextMediaSequence;
  2265. streamInfo.nextPart = realStreamInfo.nextPart;
  2266. streamInfo.loadedOnce = true;
  2267. stream.segmentIndex = realStream.segmentIndex;
  2268. stream.encrypted = realStream.encrypted;
  2269. stream.drmInfos = realStream.drmInfos;
  2270. stream.keyIds = realStream.keyIds;
  2271. stream.mimeType = realStream.mimeType;
  2272. stream.bandwidth = stream.bandwidth || realStream.bandwidth;
  2273. stream.codecs = stream.codecs || realStream.codecs;
  2274. stream.closedCaptions =
  2275. stream.closedCaptions || realStream.closedCaptions;
  2276. stream.width = stream.width || realStream.width;
  2277. stream.height = stream.height || realStream.height;
  2278. stream.hdr = stream.hdr || realStream.hdr;
  2279. stream.colorGamut = stream.colorGamut || realStream.colorGamut;
  2280. stream.frameRate = stream.frameRate || realStream.frameRate;
  2281. if (stream.language == 'und' && realStream.language != 'und') {
  2282. stream.language = realStream.language;
  2283. }
  2284. stream.language = stream.language || realStream.language;
  2285. stream.channelsCount = stream.channelsCount || realStream.channelsCount;
  2286. stream.audioSamplingRate =
  2287. stream.audioSamplingRate || realStream.audioSamplingRate;
  2288. this.setFullTypeForStream_(stream);
  2289. // Since we lazy-loaded this content, the player may need to create new
  2290. // sessions for the DRM info in this stream.
  2291. if (stream.drmInfos.length) {
  2292. this.playerInterface_.newDrmInfo(stream);
  2293. }
  2294. let closedCaptionsUpdated = false;
  2295. if ((!closedCaptions && stream.closedCaptions) ||
  2296. (closedCaptions && stream.closedCaptions &&
  2297. closedCaptions.size != stream.closedCaptions.size)) {
  2298. closedCaptionsUpdated = true;
  2299. }
  2300. if (this.manifest_ && closedCaptionsUpdated) {
  2301. this.playerInterface_.makeTextStreamsForClosedCaptions(
  2302. this.manifest_);
  2303. }
  2304. if (type == ContentType.VIDEO || type == ContentType.AUDIO) {
  2305. for (const otherStreamInfo of this.uriToStreamInfosMap_.values()) {
  2306. if (!otherStreamInfo.loadedOnce && otherStreamInfo.type == type) {
  2307. // To aid manifest filtering, assume before loading that all video
  2308. // renditions have the same MIME type. (And likewise for audio.)
  2309. otherStreamInfo.stream.mimeType = realStream.mimeType;
  2310. this.setFullTypeForStream_(otherStreamInfo.stream);
  2311. }
  2312. }
  2313. }
  2314. if (type == ContentType.TEXT) {
  2315. const firstSegment = realStream.segmentIndex.earliestReference();
  2316. if (firstSegment && firstSegment.initSegmentReference) {
  2317. stream.mimeType = 'application/mp4';
  2318. this.setFullTypeForStream_(stream);
  2319. }
  2320. }
  2321. const qualityInfo =
  2322. shaka.media.QualityObserver.createQualityInfo(stream);
  2323. stream.segmentIndex.forEachTopLevelReference((reference) => {
  2324. if (reference.initSegmentReference) {
  2325. reference.initSegmentReference.mediaQuality = qualityInfo;
  2326. }
  2327. });
  2328. // Add finishing touches to the stream that can only be done once we
  2329. // have more full context on the media as a whole.
  2330. if (this.hasEnoughInfoToFinalizeStreams_()) {
  2331. if (!this.streamsFinalized_) {
  2332. // Mark this manifest as having been finalized, so we don't go
  2333. // through this whole process of finishing touches a second time.
  2334. this.streamsFinalized_ = true;
  2335. // Finalize all of the currently-loaded streams.
  2336. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  2337. const activeStreamInfos =
  2338. streamInfos.filter((s) => s.stream.segmentIndex);
  2339. this.finalizeStreams_(activeStreamInfos);
  2340. // With the addition of this new stream, we now have enough info to
  2341. // figure out how long the streams should be. So process all streams
  2342. // we have downloaded up until this point.
  2343. this.determineDuration_();
  2344. // Finally, start the update timer, if this asset has been
  2345. // determined to be a livestream.
  2346. const delay = this.getUpdatePlaylistDelay_();
  2347. if (delay > 0) {
  2348. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  2349. }
  2350. } else {
  2351. // We don't need to go through the full process; just finalize this
  2352. // single stream.
  2353. this.finalizeStreams_([streamInfo]);
  2354. }
  2355. }
  2356. this.processDateRangeTags_(
  2357. playlist.tags, stream.type, mediaVariables, getUris);
  2358. if (this.manifest_) {
  2359. this.manifest_.startTime = this.startTime_;
  2360. }
  2361. } catch (e) {
  2362. stream.closeSegmentIndex();
  2363. if (e.code === shaka.util.Error.Code.OPERATION_ABORTED) {
  2364. return;
  2365. }
  2366. const handled = this.playerInterface_.disableStream(stream);
  2367. if (!handled) {
  2368. throw e;
  2369. }
  2370. }
  2371. };
  2372. /** @type {Promise} */
  2373. let creationPromise = null;
  2374. /** @type {!shaka.net.NetworkingEngine.PendingRequest} */
  2375. let pendingRequest;
  2376. const safeCreateSegmentIndex = () => {
  2377. // An operation is already in progress. The second and subsequent
  2378. // callers receive the same Promise as the first caller, and only one
  2379. // download operation will occur.
  2380. if (creationPromise) {
  2381. return creationPromise;
  2382. }
  2383. // Create a new PendingRequest to be able to cancel this specific
  2384. // download.
  2385. pendingRequest = this.requestManifest_(streamInfo.getUris(),
  2386. /* isPlaylist= */ true);
  2387. // Create a Promise tied to the outcome of downloadSegmentIndex(). If
  2388. // downloadSegmentIndex is rejected, creationPromise will also be
  2389. // rejected.
  2390. creationPromise = new Promise((resolve) => {
  2391. resolve(downloadSegmentIndex(pendingRequest));
  2392. });
  2393. return creationPromise;
  2394. };
  2395. stream.createSegmentIndex = safeCreateSegmentIndex;
  2396. stream.closeSegmentIndex = () => {
  2397. // If we're mid-creation, cancel it.
  2398. if (creationPromise && !stream.segmentIndex) {
  2399. pendingRequest.abort();
  2400. }
  2401. // If we have a segment index, release it.
  2402. if (stream.segmentIndex) {
  2403. stream.segmentIndex.release();
  2404. stream.segmentIndex = null;
  2405. }
  2406. // Clear the creation Promise so that a new operation can begin.
  2407. creationPromise = null;
  2408. };
  2409. return streamInfo;
  2410. }
  2411. /**
  2412. * @return {number}
  2413. * @private
  2414. */
  2415. getMinDuration_() {
  2416. let minDuration = Infinity;
  2417. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2418. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  2419. // Since everything is already offset to 0 (either by sync or by being
  2420. // VOD), only maxTimestamp is necessary to compute the duration.
  2421. minDuration = Math.min(minDuration, streamInfo.maxTimestamp);
  2422. }
  2423. }
  2424. return minDuration;
  2425. }
  2426. /**
  2427. * @return {number}
  2428. * @private
  2429. */
  2430. getLiveDuration_() {
  2431. let maxTimestamp = Infinity;
  2432. let minTimestamp = Infinity;
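// Note that both bounds use Math.min across streams, so the reported live
// duration reflects the shortest loaded stream.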
  2433. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2434. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  2435. maxTimestamp = Math.min(maxTimestamp, streamInfo.maxTimestamp);
  2436. minTimestamp = Math.min(minTimestamp, streamInfo.minTimestamp);
  2437. }
  2438. }
  2439. return maxTimestamp - minTimestamp;
  2440. }
  2441. /**
  2442. * @param {!Array.<!shaka.extern.Stream>} streams
  2443. * @private
  2444. */
  2445. notifySegmentsForStreams_(streams) {
  2446. const references = [];
  2447. for (const stream of streams) {
  2448. if (!stream.segmentIndex) {
  2449. // The stream was closed since the list of streams was built.
  2450. continue;
  2451. }
  2452. stream.segmentIndex.forEachTopLevelReference((reference) => {
  2453. references.push(reference);
  2454. });
  2455. }
  2456. this.presentationTimeline_.notifySegments(references);
  2457. }
  2458. /**
  2459. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  2460. * @private
  2461. */
  2462. finalizeStreams_(streamInfos) {
  2463. if (!this.isLive_()) {
  2464. const minDuration = this.getMinDuration_();
  2465. for (const streamInfo of streamInfos) {
  2466. streamInfo.stream.segmentIndex.fit(/* periodStart= */ 0, minDuration);
  2467. }
  2468. }
  2469. this.notifySegmentsForStreams_(streamInfos.map((s) => s.stream));
  2470. const activeStreamInfos = Array.from(this.uriToStreamInfosMap_.values())
  2471. .filter((s) => s.stream.segmentIndex);
  2472. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2473. const hasAudio =
  2474. activeStreamInfos.some((s) => s.stream.type == ContentType.AUDIO);
  2475. const hasVideo =
  2476. activeStreamInfos.some((s) => s.stream.type == ContentType.VIDEO);
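// Decide how to synchronize streams: sequence numbers are used when
// EXT-X-PROGRAM-DATE-TIME is ignored by config, missing from a livestream,
// or present in VOD content that has only one of audio / video.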
  2477. const liveWithNoProgramDateTime =
  2478. this.isLive_() && !this.usesProgramDateTime_;
  2479. const vodWithOnlyAudioOrVideo = !this.isLive_() &&
  2480. this.usesProgramDateTime_ && !(hasAudio && hasVideo);
  2481. if (this.config_.hls.ignoreManifestProgramDateTime ||
  2482. liveWithNoProgramDateTime || vodWithOnlyAudioOrVideo) {
  2483. this.syncStreamsWithSequenceNumber_(
  2484. streamInfos, liveWithNoProgramDateTime);
  2485. } else {
  2486. this.syncStreamsWithProgramDateTime_(streamInfos);
  2487. if (this.config_.hls.ignoreManifestProgramDateTimeForTypes.length > 0) {
  2488. this.syncStreamsWithSequenceNumber_(streamInfos);
  2489. }
  2490. }
  2491. }
  2492. /**
  2493. * @param {string} type
  2494. * @return {boolean}
  2495. * @private
  2496. */
  2497. ignoreManifestProgramDateTimeFor_(type) {
  2498. if (this.config_.hls.ignoreManifestProgramDateTime) {
  2499. return true;
  2500. }
  2501. const forTypes = this.config_.hls.ignoreManifestProgramDateTimeForTypes;
  2502. return forTypes.includes(type);
  2503. }
  2504. /**
  2505. * There are some values on streams that can only be set once we know about
  2506. * both the video and audio content, if present.
  2507. * This checks if there is at least one video downloaded (if the media has
  2508. * video), and that there is at least one audio downloaded (if the media has
  2509. * audio).
  2510. * @return {boolean}
  2511. * @private
  2512. */
  2513. hasEnoughInfoToFinalizeStreams_() {
  2514. if (!this.manifest_) {
  2515. return false;
  2516. }
  2517. const videos = [];
  2518. const audios = [];
  2519. for (const variant of this.manifest_.variants) {
  2520. if (variant.video) {
  2521. videos.push(variant.video);
  2522. }
  2523. if (variant.audio) {
  2524. audios.push(variant.audio);
  2525. }
  2526. }
  2527. if (videos.length > 0 && !videos.some((stream) => stream.segmentIndex)) {
  2528. return false;
  2529. }
  2530. if (audios.length > 0 && !audios.some((stream) => stream.segmentIndex)) {
  2531. return false;
  2532. }
  2533. return true;
  2534. }
  2535. /**
  2536. * @param {number} streamId
  2537. * @param {!shaka.hls.Playlist} playlist
  2538. * @param {function():!Array.<string>} getUris
  2539. * @param {string} responseUri
  2540. * @param {string} codecs
  2541. * @param {string} type
  2542. * @param {?string} languageValue
  2543. * @param {boolean} primary
  2544. * @param {?string} name
  2545. * @param {?number} channelsCount
  2546. * @param {Map.<string, string>} closedCaptions
  2547. * @param {?string} characteristics
  2548. * @param {boolean} forced
  2549. * @param {?number} sampleRate
  2550. * @param {boolean} spatialAudio
  2551. * @param {(string|undefined)} mimeType
  2552. * @param {boolean=} requestBasicInfo
  2553. * @param {boolean=} allowOverrideMimeType
  2554. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  2555. * @private
  2556. */
  2557. async convertParsedPlaylistIntoStreamInfo_(streamId, variables, playlist,
  2558. getUris, responseUri, codecs, type, languageValue, primary, name,
  2559. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2560. spatialAudio, mimeType = undefined, requestBasicInfo = true,
  2561. allowOverrideMimeType = true) {
  2562. const playlistSegments = playlist.segments || [];
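// Segments tagged with EXT-X-GAP contain no media; a playlist in which every
// segment is a gap is treated the same as an empty playlist below.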
  2563. const allAreMissing = playlistSegments.every((seg) => {
  2564. if (shaka.hls.Utils.getFirstTagWithName(seg.tags, 'EXT-X-GAP')) {
  2565. return true;
  2566. }
  2567. return false;
  2568. });
  2569. if (!playlistSegments.length || allAreMissing) {
  2570. throw new shaka.util.Error(
  2571. shaka.util.Error.Severity.CRITICAL,
  2572. shaka.util.Error.Category.MANIFEST,
  2573. shaka.util.Error.Code.HLS_EMPTY_MEDIA_PLAYLIST);
  2574. }
  2575. this.determinePresentationType_(playlist);
  2576. if (this.isLive_()) {
  2577. this.determineLastTargetDuration_(playlist);
  2578. }
  2579. const mediaSequenceToStartTime = this.isLive_() ?
  2580. this.mediaSequenceToStartTimeByType_.get(type) : new Map();
  2581. const {segments, bandwidth} = this.createSegments_(
  2582. playlist, mediaSequenceToStartTime, variables, getUris, type);
  2583. let width = null;
  2584. let height = null;
  2585. let videoRange = null;
  2586. let colorGamut = null;
  2587. let frameRate = null;
  2588. if (segments.length > 0 && requestBasicInfo) {
  2589. const basicInfo = await this.getBasicInfoFromSegments_(segments);
  2590. type = basicInfo.type;
  2591. languageValue = basicInfo.language;
  2592. channelsCount = basicInfo.channelCount;
  2593. sampleRate = basicInfo.sampleRate;
  2594. if (!this.config_.disableText) {
  2595. closedCaptions = basicInfo.closedCaptions;
  2596. }
  2597. height = basicInfo.height;
  2598. width = basicInfo.width;
  2599. videoRange = basicInfo.videoRange;
  2600. colorGamut = basicInfo.colorGamut;
  2601. frameRate = basicInfo.frameRate;
  2602. if (allowOverrideMimeType) {
  2603. mimeType = basicInfo.mimeType;
  2604. codecs = basicInfo.codecs;
  2605. }
  2606. }
  2607. if (!mimeType) {
  2608. mimeType = await this.guessMimeType_(type, codecs, segments);
2609. // Some manifests don't say what text codec they use; this is a problem
2610. // when the cmft extension is used, because we then identify the mimeType
2611. // as application/mp4. In that case, if we don't detect an initialization
2612. // segment, we assume that the mimeType is text/vtt.
  2613. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT &&
  2614. !codecs && mimeType == 'application/mp4' &&
  2615. segments[0] && !segments[0].initSegmentReference) {
  2616. mimeType = 'text/vtt';
  2617. }
  2618. }
  2619. const {drmInfos, keyIds, encrypted, aesEncrypted} =
  2620. await this.parseDrmInfo_(playlist, mimeType, getUris, variables);
  2621. if (encrypted && !drmInfos.length && !aesEncrypted) {
  2622. throw new shaka.util.Error(
  2623. shaka.util.Error.Severity.CRITICAL,
  2624. shaka.util.Error.Category.MANIFEST,
  2625. shaka.util.Error.Code.HLS_KEYFORMATS_NOT_SUPPORTED);
  2626. }
  2627. const stream = this.makeStreamObject_(streamId, codecs, type,
  2628. languageValue, primary, name, channelsCount, closedCaptions,
  2629. characteristics, forced, sampleRate, spatialAudio);
  2630. stream.encrypted = encrypted;
  2631. stream.drmInfos = drmInfos;
  2632. stream.keyIds = keyIds;
  2633. stream.mimeType = mimeType;
  2634. if (bandwidth) {
  2635. stream.bandwidth = bandwidth;
  2636. }
  2637. this.setFullTypeForStream_(stream);
  2638. if (type == shaka.util.ManifestParserUtils.ContentType.VIDEO &&
  2639. (width || height || videoRange || colorGamut)) {
  2640. this.addVideoAttributes_(stream, width, height,
  2641. frameRate, videoRange, /* videoLayout= */ null, colorGamut);
  2642. }
  2643. // This new calculation is necessary for Low Latency streams.
  2644. if (this.isLive_()) {
  2645. this.determineLastTargetDuration_(playlist);
  2646. }
  2647. const firstStartTime = segments[0].startTime;
  2648. const lastSegment = segments[segments.length - 1];
  2649. const lastEndTime = lastSegment.endTime;
  2650. /** @type {!shaka.media.SegmentIndex} */
  2651. const segmentIndex = new shaka.media.SegmentIndex(segments);
  2652. stream.segmentIndex = segmentIndex;
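// EXT-X-SERVER-CONTROL advertises low-latency capabilities: CAN-SKIP-UNTIL
// allows delta playlist updates, and CAN-BLOCK-RELOAD allows blocking
// playlist reloads.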
  2653. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  2654. playlist.tags, 'EXT-X-SERVER-CONTROL');
  2655. const canSkipSegments = serverControlTag ?
  2656. serverControlTag.getAttribute('CAN-SKIP-UNTIL') != null : false;
  2657. const canBlockReload = serverControlTag ?
  2658. serverControlTag.getAttribute('CAN-BLOCK-RELOAD') != null : false;
  2659. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2660. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2661. const {nextMediaSequence, nextPart} =
  2662. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  2663. return {
  2664. stream,
  2665. type,
  2666. redirectUris: [],
  2667. getUris,
  2668. minTimestamp: firstStartTime,
  2669. maxTimestamp: lastEndTime,
  2670. canSkipSegments,
  2671. canBlockReload,
  2672. hasEndList: false,
  2673. firstSequenceNumber: -1,
  2674. nextMediaSequence,
  2675. nextPart,
  2676. mediaSequenceToStartTime,
  2677. loadedOnce: false,
  2678. };
  2679. }
  2680. /**
2681. * Get the next media sequence number (MSN) and part index.
  2682. *
  2683. * @param {number} mediaSequenceNumber
  2684. * @param {!Array.<!shaka.media.SegmentReference>} segments
2685. * @return {{nextMediaSequence: number, nextPart: number}}
  2686. * @private
  2687. */
  2688. getNextMediaSequenceAndPart_(mediaSequenceNumber, segments) {
  2689. const currentMediaSequence = mediaSequenceNumber + segments.length - 1;
  2690. let nextMediaSequence = currentMediaSequence;
  2691. let nextPart = -1;
  2692. if (!segments.length) {
  2693. nextMediaSequence++;
  2694. return {
  2695. nextMediaSequence,
  2696. nextPart,
  2697. };
  2698. }
  2699. const lastSegment = segments[segments.length - 1];
  2700. const partialReferences = lastSegment.partialReferences;
  2701. if (!lastSegment.partialReferences.length) {
  2702. nextMediaSequence++;
  2703. if (lastSegment.hasByterangeOptimization()) {
  2704. nextPart = 0;
  2705. }
  2706. return {
  2707. nextMediaSequence,
  2708. nextPart,
  2709. };
  2710. }
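// The last segment has partial segments. If its last part is only a preload
// hint, keep filling in parts of the current segment; otherwise advance to
// part 0 of the next segment.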
  2711. nextPart = partialReferences.length - 1;
  2712. const lastPartialReference =
  2713. partialReferences[partialReferences.length - 1];
  2714. if (!lastPartialReference.isPreload()) {
  2715. nextMediaSequence++;
  2716. nextPart = 0;
  2717. }
  2718. return {
  2719. nextMediaSequence,
  2720. nextPart,
  2721. };
  2722. }
  2723. /**
  2724. * Creates a stream object with the given parameters.
  2725. * The parameters that are passed into here are only the things that can be
  2726. * known without downloading the media playlist; other values must be set
  2727. * manually on the object after creation.
  2728. * @param {number} id
  2729. * @param {string} codecs
  2730. * @param {string} type
  2731. * @param {?string} languageValue
  2732. * @param {boolean} primary
  2733. * @param {?string} name
  2734. * @param {?number} channelsCount
  2735. * @param {Map.<string, string>} closedCaptions
  2736. * @param {?string} characteristics
  2737. * @param {boolean} forced
  2738. * @param {?number} sampleRate
  2739. * @param {boolean} spatialAudio
  2740. * @return {!shaka.extern.Stream}
  2741. * @private
  2742. */
  2743. makeStreamObject_(id, codecs, type, languageValue, primary, name,
  2744. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2745. spatialAudio) {
  2746. // Fill out a "best-guess" mimeType, for now. It will be replaced once the
  2747. // stream is lazy-loaded.
  2748. const mimeType = this.guessMimeTypeBeforeLoading_(type, codecs) ||
  2749. this.guessMimeTypeFallback_(type);
  2750. const roles = [];
  2751. if (characteristics) {
  2752. for (const characteristic of characteristics.split(',')) {
  2753. roles.push(characteristic);
  2754. }
  2755. }
  2756. let kind = undefined;
  2757. let accessibilityPurpose = null;
  2758. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2759. if (roles.includes('public.accessibility.transcribes-spoken-dialog') &&
  2760. roles.includes('public.accessibility.describes-music-and-sound')) {
  2761. kind = shaka.util.ManifestParserUtils.TextStreamKind.CLOSED_CAPTION;
  2762. } else {
  2763. kind = shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE;
  2764. }
  2765. } else {
  2766. if (roles.includes('public.accessibility.describes-video')) {
  2767. accessibilityPurpose =
  2768. shaka.media.ManifestParser.AccessibilityPurpose.VISUALLY_IMPAIRED;
  2769. }
  2770. }
  2771. // If there are no roles, and we have defaulted to the subtitle "kind" for
  2772. // this track, add the implied subtitle role.
  2773. if (!roles.length &&
  2774. kind === shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE) {
  2775. roles.push(shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE);
  2776. }
  2777. const stream = {
  2778. id: this.globalId_++,
  2779. originalId: name,
  2780. groupId: null,
  2781. createSegmentIndex: () => Promise.resolve(),
  2782. segmentIndex: null,
  2783. mimeType,
  2784. codecs,
  2785. kind: (type == shaka.util.ManifestParserUtils.ContentType.TEXT) ?
  2786. shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE : undefined,
  2787. encrypted: false,
  2788. drmInfos: [],
  2789. keyIds: new Set(),
  2790. language: this.getLanguage_(languageValue),
  2791. originalLanguage: languageValue,
  2792. label: name, // For historical reasons, since before "originalId".
  2793. type,
  2794. primary,
  2795. // TODO: trick mode
  2796. trickModeVideo: null,
  2797. emsgSchemeIdUris: null,
  2798. frameRate: undefined,
  2799. pixelAspectRatio: undefined,
  2800. width: undefined,
  2801. height: undefined,
  2802. bandwidth: undefined,
  2803. roles,
  2804. forced,
  2805. channelsCount,
  2806. audioSamplingRate: sampleRate,
  2807. spatialAudio,
  2808. closedCaptions,
  2809. hdr: undefined,
  2810. colorGamut: undefined,
  2811. videoLayout: undefined,
  2812. tilesLayout: undefined,
  2813. accessibilityPurpose: accessibilityPurpose,
  2814. external: false,
  2815. fastSwitching: false,
  2816. fullMimeTypes: new Set(),
  2817. };
  2818. this.setFullTypeForStream_(stream);
  2819. return stream;
  2820. }
  2821. /**
  2822. * @param {!shaka.hls.Playlist} playlist
  2823. * @param {string} mimeType
  2824. * @param {function():!Array.<string>} getUris
  2825. * @param {?Map.<string, string>=} variables
  2826. * @return {Promise.<{
  2827. * drmInfos: !Array.<shaka.extern.DrmInfo>,
  2828. * keyIds: !Set.<string>,
  2829. * encrypted: boolean,
  2830. * aesEncrypted: boolean
  2831. * }>}
  2832. * @private
  2833. */
  2834. async parseDrmInfo_(playlist, mimeType, getUris, variables) {
  2835. /** @type {!Map<!shaka.hls.Tag, ?shaka.media.InitSegmentReference>} */
  2836. const drmTagsMap = new Map();
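// Collect every EXT-X-KEY tag from the playlist's segments, paired with the
// init segment reference that is in effect for the segment where it appears.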
  2837. if (playlist.segments) {
  2838. for (const segment of playlist.segments) {
  2839. const segmentKeyTags = shaka.hls.Utils.filterTagsByName(segment.tags,
  2840. 'EXT-X-KEY');
  2841. let initSegmentRef = null;
  2842. if (segmentKeyTags.length) {
  2843. initSegmentRef = this.getInitSegmentReference_(playlist,
  2844. segment.tags, getUris, variables);
  2845. for (const segmentKeyTag of segmentKeyTags) {
  2846. drmTagsMap.set(segmentKeyTag, initSegmentRef);
  2847. }
  2848. }
  2849. }
  2850. }
  2851. let encrypted = false;
  2852. let aesEncrypted = false;
  2853. /** @type {!Array.<shaka.extern.DrmInfo>}*/
  2854. const drmInfos = [];
  2855. const keyIds = new Set();
  2856. for (const [key, value] of drmTagsMap) {
  2857. const drmTag = /** @type {!shaka.hls.Tag} */ (key);
  2858. const initSegmentRef =
  2859. /** @type {?shaka.media.InitSegmentReference} */ (value);
  2860. const method = drmTag.getRequiredAttrValue('METHOD');
  2861. if (method != 'NONE') {
  2862. encrypted = true;
  2863. // According to the HLS spec, KEYFORMAT is optional and implicitly
  2864. // defaults to "identity".
  2865. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  2866. const keyFormat =
  2867. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  2868. let drmInfo = null;
  2869. if (this.isAesMethod_(method)) {
  2870. // These keys are handled separately.
  2871. aesEncrypted = true;
  2872. continue;
  2873. } else if (keyFormat == 'identity') {
  2874. // eslint-disable-next-line no-await-in-loop
  2875. drmInfo = await this.identityDrmParser_(
  2876. drmTag, mimeType, getUris, initSegmentRef, variables);
  2877. } else {
  2878. const drmParser =
  2879. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  2880. drmInfo = drmParser ? drmParser(drmTag, mimeType) : null;
  2881. }
  2882. if (drmInfo) {
  2883. if (drmInfo.keyIds) {
  2884. for (const keyId of drmInfo.keyIds) {
  2885. keyIds.add(keyId);
  2886. }
  2887. }
  2888. drmInfos.push(drmInfo);
  2889. } else {
  2890. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  2891. }
  2892. }
  2893. }
  2894. return {drmInfos, keyIds, encrypted, aesEncrypted};
  2895. }
  2896. /**
  2897. * @param {!shaka.hls.Tag} drmTag
  2898. * @param {!shaka.hls.Playlist} playlist
  2899. * @param {function():!Array.<string>} getUris
  2900. * @param {?Map.<string, string>=} variables
  2901. * @return {!shaka.extern.aesKey}
  2902. * @private
  2903. */
  2904. parseAESDrmTag_(drmTag, playlist, getUris, variables) {
  2905. // Check if the Web Crypto API is available.
  2906. if (!window.crypto || !window.crypto.subtle) {
  2907. shaka.log.alwaysWarn('Web Crypto API is not available to decrypt ' +
  2908. 'AES. (Web Crypto only exists in secure origins like https)');
  2909. throw new shaka.util.Error(
  2910. shaka.util.Error.Severity.CRITICAL,
  2911. shaka.util.Error.Category.MANIFEST,
  2912. shaka.util.Error.Code.NO_WEB_CRYPTO_API);
  2913. }
  2914. // HLS RFC 8216 Section 5.2:
  2915. // An EXT-X-KEY tag with a KEYFORMAT of "identity" that does not have an IV
  2916. // attribute indicates that the Media Sequence Number is to be used as the
  2917. // IV when decrypting a Media Segment, by putting its big-endian binary
  2918. // representation into a 16-octet (128-bit) buffer and padding (on the left)
  2919. // with zeros.
  2920. let firstMediaSequenceNumber = 0;
  2921. let iv;
  2922. const ivHex = drmTag.getAttributeValue('IV', '');
  2923. if (!ivHex) {
  2924. // Media Sequence Number will be used as IV.
  2925. firstMediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2926. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2927. } else {
  2928. // Exclude 0x at the start of string.
  2929. iv = shaka.util.Uint8ArrayUtils.fromHex(ivHex.substr(2));
  2930. if (iv.byteLength != 16) {
  2931. throw new shaka.util.Error(
  2932. shaka.util.Error.Severity.CRITICAL,
  2933. shaka.util.Error.Category.MANIFEST,
  2934. shaka.util.Error.Code.AES_128_INVALID_IV_LENGTH);
  2935. }
  2936. }
  2937. const aesKeyInfoKey = `${drmTag.toString()}-${firstMediaSequenceNumber}`;
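// Cache key info per (EXT-X-KEY tag, first media sequence number) pair so
// that segments sharing the same key tag reuse the same keyInfo object and,
// once fetched, the same imported CryptoKey.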
  2938. if (!this.aesKeyInfoMap_.has(aesKeyInfoKey)) {
  2939. // Default AES-128
  2940. const keyInfo = {
  2941. bitsKey: 128,
  2942. blockCipherMode: 'CBC',
  2943. iv,
  2944. firstMediaSequenceNumber,
  2945. };
  2946. const method = drmTag.getRequiredAttrValue('METHOD');
  2947. switch (method) {
  2948. case 'AES-256':
  2949. keyInfo.bitsKey = 256;
  2950. break;
  2951. case 'AES-256-CTR':
  2952. keyInfo.bitsKey = 256;
  2953. keyInfo.blockCipherMode = 'CTR';
  2954. break;
  2955. }
  2956. // Don't download the key object until the segment is parsed, to avoid a
  2957. // startup delay for long manifests with lots of keys.
  2958. keyInfo.fetchKey = async () => {
  2959. const keyUris = shaka.hls.Utils.constructSegmentUris(
  2960. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  2961. const keyMapKey = keyUris.sort().join('');
  2962. if (!this.aesKeyMap_.has(keyMapKey)) {
  2963. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  2964. const request = shaka.net.NetworkingEngine.makeRequest(
  2965. keyUris, this.config_.retryParameters);
  2966. const keyResponse = this.makeNetworkRequest_(request, requestType)
  2967. .promise;
  2968. this.aesKeyMap_.set(keyMapKey, keyResponse);
  2969. }
  2970. const keyResponse = await this.aesKeyMap_.get(keyMapKey);
  2971. // keyResponse.status is undefined when URI is "data:text/plain;base64,"
  2972. if (!keyResponse.data ||
  2973. keyResponse.data.byteLength != (keyInfo.bitsKey / 8)) {
  2974. throw new shaka.util.Error(
  2975. shaka.util.Error.Severity.CRITICAL,
  2976. shaka.util.Error.Category.MANIFEST,
  2977. shaka.util.Error.Code.AES_128_INVALID_KEY_LENGTH);
  2978. }
  2979. const algorithm = {
  2980. name: keyInfo.blockCipherMode == 'CTR' ? 'AES-CTR' : 'AES-CBC',
  2981. length: keyInfo.bitsKey,
  2982. };
  2983. keyInfo.cryptoKey = await window.crypto.subtle.importKey(
  2984. 'raw', keyResponse.data, algorithm, true, ['decrypt']);
  2985. keyInfo.fetchKey = undefined; // No longer needed.
  2986. };
  2987. this.aesKeyInfoMap_.set(aesKeyInfoKey, keyInfo);
  2988. }
  2989. return this.aesKeyInfoMap_.get(aesKeyInfoKey);
  2990. }
  2991. /**
  2992. * @param {!shaka.hls.Playlist} playlist
  2993. * @private
  2994. */
  2995. determineStartTime_(playlist) {
2996. // If we already have a start time, avoid processing this again.
  2997. if (this.startTime_ != null) {
  2998. return;
  2999. }
  3000. const startTimeTag =
  3001. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-START');
  3002. if (startTimeTag) {
  3003. this.startTime_ =
  3004. Number(startTimeTag.getRequiredAttrValue('TIME-OFFSET'));
  3005. }
  3006. }
  3007. /**
  3008. * @param {!shaka.hls.Playlist} playlist
  3009. * @private
  3010. */
  3011. determinePresentationType_(playlist) {
  3012. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  3013. const presentationTypeTag =
  3014. shaka.hls.Utils.getFirstTagWithName(playlist.tags,
  3015. 'EXT-X-PLAYLIST-TYPE');
  3016. const endListTag =
  3017. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  3018. const isVod = (presentationTypeTag && presentationTypeTag.value == 'VOD') ||
  3019. endListTag;
  3020. const isEvent = presentationTypeTag &&
  3021. presentationTypeTag.value == 'EVENT' && !isVod;
  3022. const isLive = !isVod && !isEvent;
  3023. if (isVod) {
  3024. this.setPresentationType_(PresentationType.VOD);
  3025. } else {
  3026. // If it's not VOD, it must be presentation type LIVE or an ongoing EVENT.
  3027. if (isLive) {
  3028. this.setPresentationType_(PresentationType.LIVE);
  3029. } else {
  3030. this.setPresentationType_(PresentationType.EVENT);
  3031. }
  3032. }
  3033. }
  3034. /**
  3035. * @param {!shaka.hls.Playlist} playlist
  3036. * @private
  3037. */
  3038. determineLastTargetDuration_(playlist) {
  3039. let lastTargetDuration = Infinity;
  3040. const segments = playlist.segments;
  3041. if (segments.length) {
  3042. let segmentIndex = segments.length - 1;
  3043. while (segmentIndex >= 0) {
  3044. const segment = segments[segmentIndex];
  3045. const extinfTag =
  3046. shaka.hls.Utils.getFirstTagWithName(segment.tags, 'EXTINF');
  3047. if (extinfTag) {
  3048. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  3049. // We're interested in the duration part.
  3050. const extinfValues = extinfTag.value.split(',');
  3051. lastTargetDuration = Number(extinfValues[0]);
  3052. break;
  3053. }
  3054. segmentIndex--;
  3055. }
  3056. }
  3057. const targetDurationTag = this.getRequiredTag_(playlist.tags,
  3058. 'EXT-X-TARGETDURATION');
  3059. const targetDuration = Number(targetDurationTag.value);
  3060. const partialTargetDurationTag =
  3061. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-PART-INF');
  3062. if (partialTargetDurationTag) {
  3063. this.partialTargetDuration_ = Number(
  3064. partialTargetDurationTag.getRequiredAttrValue('PART-TARGET'));
  3065. }
  3066. // Get the server-recommended min distance from the live edge.
  3067. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  3068. playlist.tags, 'EXT-X-SERVER-CONTROL');
  3069. // According to the HLS spec, updates should not happen more often than
  3070. // once in targetDuration. It also requires us to only update the active
  3071. // variant. We might implement that later, but for now every variant
  3072. // will be updated. To get the update period, choose the smallest
  3073. // targetDuration value across all playlists.
  3074. // 1. Update the shortest one to use as update period and segment
  3075. // availability time (for LIVE).
  3076. if (this.lowLatencyMode_ && this.partialTargetDuration_) {
  3077. // For low latency streaming, use the partial segment target duration.
  3078. if (this.lowLatencyByterangeOptimization_) {
3079. // We always have at least 1 partial segment part, and most servers
3080. // allow you to make a request with _HLS_msn=X&_HLS_part=0 at a
3081. // distance of up to 4 partial segments. With this we ensure that we
3082. // obtain the minimum latency in this type of stream.
  3083. if (this.partialTargetDuration_ * 5 <= lastTargetDuration) {
  3084. this.lastTargetDuration_ = Math.min(
  3085. this.partialTargetDuration_, this.lastTargetDuration_);
  3086. } else {
  3087. this.lastTargetDuration_ = Math.min(
  3088. lastTargetDuration, this.lastTargetDuration_);
  3089. }
  3090. } else {
  3091. this.lastTargetDuration_ = Math.min(
  3092. this.partialTargetDuration_, this.lastTargetDuration_);
  3093. }
  3094. // Use 'PART-HOLD-BACK' as the presentation delay for low latency mode.
  3095. this.lowLatencyPresentationDelay_ = serverControlTag ? Number(
  3096. serverControlTag.getRequiredAttrValue('PART-HOLD-BACK')) : 0;
  3097. } else {
  3098. this.lastTargetDuration_ = Math.min(
  3099. lastTargetDuration, this.lastTargetDuration_);
3100. // Use 'HOLD-BACK' as the default presentation delay, if defined.
  3101. const holdBack = serverControlTag ?
  3102. serverControlTag.getAttribute('HOLD-BACK') : null;
  3103. this.presentationDelay_ = holdBack ? Number(holdBack.value) : 0;
  3104. }
  3105. // 2. Update the longest target duration if need be to use as a
  3106. // presentation delay later.
  3107. this.maxTargetDuration_ = Math.max(
  3108. targetDuration, this.maxTargetDuration_);
  3109. }
  3110. /**
  3111. * @param {!shaka.hls.Playlist} playlist
  3112. * @private
  3113. */
  3114. changePresentationTimelineToLive_(playlist) {
  3115. // The live edge will be calculated from segments, so we don't need to
  3116. // set a presentation start time. We will assert later that this is
  3117. // working as expected.
  3118. // The HLS spec (RFC 8216) states in 6.3.3:
  3119. //
  3120. // "The client SHALL choose which Media Segment to play first ... the
  3121. // client SHOULD NOT choose a segment that starts less than three target
  3122. // durations from the end of the Playlist file. Doing so can trigger
  3123. // playback stalls."
  3124. //
3125. // We accomplish this in our DASH-y model by setting a presentation
3126. // delay of the configured value, or 3 segments' duration if not configured.
  3127. // This will be the "live edge" of the presentation.
  3128. let presentationDelay = 0;
  3129. if (this.config_.defaultPresentationDelay) {
  3130. presentationDelay = this.config_.defaultPresentationDelay;
  3131. } else if (this.lowLatencyPresentationDelay_) {
  3132. presentationDelay = this.lowLatencyPresentationDelay_;
  3133. } else if (this.presentationDelay_) {
  3134. presentationDelay = this.presentationDelay_;
  3135. } else {
  3136. const totalSegments = playlist.segments.length;
  3137. let delaySegments = this.config_.hls.liveSegmentsDelay;
  3138. if (delaySegments > (totalSegments - 2)) {
  3139. delaySegments = Math.max(1, totalSegments - 2);
  3140. }
  3141. for (let i = totalSegments - delaySegments; i < totalSegments; i++) {
  3142. const extinfTag = shaka.hls.Utils.getFirstTagWithName(
  3143. playlist.segments[i].tags, 'EXTINF');
  3144. if (extinfTag) {
  3145. const extinfValues = extinfTag.value.split(',');
  3146. const duration = Number(extinfValues[0]);
  3147. presentationDelay += Math.ceil(duration);
  3148. } else {
  3149. presentationDelay += this.maxTargetDuration_;
  3150. }
  3151. }
  3152. }
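// A negative EXT-X-START TIME-OFFSET is measured back from the end of the
// playlist. Cap the presentation delay at that distance, then re-express the
// offset relative to the delayed live edge.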
  3153. if (this.startTime_ && this.startTime_ < 0) {
  3154. presentationDelay = Math.min(-this.startTime_, presentationDelay);
  3155. this.startTime_ += presentationDelay;
  3156. }
  3157. this.presentationTimeline_.setPresentationStartTime(0);
  3158. this.presentationTimeline_.setDelay(presentationDelay);
  3159. this.presentationTimeline_.setStatic(false);
  3160. }
  3161. /**
3162. * Get the InitSegmentReference for a segment if it has an EXT-X-MAP tag.
  3163. * @param {!shaka.hls.Playlist} playlist
  3164. * @param {!Array.<!shaka.hls.Tag>} tags Segment tags
  3165. * @param {function():!Array.<string>} getUris
  3166. * @param {?Map.<string, string>=} variables
  3167. * @return {shaka.media.InitSegmentReference}
  3168. * @private
  3169. */
  3170. getInitSegmentReference_(playlist, tags, getUris, variables) {
  3171. /** @type {?shaka.hls.Tag} */
  3172. const mapTag = shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-MAP');
  3173. if (!mapTag) {
  3174. return null;
  3175. }
  3176. // Map tag example: #EXT-X-MAP:URI="main.mp4",BYTERANGE="720@0"
  3177. const verbatimInitSegmentUri = mapTag.getRequiredAttrValue('URI');
  3178. const absoluteInitSegmentUris = shaka.hls.Utils.constructSegmentUris(
  3179. getUris(), verbatimInitSegmentUri, variables);
  3180. const mapTagKey = [
  3181. absoluteInitSegmentUris.toString(),
  3182. mapTag.getAttributeValue('BYTERANGE', ''),
  3183. ].join('-');
  3184. if (!this.mapTagToInitSegmentRefMap_.has(mapTagKey)) {
  3185. /** @type {shaka.extern.aesKey|undefined} */
  3186. let aesKey = undefined;
  3187. let byteRangeTag = null;
  3188. for (const tag of tags) {
  3189. if (tag.name == 'EXT-X-KEY') {
  3190. if (this.isAesMethod_(tag.getRequiredAttrValue('METHOD')) &&
  3191. tag.id < mapTag.id) {
  3192. aesKey =
  3193. this.parseAESDrmTag_(tag, playlist, getUris, variables);
  3194. }
  3195. } else if (tag.name == 'EXT-X-BYTERANGE' && tag.id < mapTag.id) {
  3196. byteRangeTag = tag;
  3197. }
  3198. }
  3199. const initSegmentRef = this.createInitSegmentReference_(
  3200. absoluteInitSegmentUris, mapTag, byteRangeTag, aesKey);
  3201. this.mapTagToInitSegmentRefMap_.set(mapTagKey, initSegmentRef);
  3202. }
  3203. return this.mapTagToInitSegmentRefMap_.get(mapTagKey);
  3204. }
  3205. /**
  3206. * Create an InitSegmentReference object for the EXT-X-MAP tag in the media
  3207. * playlist.
  3208. * @param {!Array.<string>} absoluteInitSegmentUris
  3209. * @param {!shaka.hls.Tag} mapTag EXT-X-MAP
  3210. * @param {shaka.hls.Tag=} byteRangeTag EXT-X-BYTERANGE
  3211. * @param {shaka.extern.aesKey=} aesKey
  3212. * @return {!shaka.media.InitSegmentReference}
  3213. * @private
  3214. */
  3215. createInitSegmentReference_(absoluteInitSegmentUris, mapTag, byteRangeTag,
  3216. aesKey) {
  3217. let startByte = 0;
  3218. let endByte = null;
  3219. let byterange = mapTag.getAttributeValue('BYTERANGE');
  3220. if (!byterange && byteRangeTag) {
  3221. byterange = byteRangeTag.value;
  3222. }
  3223. // If a BYTERANGE attribute is not specified, the segment consists
  3224. // of the entire resource.
  3225. if (byterange) {
  3226. const blocks = byterange.split('@');
  3227. const byteLength = Number(blocks[0]);
  3228. startByte = Number(blocks[1]);
  3229. endByte = startByte + byteLength - 1;
  3230. if (aesKey) {
3231. // A MAP segment encrypted with an AES method, when served with
3232. // HTTP Range, has the unencrypted size specified in the range.
  3233. // See: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
  3234. const length = (endByte + 1) - startByte;
  3235. if (length % 16) {
  3236. endByte += (16 - (length % 16));
  3237. }
  3238. }
  3239. }
  3240. const initSegmentRef = new shaka.media.InitSegmentReference(
  3241. () => absoluteInitSegmentUris,
  3242. startByte,
  3243. endByte,
  3244. /* mediaQuality= */ null,
  3245. /* timescale= */ null,
  3246. /* segmentData= */ null,
  3247. aesKey);
  3248. return initSegmentRef;
  3249. }
  3250. /**
  3251. * Parses one shaka.hls.Segment object into a shaka.media.SegmentReference.
  3252. *
  3253. * @param {shaka.media.InitSegmentReference} initSegmentReference
  3254. * @param {shaka.media.SegmentReference} previousReference
  3255. * @param {!shaka.hls.Segment} hlsSegment
  3256. * @param {number} startTime
  3257. * @param {!Map.<string, string>} variables
  3258. * @param {!shaka.hls.Playlist} playlist
  3259. * @param {string} type
  3260. * @param {function():!Array.<string>} getUris
  3261. * @param {shaka.extern.aesKey=} aesKey
  3262. * @return {shaka.media.SegmentReference}
  3263. * @private
  3264. */
  3265. createSegmentReference_(
  3266. initSegmentReference, previousReference, hlsSegment, startTime,
  3267. variables, playlist, type, getUris, aesKey) {
  3268. const HlsParser = shaka.hls.HlsParser;
  3269. const getMimeType = (uri) => {
  3270. const parsedUri = new goog.Uri(uri);
  3271. const extension = parsedUri.getPath().split('.').pop();
  3272. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[type];
  3273. let mimeType = map[extension];
  3274. if (!mimeType) {
  3275. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  3276. }
  3277. return mimeType;
  3278. };
  3279. const tags = hlsSegment.tags;
  3280. const extinfTag =
  3281. shaka.hls.Utils.getFirstTagWithName(tags, 'EXTINF');
  3282. let endTime = 0;
  3283. let startByte = 0;
  3284. let endByte = null;
  3285. if (hlsSegment.partialSegments.length) {
  3286. this.manifest_.isLowLatency = true;
  3287. }
  3288. let syncTime = null;
  3289. if (!this.config_.hls.ignoreManifestProgramDateTime) {
  3290. const dateTimeTag =
  3291. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-PROGRAM-DATE-TIME');
  3292. if (dateTimeTag && dateTimeTag.value) {
  3293. syncTime = shaka.util.TXml.parseDate(dateTimeTag.value);
  3294. goog.asserts.assert(syncTime != null,
  3295. 'EXT-X-PROGRAM-DATE-TIME format not valid');
  3296. this.usesProgramDateTime_ = true;
  3297. }
  3298. }
  3299. let status = shaka.media.SegmentReference.Status.AVAILABLE;
  3300. if (shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-GAP')) {
  3301. this.manifest_.gapCount++;
  3302. status = shaka.media.SegmentReference.Status.MISSING;
  3303. }
  3304. if (!extinfTag) {
  3305. if (hlsSegment.partialSegments.length == 0) {
  3306. // EXTINF tag must be available if the segment has no partial segments.
  3307. throw new shaka.util.Error(
  3308. shaka.util.Error.Severity.CRITICAL,
  3309. shaka.util.Error.Category.MANIFEST,
  3310. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, 'EXTINF');
  3311. } else if (!this.lowLatencyMode_) {
  3312. // Without EXTINF and without low-latency mode, partial segments get
  3313. // ignored.
  3314. return null;
  3315. }
  3316. }
  3317. // Create SegmentReferences for the partial segments.
  3318. let partialSegmentRefs = [];
  3319. // Optimization for LL-HLS with byterange
  3320. // More info in https://tinyurl.com/hls-open-byte-range
  3321. let segmentWithByteRangeOptimization = false;
  3322. let getUrisOptimization = null;
  3323. let somePartialSegmentWithGap = false;
  3324. let isPreloadSegment = false;
  3325. if (this.lowLatencyMode_ && hlsSegment.partialSegments.length) {
  3326. const byterangeOptimizationSupport =
  3327. initSegmentReference && window.ReadableStream &&
  3328. this.config_.hls.allowLowLatencyByteRangeOptimization;
  3329. let partialSyncTime = syncTime;
  3330. for (let i = 0; i < hlsSegment.partialSegments.length; i++) {
  3331. const item = hlsSegment.partialSegments[i];
  3332. const pPreviousReference = i == 0 ?
  3333. previousReference : partialSegmentRefs[partialSegmentRefs.length - 1];
  3334. const pStartTime = (i == 0) ? startTime : pPreviousReference.endTime;
  3335. // If DURATION is missing from this partial segment, use the target
  3336. // partial duration from the top of the playlist, which is a required
  3337. // attribute for content with partial segments.
  3338. const pDuration = Number(item.getAttributeValue('DURATION')) ||
  3339. this.partialTargetDuration_;
  3340. // If for some reason we have neither an explicit duration, nor a target
  3341. // partial duration, we should SKIP this partial segment to avoid
  3342. // duplicating content in the presentation timeline.
  3343. if (!pDuration) {
  3344. continue;
  3345. }
  3346. const pEndTime = pStartTime + pDuration;
  3347. let pStartByte = 0;
  3348. let pEndByte = null;
  3349. if (item.name == 'EXT-X-PRELOAD-HINT') {
  3350. // A preload hinted partial segment may have byterange start info.
  3351. const pByterangeStart = item.getAttributeValue('BYTERANGE-START');
  3352. pStartByte = pByterangeStart ? Number(pByterangeStart) : 0;
  3353. // A preload hinted partial segment may have byterange length info.
  3354. const pByterangeLength = item.getAttributeValue('BYTERANGE-LENGTH');
  3355. if (pByterangeLength) {
  3356. pEndByte = pStartByte + Number(pByterangeLength) - 1;
  3357. } else if (pStartByte) {
  3358. // If we have a non-zero start byte, but no end byte, follow the
  3359. // recommendation of https://tinyurl.com/hls-open-byte-range and
  3360. // set the end byte explicitly to a large integer.
  3361. pEndByte = Number.MAX_SAFE_INTEGER;
  3362. }
  3363. } else {
  3364. const pByterange = item.getAttributeValue('BYTERANGE');
  3365. [pStartByte, pEndByte] =
  3366. this.parseByteRange_(pPreviousReference, pByterange);
  3367. }
  3368. const pUri = item.getAttributeValue('URI');
  3369. if (!pUri) {
  3370. continue;
  3371. }
  3372. let partialStatus = shaka.media.SegmentReference.Status.AVAILABLE;
  3373. if (item.getAttributeValue('GAP') == 'YES') {
  3374. this.manifest_.gapCount++;
  3375. partialStatus = shaka.media.SegmentReference.Status.MISSING;
  3376. somePartialSegmentWithGap = true;
  3377. }
  3378. let uris = null;
  3379. const getPartialUris = () => {
  3380. if (uris == null) {
  3381. goog.asserts.assert(pUri, 'Partial uri should be defined!');
  3382. uris = shaka.hls.Utils.constructSegmentUris(
  3383. getUris(), pUri, variables);
  3384. }
  3385. return uris;
  3386. };
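// When the platform and config support it and this part has explicit
// byte-range info, remember its URI getter so the parent segment can later
// be fetched as a single byte-range request.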
  3387. if (byterangeOptimizationSupport &&
  3388. pStartByte >= 0 && pEndByte != null) {
  3389. getUrisOptimization = getPartialUris;
  3390. segmentWithByteRangeOptimization = true;
  3391. }
  3392. const partial = new shaka.media.SegmentReference(
  3393. pStartTime,
  3394. pEndTime,
  3395. getPartialUris,
  3396. pStartByte,
  3397. pEndByte,
  3398. initSegmentReference,
  3399. /* timestampOffset= */ 0,
  3400. /* appendWindowStart= */ 0,
  3401. /* appendWindowEnd= */ Infinity,
  3402. /* partialReferences= */ [],
  3403. /* tilesLayout= */ '',
  3404. /* tileDuration= */ null,
  3405. partialSyncTime,
  3406. partialStatus,
  3407. aesKey);
  3408. if (item.name == 'EXT-X-PRELOAD-HINT') {
  3409. partial.markAsPreload();
  3410. isPreloadSegment = true;
  3411. }
3412. // The spec doesn't say that we can assume INDEPENDENT=YES for the
3413. // first partial segment. It does call the flag "optional", though, and
3414. // in cases where there are no such flags on any partial segments, it
3415. // is sensible to assume the first one is independent.
  3416. if (item.getAttributeValue('INDEPENDENT') != 'YES' && i > 0) {
  3417. partial.markAsNonIndependent();
  3418. }
  3419. const pMimeType = getMimeType(pUri);
  3420. if (pMimeType) {
  3421. partial.mimeType = pMimeType;
  3422. if (HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_.has(pMimeType)) {
  3423. partial.initSegmentReference = null;
  3424. }
  3425. }
  3426. partialSegmentRefs.push(partial);
  3427. if (partialSyncTime) {
  3428. partialSyncTime += pDuration;
  3429. }
  3430. } // for-loop of hlsSegment.partialSegments
  3431. }
  3432. // If the segment has EXTINF tag, set the segment's end time, start byte
  3433. // and end byte based on the duration and byterange information.
  3434. // Otherwise, calculate the end time, start / end byte based on its partial
  3435. // segments.
  3436. // Note that the sum of partial segments durations may be slightly different
  3437. // from the parent segment's duration. In this case, use the duration from
  3438. // the parent segment tag.
  3439. if (extinfTag) {
  3440. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  3441. // We're interested in the duration part.
  3442. const extinfValues = extinfTag.value.split(',');
  3443. const duration = Number(extinfValues[0]);
  3444. // Skip segments without duration
  3445. if (duration == 0) {
  3446. return null;
  3447. }
  3448. endTime = startTime + duration;
  3449. } else if (partialSegmentRefs.length) {
  3450. endTime = partialSegmentRefs[partialSegmentRefs.length - 1].endTime;
  3451. } else {
3452. // Skip segments without a duration and without partial segments
  3453. return null;
  3454. }
  3455. if (segmentWithByteRangeOptimization) {
  3456. // We cannot optimize segments with gaps, or with a start byte that is
  3457. // not 0.
  3458. if (somePartialSegmentWithGap || partialSegmentRefs[0].startByte != 0) {
  3459. segmentWithByteRangeOptimization = false;
  3460. getUrisOptimization = null;
  3461. } else {
  3462. partialSegmentRefs = [];
  3463. }
  3464. }
3465. // If the segment has an EXT-X-BYTERANGE tag, set the start byte and end
3466. // byte based on the byterange information. If the segment has no
3467. // EXT-X-BYTERANGE tag and has partial segments, set the start byte and
3468. // end byte based on the partial segments.
  3469. const byterangeTag =
  3470. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-BYTERANGE');
  3471. if (byterangeTag) {
  3472. [startByte, endByte] =
  3473. this.parseByteRange_(previousReference, byterangeTag.value);
  3474. } else if (partialSegmentRefs.length) {
  3475. startByte = partialSegmentRefs[0].startByte;
  3476. endByte = partialSegmentRefs[partialSegmentRefs.length - 1].endByte;
  3477. }
  3478. let tilesLayout = '';
  3479. let tileDuration = null;
  3480. if (type == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  3481. // By default in HLS the tilesLayout is 1x1
  3482. tilesLayout = '1x1';
  3483. const tilesTag =
  3484. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-TILES');
  3485. if (tilesTag) {
  3486. tilesLayout = tilesTag.getRequiredAttrValue('LAYOUT');
  3487. const duration = tilesTag.getAttributeValue('DURATION');
  3488. if (duration) {
  3489. tileDuration = Number(duration);
  3490. }
  3491. }
  3492. }
  3493. let uris = null;
  3494. const getSegmentUris = () => {
  3495. if (getUrisOptimization) {
  3496. return getUrisOptimization();
  3497. }
  3498. if (uris == null) {
  3499. uris = shaka.hls.Utils.constructSegmentUris(getUris(),
  3500. hlsSegment.verbatimSegmentUri, variables);
  3501. }
  3502. return uris || [];
  3503. };
  3504. const allPartialSegments = partialSegmentRefs.length > 0 &&
  3505. !!hlsSegment.verbatimSegmentUri;
  3506. const reference = new shaka.media.SegmentReference(
  3507. startTime,
  3508. endTime,
  3509. getSegmentUris,
  3510. startByte,
  3511. endByte,
  3512. initSegmentReference,
  3513. /* timestampOffset= */ 0,
  3514. /* appendWindowStart= */ 0,
  3515. /* appendWindowEnd= */ Infinity,
  3516. partialSegmentRefs,
  3517. tilesLayout,
  3518. tileDuration,
  3519. syncTime,
  3520. status,
  3521. aesKey,
  3522. allPartialSegments,
  3523. );
  3524. const mimeType = getMimeType(hlsSegment.verbatimSegmentUri);
  3525. if (mimeType) {
  3526. reference.mimeType = mimeType;
  3527. if (HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_.has(mimeType)) {
  3528. reference.initSegmentReference = null;
  3529. }
  3530. }
  3531. if (segmentWithByteRangeOptimization) {
  3532. this.lowLatencyByterangeOptimization_ = true;
  3533. reference.markAsByterangeOptimization();
  3534. if (isPreloadSegment) {
  3535. reference.markAsPreload();
  3536. }
  3537. }
  3538. return reference;
  3539. }
  3540. /**
  3541. * Parse the startByte and endByte.
  3542. * @param {shaka.media.SegmentReference} previousReference
  3543. * @param {?string} byterange
  3544. * @return {!Array.<number>} An array with the start byte and end byte.
  3545. * @private
  3546. */
  3547. parseByteRange_(previousReference, byterange) {
  3548. let startByte = 0;
  3549. let endByte = null;
  3550. // If BYTERANGE is not specified, the segment consists of the entire
  3551. // resource.
  3552. if (byterange) {
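// The BYTERANGE value has the form '<length>[@<offset>]'. If the offset is
// omitted, the range starts right after the previous segment's byte range.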
  3553. const blocks = byterange.split('@');
  3554. const byteLength = Number(blocks[0]);
  3555. if (blocks[1]) {
  3556. startByte = Number(blocks[1]);
  3557. } else {
  3558. goog.asserts.assert(previousReference,
  3559. 'Cannot refer back to previous HLS segment!');
  3560. startByte = previousReference.endByte + 1;
  3561. }
  3562. endByte = startByte + byteLength - 1;
  3563. }
  3564. return [startByte, endByte];
  3565. }
  3566. /**
  3567. * @param {!Array.<!shaka.hls.Tag>} tags
  3568. * @param {string} contentType
  3569. * @param {!Map.<string, string>} variables
  3570. * @param {function():!Array.<string>} getUris
  3571. * @private
  3572. */
  3573. processDateRangeTags_(tags, contentType, variables, getUris) {
  3574. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  3575. if (contentType != ContentType.VIDEO && contentType != ContentType.AUDIO) {
  3576. // DATE-RANGE should only appear in AUDIO or VIDEO playlists.
  3577. // We ignore those that appear in other playlists.
  3578. return;
  3579. }
  3580. const Utils = shaka.hls.Utils;
  3581. const initialProgramDateTime =
  3582. this.presentationTimeline_.getInitialProgramDateTime();
  3583. if (!initialProgramDateTime ||
  3584. this.ignoreManifestProgramDateTimeFor_(contentType)) {
  3585. return;
  3586. }
  3587. let dateRangeTags =
  3588. shaka.hls.Utils.filterTagsByName(tags, 'EXT-X-DATERANGE');
  3589. dateRangeTags = dateRangeTags.filter((tag) => {
  3590. return tag.getAttribute('START-DATE') != null;
  3591. }).sort((a, b) => {
  3592. const aStartDateValue = a.getRequiredAttrValue('START-DATE');
  3593. const bStartDateValue = b.getRequiredAttrValue('START-DATE');
  3594. if (aStartDateValue < bStartDateValue) {
  3595. return -1;
  3596. }
  3597. if (aStartDateValue > bStartDateValue) {
  3598. return 1;
  3599. }
  3600. return 0;
  3601. });
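// Process the DATERANGE tags in START-DATE order, emitting metadata for
// each tag ID only once across playlist updates.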
  3602. for (let i = 0; i < dateRangeTags.length; i++) {
  3603. const tag = dateRangeTags[i];
  3604. try {
  3605. const id = tag.getRequiredAttrValue('ID');
  3606. if (this.dateRangeIdsEmitted_.has(id)) {
  3607. continue;
  3608. }
  3609. const startDateValue = tag.getRequiredAttrValue('START-DATE');
  3610. const startDate = shaka.util.TXml.parseDate(startDateValue);
  3611. if (isNaN(startDate)) {
  3612. // Invalid START-DATE
  3613. continue;
  3614. }
  3615. goog.asserts.assert(startDate != null,
  3616. 'Start date should not be null!');
  3617. const startTime = Math.max(0, startDate - initialProgramDateTime);
  3618. let endTime = null;
  3619. const endDateValue = tag.getAttributeValue('END-DATE');
  3620. if (endDateValue) {
  3621. const endDate = shaka.util.TXml.parseDate(endDateValue);
  3622. if (!isNaN(endDate)) {
  3623. goog.asserts.assert(endDate != null,
  3624. 'End date should not be null!');
  3625. endTime = endDate - initialProgramDateTime;
  3626. if (endTime < 0) {
  3627. // Date range in the past
  3628. continue;
  3629. }
  3630. }
  3631. }
  3632. if (endTime == null) {
  3633. const durationValue = tag.getAttributeValue('DURATION') ||
  3634. tag.getAttributeValue('PLANNED-DURATION');
  3635. if (durationValue) {
  3636. const duration = parseFloat(durationValue);
  3637. if (!isNaN(duration)) {
  3638. endTime = startTime + duration;
  3639. }
  3640. const realEndTime = startDate - initialProgramDateTime + duration;
  3641. if (realEndTime < 0) {
  3642. // Date range in the past
  3643. continue;
  3644. }
  3645. }
  3646. }
  3647. const type =
  3648. tag.getAttributeValue('CLASS') || 'com.apple.quicktime.HLS';
  3649. const endOnNext = tag.getAttributeValue('END-ON-NEXT') == 'YES';
  3650. if (endTime == null && endOnNext) {
  3651. for (let j = i + 1; j < dateRangeTags.length; j++) {
  3652. const otherDateRangeType =
  3653. dateRangeTags[j].getAttributeValue('CLASS') ||
  3654. 'com.apple.quicktime.HLS';
  3655. if (type != otherDateRangeType) {
  3656. continue;
  3657. }
  3658. const otherDateRangeStartDateValue =
  3659. dateRangeTags[j].getRequiredAttrValue('START-DATE');
  3660. const otherDateRangeStartDate =
  3661. shaka.util.TXml.parseDate(otherDateRangeStartDateValue);
  3662. if (isNaN(otherDateRangeStartDate)) {
  3663. // Invalid START-DATE
  3664. continue;
  3665. }
  3666. if (otherDateRangeStartDate &&
  3667. otherDateRangeStartDate > startDate) {
  3668. endTime = Math.max(0,
  3669. otherDateRangeStartDate - initialProgramDateTime);
  3670. break;
  3671. }
  3672. }
  3673. if (endTime == null) {
3674. // Since we cannot know when it ends, omit it for now; a future
3675. // playlist update may provide more information.
  3676. continue;
  3677. }
  3678. }
  3679. // Exclude these attributes from the metadata since they already go into
3680. // other fields (e.g. startTime or endTime) or are not necessary.
  3681. const excludedAttributes = [
  3682. 'CLASS',
  3683. 'START-DATE',
  3684. 'END-DATE',
  3685. 'DURATION',
  3686. 'END-ON-NEXT',
  3687. ];
3688. /** @type {!Array.<shaka.extern.MetadataFrame>} */
  3689. const values = [];
  3690. for (const attribute of tag.attributes) {
  3691. if (excludedAttributes.includes(attribute.name)) {
  3692. continue;
  3693. }
  3694. let data = Utils.variableSubstitution(attribute.value, variables);
  3695. if (attribute.name == 'X-ASSET-URI' ||
  3696. attribute.name == 'X-ASSET-LIST') {
  3697. data = Utils.constructSegmentUris(
  3698. getUris(), attribute.value, variables)[0];
  3699. }
  3700. const metadataFrame = {
  3701. key: attribute.name,
  3702. description: '',
  3703. data,
  3704. mimeType: null,
  3705. pictureType: null,
  3706. };
  3707. values.push(metadataFrame);
  3708. }
3709. // ID is always required, so we need more than 1 value to have any other metadata.
  3710. if (values.length > 1) {
  3711. this.playerInterface_.onMetadata(type, startTime, endTime, values);
  3712. }
  3713. this.dateRangeIdsEmitted_.add(id);
  3714. } catch (e) {
3715. shaka.log.warning('Ignoring DATERANGE with errors', tag.toString());
  3716. }
  3717. }
  3718. }
  3719. /**
3720. * Parses shaka.hls.Segment objects into shaka.media.SegmentReferences and
3721. * gets the bandwidth necessary for these segments, if it's defined in the
3722. * playlist.
  3723. *
  3724. * @param {!shaka.hls.Playlist} playlist
  3725. * @param {!Map.<number, number>} mediaSequenceToStartTime
  3726. * @param {!Map.<string, string>} variables
  3727. * @param {function():!Array.<string>} getUris
  3728. * @param {string} type
  3729. * @return {{segments: !Array.<!shaka.media.SegmentReference>,
  3730. * bandwidth: (number|undefined)}}
  3731. * @private
  3732. */
  3733. createSegments_(playlist, mediaSequenceToStartTime, variables,
  3734. getUris, type) {
  3735. /** @type {Array.<!shaka.hls.Segment>} */
  3736. const hlsSegments = playlist.segments;
  3737. goog.asserts.assert(hlsSegments.length, 'Playlist should have segments!');
  3738. /** @type {shaka.media.InitSegmentReference} */
  3739. let initSegmentRef;
  3740. /** @type {shaka.extern.aesKey|undefined} */
  3741. let aesKey = undefined;
  3742. let discontinuitySequence = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3743. playlist.tags, 'EXT-X-DISCONTINUITY-SEQUENCE', -1);
  3744. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3745. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  3746. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  3747. playlist.tags, 'EXT-X-SKIP');
  3748. const skippedSegments =
  3749. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
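// With EXT-X-SKIP (playlist delta updates), |skippedSegments| older segments
// were omitted from the playlist, so the first listed segment's media
// sequence number is offset by that amount.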
  3750. let position = mediaSequenceNumber + skippedSegments;
  3751. let firstStartTime = 0;
3752. // For live streams, use the cached value in the mediaSequenceToStartTime
  3753. // map if available.
  3754. if (this.isLive_() && mediaSequenceToStartTime.has(position)) {
  3755. firstStartTime = mediaSequenceToStartTime.get(position);
  3756. }
  3757. // This is for recovering from disconnects.
  3758. if (firstStartTime === 0 &&
  3759. this.presentationType_ == shaka.hls.HlsParser.PresentationType_.LIVE &&
  3760. mediaSequenceToStartTime.size > 0 &&
  3761. !mediaSequenceToStartTime.has(position) &&
  3762. this.presentationTimeline_.getPresentationStartTime() != null) {
  3763. firstStartTime = this.presentationTimeline_.getSegmentAvailabilityStart();
  3764. }
  3765. /** @type {!Array.<!shaka.media.SegmentReference>} */
  3766. const references = [];
  3767. let previousReference = null;
  3768. /** @type {!Array.<{bitrate: number, duration: number}>} */
  3769. const bitrates = [];
  3770. for (let i = 0; i < hlsSegments.length; i++) {
  3771. const item = hlsSegments[i];
  3772. const startTime =
  3773. (i == 0) ? firstStartTime : previousReference.endTime;
  3774. position = mediaSequenceNumber + skippedSegments + i;
  3775. const discontinuityTag = shaka.hls.Utils.getFirstTagWithName(
  3776. item.tags, 'EXT-X-DISCONTINUITY');
  3777. if (discontinuityTag) {
  3778. discontinuitySequence++;
  3779. }
3780. // Apply new AES key tags as they appear, carrying the latest key forward.
  3781. for (const drmTag of item.tags) {
  3782. if (drmTag.name == 'EXT-X-KEY') {
  3783. if (this.isAesMethod_(drmTag.getRequiredAttrValue('METHOD'))) {
  3784. aesKey =
  3785. this.parseAESDrmTag_(drmTag, playlist, getUris, variables);
  3786. } else {
  3787. aesKey = undefined;
  3788. }
  3789. }
  3790. }
  3791. mediaSequenceToStartTime.set(position, startTime);
  3792. initSegmentRef = this.getInitSegmentReference_(playlist,
  3793. item.tags, getUris, variables);
  3794. // If the stream is low latency and the user has not configured the
  3795. // lowLatencyMode, but if it has been configured to activate the
  3796. // lowLatencyMode if a stream of this type is detected, we automatically
  3797. // activate the lowLatencyMode.
  3798. if (!this.lowLatencyMode_) {
  3799. const autoLowLatencyMode = this.playerInterface_.isAutoLowLatencyMode();
  3800. if (autoLowLatencyMode) {
  3801. this.playerInterface_.enableLowLatencyMode();
  3802. this.lowLatencyMode_ = this.playerInterface_.isLowLatencyMode();
  3803. }
  3804. }
  3805. const reference = this.createSegmentReference_(
  3806. initSegmentRef,
  3807. previousReference,
  3808. item,
  3809. startTime,
  3810. variables,
  3811. playlist,
  3812. type,
  3813. getUris,
  3814. aesKey);
  3815. if (reference) {
  3816. const bitrate = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3817. item.tags, 'EXT-X-BITRATE');
  3818. if (bitrate) {
  3819. bitrates.push({
  3820. bitrate,
  3821. duration: reference.endTime - reference.startTime,
  3822. });
  3823. } else if (bitrates.length) {
3824. // An EXT-X-BITRATE applies to every segment between it and the next
3825. // EXT-X-BITRATE, so extend the duration of the latest bitrate entry.
  3826. const prevBitrate = bitrates.pop();
  3827. prevBitrate.duration += reference.endTime - reference.startTime;
  3828. bitrates.push(prevBitrate);
  3829. }
  3830. previousReference = reference;
  3831. reference.discontinuitySequence = discontinuitySequence;
  3832. if (this.ignoreManifestProgramDateTimeFor_(type) &&
  3833. this.minSequenceNumber_ != null &&
  3834. position < this.minSequenceNumber_) {
  3835. // This segment is ignored as part of our fallback synchronization
  3836. // method.
  3837. } else {
  3838. references.push(reference);
  3839. }
  3840. }
  3841. }
  3842. let bandwidth = undefined;
  3843. if (bitrates.length) {
  3844. const duration = bitrates.reduce((sum, value) => {
  3845. return sum + value.duration;
  3846. }, 0);
  3847. bandwidth = Math.round(bitrates.reduce((sum, value) => {
  3848. return sum + value.bitrate * value.duration;
  3849. }, 0) / duration * 1000);
  3850. }
  3851. // If some segments have sync times, but not all, extrapolate the sync
  3852. // times of the ones with none.
  3853. const someSyncTime = references.some((ref) => ref.syncTime != null);
  3854. if (someSyncTime) {
  3855. for (let i = 0; i < references.length; i++) {
  3856. const reference = references[i];
  3857. if (reference.syncTime != null) {
  3858. // No need to extrapolate.
  3859. continue;
  3860. }
  3861. // Find the nearest segment with syncTime, in either direction.
3862. // This looks forward and backward simultaneously, keeping track of the
3863. // offset to apply to the syncTime it finds as it goes.
  3864. let forwardAdd = 0;
  3865. let forwardI = i;
  3866. /**
  3867. * Look forwards one reference at a time, summing all durations as we
  3868. * go, until we find a reference with a syncTime to use as a basis.
  3869. * This DOES count the original reference, but DOESN'T count the first
  3870. * reference with a syncTime (as we approach it from behind).
  3871. * @return {?number}
  3872. */
  3873. const lookForward = () => {
  3874. const other = references[forwardI];
  3875. if (other) {
  3876. if (other.syncTime != null) {
  3877. return other.syncTime + forwardAdd;
  3878. }
  3879. forwardAdd -= other.endTime - other.startTime;
  3880. forwardI += 1;
  3881. }
  3882. return null;
  3883. };
  3884. let backwardAdd = 0;
  3885. let backwardI = i;
  3886. /**
  3887. * Look backwards one reference at a time, summing all durations as we
  3888. * go, until we find a reference with a syncTime to use as a basis.
  3889. * This DOESN'T count the original reference, but DOES count the first
  3890. * reference with a syncTime (as we approach it from ahead).
  3891. * @return {?number}
  3892. */
  3893. const lookBackward = () => {
  3894. const other = references[backwardI];
  3895. if (other) {
  3896. if (other != reference) {
  3897. backwardAdd += other.endTime - other.startTime;
  3898. }
  3899. if (other.syncTime != null) {
  3900. return other.syncTime + backwardAdd;
  3901. }
  3902. backwardI -= 1;
  3903. }
  3904. return null;
  3905. };
  3906. while (reference.syncTime == null) {
  3907. reference.syncTime = lookBackward();
  3908. if (reference.syncTime == null) {
  3909. reference.syncTime = lookForward();
  3910. }
  3911. }
  3912. }
  3913. }
  3914. // Split the sync times properly among partial segments.
  3915. if (someSyncTime) {
  3916. for (const reference of references) {
  3917. let syncTime = reference.syncTime;
  3918. for (const partial of reference.partialReferences) {
  3919. partial.syncTime = syncTime;
  3920. syncTime += partial.endTime - partial.startTime;
  3921. }
  3922. }
  3923. }
  3924. // lowestSyncTime is a value from a previous playlist update. Use it to
  3925. // set reference start times. If this is the first playlist parse, we will
  3926. // skip this step, and wait until we have sync time across stream types.
  3927. const lowestSyncTime = this.lowestSyncTime_;
  3928. if (someSyncTime && lowestSyncTime != Infinity) {
  3929. if (!this.ignoreManifestProgramDateTimeFor_(type)) {
  3930. for (const reference of references) {
  3931. reference.syncAgainst(lowestSyncTime);
  3932. }
  3933. }
  3934. }
  3935. return {
  3936. segments: references,
  3937. bandwidth,
  3938. };
  3939. }
  3940. /**
3941. * Attempts to guess the stream's mime type based on the content type and codecs.
  3942. *
  3943. * @param {string} contentType
  3944. * @param {string} codecs
  3945. * @return {?string}
  3946. * @private
  3947. */
  3948. guessMimeTypeBeforeLoading_(contentType, codecs) {
  3949. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  3950. if (codecs == 'vtt' || codecs == 'wvtt') {
  3951. // If codecs is 'vtt', it's WebVTT.
  3952. return 'text/vtt';
  3953. } else if (codecs && codecs !== '') {
  3954. // Otherwise, assume MP4-embedded text, since text-based formats tend
  3955. // not to have a codecs string at all.
  3956. return 'application/mp4';
  3957. }
  3958. }
  3959. if (contentType == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  3960. if (!codecs || codecs == 'jpeg') {
  3961. return 'image/jpeg';
  3962. }
  3963. }
  3964. if (contentType == shaka.util.ManifestParserUtils.ContentType.AUDIO) {
  3965. // See: https://bugs.chromium.org/p/chromium/issues/detail?id=489520
  3966. if (codecs == 'mp4a.40.34') {
  3967. return 'audio/mpeg';
  3968. }
  3969. }
  3970. if (codecs == 'mjpg') {
  3971. return 'application/mp4';
  3972. }
  3973. // Not enough information to guess from the content type and codecs.
  3974. return null;
  3975. }
  3976. /**
  3977. * Get a fallback mime type for the content. Used if all the better methods
  3978. * for determining the mime type have failed.
  3979. *
  3980. * @param {string} contentType
  3981. * @return {string}
  3982. * @private
  3983. */
  3984. guessMimeTypeFallback_(contentType) {
  3985. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  3986. // If there was no codecs string and no content-type, assume HLS text
  3987. // streams are WebVTT.
  3988. return 'text/vtt';
  3989. }
  3990. // If the HLS content is lacking in both MIME type metadata and
  3991. // segment file extensions, we fall back to assuming it's MP4.
  3992. const map = shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  3993. return map['mp4'];
  3994. }
  3995. /**
  3996. * @param {!Array.<!shaka.media.SegmentReference>} segments
  3997. * @return {{segment: !shaka.media.SegmentReference, segmentIndex: number}}
  3998. * @private
  3999. */
  4000. getAvailableSegment_(segments) {
  4001. goog.asserts.assert(segments.length, 'Should have segments!');
  4002. // If you wait long enough, requesting the first segment can fail
  4003. // because it has fallen off the left edge of DVR, so to be safer,
  4004. // let's request the middle segment.
  4005. let segmentIndex = this.isLive_() ?
  4006. Math.trunc((segments.length - 1) / 2) : 0;
  4007. let segment = segments[segmentIndex];
  4008. while (segment.getStatus() == shaka.media.SegmentReference.Status.MISSING &&
  4009. (segmentIndex + 1) < segments.length) {
4010. segmentIndex++;
  4011. segment = segments[segmentIndex];
  4012. }
  4013. return {segment, segmentIndex};
  4014. }
  4015. /**
  4016. * Attempts to guess stream's mime type.
  4017. *
  4018. * @param {string} contentType
  4019. * @param {string} codecs
  4020. * @param {!Array.<!shaka.media.SegmentReference>} segments
  4021. * @return {!Promise.<string>}
  4022. * @private
  4023. */
  4024. async guessMimeType_(contentType, codecs, segments) {
  4025. const HlsParser = shaka.hls.HlsParser;
  4026. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  4027. const {segment} = this.getAvailableSegment_(segments);
  4028. if (segment.status == shaka.media.SegmentReference.Status.MISSING) {
  4029. return this.guessMimeTypeFallback_(contentType);
  4030. }
  4031. const segmentUris = segment.getUris();
  4032. const parsedUri = new goog.Uri(segmentUris[0]);
  4033. const extension = parsedUri.getPath().split('.').pop();
  4034. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  4035. let mimeType = map[extension];
  4036. if (mimeType) {
  4037. return mimeType;
  4038. }
  4039. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  4040. if (mimeType) {
  4041. return mimeType;
  4042. }
  4043. // The extension map didn't work, so guess based on codecs.
  4044. mimeType = this.guessMimeTypeBeforeLoading_(contentType, codecs);
  4045. if (mimeType) {
  4046. return mimeType;
  4047. }
  4048. // If unable to guess mime type, request a segment and try getting it
  4049. // from the response.
  4050. let contentMimeType;
  4051. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  4052. const headRequest = shaka.net.NetworkingEngine.makeRequest(
  4053. segmentUris, this.config_.retryParameters);
  4054. try {
  4055. headRequest.method = 'HEAD';
  4056. const response = await this.makeNetworkRequest_(
  4057. headRequest, requestType, {type}).promise;
  4058. contentMimeType = response.headers['content-type'];
  4059. } catch (error) {
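// Some servers do not support HEAD requests. If the HEAD request failed
// with an HTTP error, retry once as a GET request.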
  4060. if (error &&
  4061. (error.code == shaka.util.Error.Code.HTTP_ERROR ||
  4062. error.code == shaka.util.Error.Code.BAD_HTTP_STATUS)) {
  4063. headRequest.method = 'GET';
  4064. const response = await this.makeNetworkRequest_(
  4065. headRequest, requestType, {type}).promise;
  4066. contentMimeType = response.headers['content-type'];
  4067. }
  4068. }
  4069. if (contentMimeType) {
  4070. // Split the MIME type in case the server sent additional parameters.
  4071. return contentMimeType.toLowerCase().split(';')[0];
  4072. }
  4073. return this.guessMimeTypeFallback_(contentType);
  4074. }
  4075. /**
  4076. * Returns a tag with a given name.
  4077. * Throws an error if tag was not found.
  4078. *
  4079. * @param {!Array.<shaka.hls.Tag>} tags
  4080. * @param {string} tagName
  4081. * @return {!shaka.hls.Tag}
  4082. * @private
  4083. */
  4084. getRequiredTag_(tags, tagName) {
  4085. const tag = shaka.hls.Utils.getFirstTagWithName(tags, tagName);
  4086. if (!tag) {
  4087. throw new shaka.util.Error(
  4088. shaka.util.Error.Severity.CRITICAL,
  4089. shaka.util.Error.Category.MANIFEST,
  4090. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, tagName);
  4091. }
  4092. return tag;
  4093. }
  4094. /**
  4095. * @param {shaka.extern.Stream} stream
  4096. * @param {?string} width
  4097. * @param {?string} height
  4098. * @param {?string} frameRate
  4099. * @param {?string} videoRange
  4100. * @param {?string} videoLayout
  4101. * @param {?string} colorGamut
  4102. * @private
  4103. */
  4104. addVideoAttributes_(stream, width, height, frameRate, videoRange,
  4105. videoLayout, colorGamut) {
  4106. if (stream) {
  4107. stream.width = Number(width) || undefined;
  4108. stream.height = Number(height) || undefined;
  4109. stream.frameRate = Number(frameRate) || undefined;
  4110. stream.hdr = videoRange || undefined;
  4111. stream.videoLayout = videoLayout || undefined;
  4112. stream.colorGamut = colorGamut || undefined;
  4113. }
  4114. }
  4115. /**
  4116. * Makes a network request for the manifest and returns a Promise
  4117. * with the resulting data.
  4118. *
  4119. * @param {!Array.<string>} uris
  4120. * @param {boolean=} isPlaylist
  4121. * @return {!shaka.net.NetworkingEngine.PendingRequest}
  4122. * @private
  4123. */
  4124. requestManifest_(uris, isPlaylist) {
  4125. const requestType = shaka.net.NetworkingEngine.RequestType.MANIFEST;
  4126. const request = shaka.net.NetworkingEngine.makeRequest(
  4127. uris, this.config_.retryParameters);
  4128. const type = isPlaylist ?
  4129. shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_PLAYLIST :
  4130. shaka.net.NetworkingEngine.AdvancedRequestType.MASTER_PLAYLIST;
  4131. return this.makeNetworkRequest_(request, requestType, {type});
  4132. }
  4133. /**
  4134. * Called when the update timer ticks. Because parsing a manifest is async,
4135. * this method is async. To work with this, this method schedules the next
4136. * update when it finishes instead of relying on a repeating timer.
  4137. *
  4138. * @return {!Promise}
  4139. * @private
  4140. */
  4141. async onUpdate_() {
  4142. shaka.log.info('Updating manifest...');
  4143. goog.asserts.assert(
  4144. this.getUpdatePlaylistDelay_() > 0,
4145. 'We should only call |onUpdate_| when we are supposed to be updating.');
  4146. // Detect a call to stop()
  4147. if (!this.playerInterface_) {
  4148. return;
  4149. }
  4150. try {
  4151. const startTime = Date.now();
  4152. await this.update();
4153. // Keep track of how long manifest updates take, on average.
  4154. const endTime = Date.now();
  4155. // This may have converted to VOD, in which case we stop updating.
  4156. if (this.isLive_()) {
  4157. const updateDuration = (endTime - startTime) / 1000.0;
  4158. this.averageUpdateDuration_.sample(1, updateDuration);
  4159. const delay = this.getUpdatePlaylistDelay_();
  4160. const finalDelay = Math.max(0,
  4161. delay - this.averageUpdateDuration_.getEstimate());
  4162. this.updatePlaylistTimer_.tickAfter(/* seconds= */ finalDelay);
  4163. }
  4164. } catch (error) {
  4165. // Detect a call to stop() during this.update()
  4166. if (!this.playerInterface_) {
  4167. return;
  4168. }
  4169. goog.asserts.assert(error instanceof shaka.util.Error,
  4170. 'Should only receive a Shaka error');
  4171. if (this.config_.raiseFatalErrorOnManifestUpdateRequestFailure) {
  4172. this.playerInterface_.onError(error);
  4173. return;
  4174. }
  4175. // We will retry updating, so override the severity of the error.
  4176. error.severity = shaka.util.Error.Severity.RECOVERABLE;
  4177. this.playerInterface_.onError(error);
  4178. // Try again very soon.
  4179. this.updatePlaylistTimer_.tickAfter(/* seconds= */ 0.1);
  4180. }
  4181. // Detect a call to stop()
  4182. if (!this.playerInterface_) {
  4183. return;
  4184. }
  4185. this.playerInterface_.onManifestUpdated();
  4186. }
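// Worked example (numbers are illustrative): with a playlist-reload delay of
// 6 seconds and an EWMA estimate of 0.5 seconds per update, the timer above
// is scheduled for max(0, 6 - 0.5) = 5.5 seconds, so reloads stay roughly one
// delay interval apart despite the time spent fetching and parsing.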
  4187. /**
  4188. * @return {boolean}
  4189. * @private
  4190. */
  4191. isLive_() {
  4192. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  4193. return this.presentationType_ != PresentationType.VOD;
  4194. }
  4195. /**
  4196. * @return {number}
  4197. * @private
  4198. */
  4199. getUpdatePlaylistDelay_() {
  4200. // The HLS spec (RFC 8216) states in 6.3.4:
  4201. // "the client MUST wait for at least the target duration before
  4202. // attempting to reload the Playlist file again".
  4203. // For LL-HLS, the server must add a new partial segment to the Playlist
  4204. // every part target duration.
  4205. return this.lastTargetDuration_;
  4206. }
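// For example, a live playlist with #EXT-X-TARGETDURATION:6 yields a base
// reload delay of 6 seconds here; in low-latency mode, lastTargetDuration_ is
// expected to hold the (smaller) partial-segment target duration instead, so
// the playlist is reloaded roughly once per partial segment.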
  4207. /**
  4208. * @param {shaka.hls.HlsParser.PresentationType_} type
  4209. * @private
  4210. */
  4211. setPresentationType_(type) {
  4212. this.presentationType_ = type;
  4213. if (this.presentationTimeline_) {
  4214. this.presentationTimeline_.setStatic(!this.isLive_());
  4215. }
  4216. // If this manifest is not for live content, then we have no reason to
  4217. // update it.
  4218. if (!this.isLive_()) {
  4219. this.updatePlaylistTimer_.stop();
  4220. }
  4221. }
  4222. /**
  4223. * Create a networking request. This will manage the request using the
  4224. * parser's operation manager. If the parser has already been stopped, the
  4225. * request will not be made.
  4226. *
  4227. * @param {shaka.extern.Request} request
  4228. * @param {shaka.net.NetworkingEngine.RequestType} type
  4229. * @param {shaka.extern.RequestContext=} context
  4230. * @return {!shaka.net.NetworkingEngine.PendingRequest}
  4231. * @private
  4232. */
  4233. makeNetworkRequest_(request, type, context) {
  4234. if (!this.operationManager_) {
  4235. throw new shaka.util.Error(
  4236. shaka.util.Error.Severity.CRITICAL,
  4237. shaka.util.Error.Category.PLAYER,
  4238. shaka.util.Error.Code.OPERATION_ABORTED);
  4239. }
  4240. if (!context) {
  4241. context = {};
  4242. }
  4243. context.isPreload = this.isPreloadFn_();
  4244. const op = this.playerInterface_.networkingEngine.request(
  4245. type, request, context);
  4246. this.operationManager_.manage(op);
  4247. return op;
  4248. }
  4249. /**
  4250. * @param {string} method
  4251. * @return {boolean}
  4252. * @private
  4253. */
  4254. isAesMethod_(method) {
  4255. return method == 'AES-128' ||
  4256. method == 'AES-256' ||
  4257. method == 'AES-256-CTR';
  4258. }
  4259. /**
  4260. * @param {!shaka.hls.Tag} drmTag
  4261. * @param {string} mimeType
  4262. * @return {?shaka.extern.DrmInfo}
  4263. * @private
  4264. */
  4265. static fairplayDrmParser_(drmTag, mimeType) {
  4266. if (mimeType == 'video/mp2t') {
  4267. throw new shaka.util.Error(
  4268. shaka.util.Error.Severity.CRITICAL,
  4269. shaka.util.Error.Category.MANIFEST,
  4270. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  4271. }
  4272. if (shaka.util.Platform.isMediaKeysPolyfilled()) {
  4273. throw new shaka.util.Error(
  4274. shaka.util.Error.Severity.CRITICAL,
  4275. shaka.util.Error.Category.MANIFEST,
  4276. shaka.util.Error.Code
  4277. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  4278. }
  4279. const method = drmTag.getRequiredAttrValue('METHOD');
  4280. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4281. if (!VALID_METHODS.includes(method)) {
  4282. shaka.log.error('FairPlay in HLS is only supported with [',
  4283. VALID_METHODS.join(', '), '], not', method);
  4284. return null;
  4285. }
  4286. let encryptionScheme = 'cenc';
  4287. if (method == 'SAMPLE-AES') {
  4288. // It should be 'cbcs-1-9' but Safari doesn't support it.
  4289. // See: https://github.com/WebKit/WebKit/blob/main/Source/WebCore/Modules/encryptedmedia/MediaKeyEncryptionScheme.idl
  4290. encryptionScheme = 'cbcs';
  4291. }
  4292. /*
  4293. * Even if we're not able to construct initData through the HLS tag, adding
  4294. * a DRMInfo will allow DRM Engine to request a media key system access
  4295. * with the correct keySystem and initDataType
  4296. */
  4297. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4298. 'com.apple.fps', encryptionScheme, [
  4299. {initDataType: 'sinf', initData: new Uint8Array(0), keyId: null},
  4300. ], drmTag.getRequiredAttrValue('URI'));
  4301. return drmInfo;
  4302. }
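// Illustrative example of a key tag this parser handles (attribute values
// are hypothetical):
//   #EXT-X-KEY:METHOD=SAMPLE-AES,URI="skd://key-id-1234",
//       KEYFORMAT="com.apple.streamingkeydelivery",KEYFORMATVERSIONS="1"
// It is mapped to a DrmInfo for the 'com.apple.fps' key system with a 'sinf'
// init data type and an empty initData buffer; the tag's URI is passed
// through to createDrmInfo as well.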
  4303. /**
  4304. * @param {!shaka.hls.Tag} drmTag
  4305. * @return {?shaka.extern.DrmInfo}
  4306. * @private
  4307. */
  4308. static widevineDrmParser_(drmTag) {
  4309. const method = drmTag.getRequiredAttrValue('METHOD');
  4310. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4311. if (!VALID_METHODS.includes(method)) {
  4312. shaka.log.error('Widevine in HLS is only supported with [',
  4313. VALID_METHODS.join(', '), '], not', method);
  4314. return null;
  4315. }
  4316. let encryptionScheme = 'cenc';
  4317. if (method == 'SAMPLE-AES') {
  4318. encryptionScheme = 'cbcs';
  4319. }
  4320. const uri = drmTag.getRequiredAttrValue('URI');
  4321. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
  4322. // The data encoded in the URI is a PSSH box to be used as init data.
  4323. const pssh = shaka.util.BufferUtils.toUint8(parsedData.data);
  4324. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4325. 'com.widevine.alpha', encryptionScheme, [
  4326. {initDataType: 'cenc', initData: pssh},
  4327. ]);
  4328. const keyId = drmTag.getAttributeValue('KEYID');
  4329. if (keyId) {
  4330. const keyIdLowerCase = keyId.toLowerCase();
  4331. // This value should begin with '0x':
  4332. goog.asserts.assert(
  4333. keyIdLowerCase.startsWith('0x'), 'Incorrect KEYID format!');
  4334. // But the output should not contain the '0x':
  4335. drmInfo.keyIds = new Set([keyIdLowerCase.substr(2)]);
  4336. }
  4337. return drmInfo;
  4338. }
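// Illustrative example of a Widevine key tag (values are hypothetical):
//   #EXT-X-KEY:METHOD=SAMPLE-AES-CTR,
//       KEYFORMAT="urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed",
//       URI="data:text/plain;base64,<base64-encoded PSSH box>",
//       KEYID=0x6714A64E5E7ECB57A9E7E2D0B0A17BA8
// The base64 payload of the data: URI is used directly as 'cenc' init data,
// and the KEYID value (lowercased, without the '0x' prefix) becomes the key
// ID in the resulting DrmInfo.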
  4339. /**
  4340. * See: https://docs.microsoft.com/en-us/playready/packaging/mp4-based-formats-supported-by-playready-clients?tabs=case4
  4341. *
  4342. * @param {!shaka.hls.Tag} drmTag
  4343. * @return {?shaka.extern.DrmInfo}
  4344. * @private
  4345. */
  4346. static playreadyDrmParser_(drmTag) {
  4347. const method = drmTag.getRequiredAttrValue('METHOD');
  4348. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4349. if (!VALID_METHODS.includes(method)) {
  4350. shaka.log.error('PlayReady in HLS is only supported with [',
  4351. VALID_METHODS.join(', '), '], not', method);
  4352. return null;
  4353. }
  4354. let encryptionScheme = 'cenc';
  4355. if (method == 'SAMPLE-AES') {
  4356. encryptionScheme = 'cbcs';
  4357. }
  4358. const uri = drmTag.getRequiredAttrValue('URI');
  4359. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
4360. // The data encoded in the URI is a PlayReady Pro Object, so we need to
4361. // convert it to a PSSH box.
  4362. const data = shaka.util.BufferUtils.toUint8(parsedData.data);
  4363. const systemId = new Uint8Array([
  4364. 0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86,
  4365. 0xab, 0x92, 0xe6, 0x5b, 0xe0, 0x88, 0x5f, 0x95,
  4366. ]);
  4367. const keyIds = new Set();
  4368. const psshVersion = 0;
  4369. const pssh =
  4370. shaka.util.Pssh.createPssh(data, systemId, keyIds, psshVersion);
  4371. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4372. 'com.microsoft.playready', encryptionScheme, [
  4373. {initDataType: 'cenc', initData: pssh},
  4374. ]);
  4375. return drmInfo;
  4376. }
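// Illustrative example of a PlayReady key tag (values are hypothetical):
//   #EXT-X-KEY:METHOD=SAMPLE-AES-CTR,
//       KEYFORMAT="com.microsoft.playready",
//       URI="data:text/plain;base64,<base64-encoded PlayReady Pro Object>"
// Unlike the Widevine case, the payload is not a PSSH box, so the code above
// wraps it in a version-0 PSSH with the PlayReady system ID
// (9a04f079-9840-4286-ab92-e65be0885f95) before exposing it as 'cenc' init
// data in the DrmInfo.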
  4377. /**
  4378. * See: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-5.1
  4379. *
  4380. * @param {!shaka.hls.Tag} drmTag
  4381. * @param {string} mimeType
  4382. * @param {function():!Array.<string>} getUris
  4383. * @param {?shaka.media.InitSegmentReference} initSegmentRef
  4384. * @param {?Map.<string, string>=} variables
  4385. * @return {!Promise.<?shaka.extern.DrmInfo>}
  4386. * @private
  4387. */
  4388. async identityDrmParser_(drmTag, mimeType, getUris, initSegmentRef,
  4389. variables) {
  4390. if (mimeType == 'video/mp2t') {
  4391. throw new shaka.util.Error(
  4392. shaka.util.Error.Severity.CRITICAL,
  4393. shaka.util.Error.Category.MANIFEST,
  4394. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  4395. }
  4396. if (shaka.util.Platform.isMediaKeysPolyfilled()) {
  4397. throw new shaka.util.Error(
  4398. shaka.util.Error.Severity.CRITICAL,
  4399. shaka.util.Error.Category.MANIFEST,
  4400. shaka.util.Error.Code
  4401. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  4402. }
  4403. const method = drmTag.getRequiredAttrValue('METHOD');
  4404. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4405. if (!VALID_METHODS.includes(method)) {
  4406. shaka.log.error('Identity (ClearKey) in HLS is only supported with [',
  4407. VALID_METHODS.join(', '), '], not', method);
  4408. return null;
  4409. }
  4410. const keyUris = shaka.hls.Utils.constructSegmentUris(
  4411. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  4412. let key;
  4413. if (keyUris[0].startsWith('data:text/plain;base64,')) {
  4414. key = shaka.util.Uint8ArrayUtils.toHex(
  4415. shaka.util.Uint8ArrayUtils.fromBase64(
  4416. keyUris[0].split('data:text/plain;base64,').pop()));
  4417. } else {
  4418. const keyMapKey = keyUris.sort().join('');
  4419. if (!this.identityKeyMap_.has(keyMapKey)) {
  4420. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  4421. const request = shaka.net.NetworkingEngine.makeRequest(
  4422. keyUris, this.config_.retryParameters);
  4423. const keyResponse = this.makeNetworkRequest_(request, requestType)
  4424. .promise;
  4425. this.identityKeyMap_.set(keyMapKey, keyResponse);
  4426. }
  4427. const keyResponse = await this.identityKeyMap_.get(keyMapKey);
  4428. key = shaka.util.Uint8ArrayUtils.toHex(keyResponse.data);
  4429. }
4430. // NOTE: The ClearKey CDM requires a key-id-to-key mapping, but HLS doesn't
4431. // provide a key ID anywhere. So although we can use the 'URI' attribute
4432. // to fetch the actual 16-byte key, without a key ID we can't provide this
4433. // automatically to the ClearKey CDM. By default we assume a key ID of all
4434. // zeros, but we will try to get the key ID from the init segment.
4435. // If the application wants to override this behavior, it will have to use
4436. // player.configure('drm.clearKeys', { ... }) to provide the key IDs
4437. // and keys, or player.configure('drm.servers.org\.w3\.clearkey', ...) to
4438. // provide a ClearKey license server URI.
  4439. let keyId = '00000000000000000000000000000000';
  4440. if (initSegmentRef) {
  4441. let defaultKID;
  4442. if (this.identityKidMap_.has(initSegmentRef)) {
  4443. defaultKID = this.identityKidMap_.get(initSegmentRef);
  4444. } else {
  4445. const initSegmentRequest = shaka.util.Networking.createSegmentRequest(
  4446. initSegmentRef.getUris(),
  4447. initSegmentRef.getStartByte(),
  4448. initSegmentRef.getEndByte(),
  4449. this.config_.retryParameters);
  4450. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  4451. const initType =
  4452. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  4453. const initResponse = await this.makeNetworkRequest_(
  4454. initSegmentRequest, requestType, {type: initType}).promise;
  4455. defaultKID = shaka.media.SegmentUtils.getDefaultKID(
  4456. initResponse.data);
  4457. this.identityKidMap_.set(initSegmentRef, defaultKID);
  4458. }
  4459. if (defaultKID) {
  4460. keyId = defaultKID;
  4461. }
  4462. }
  4463. const clearkeys = new Map();
  4464. clearkeys.set(keyId, key);
  4465. let encryptionScheme = 'cenc';
  4466. if (method == 'SAMPLE-AES') {
  4467. encryptionScheme = 'cbcs';
  4468. }
  4469. return shaka.util.ManifestParserUtils.createDrmInfoFromClearKeys(
  4470. clearkeys, encryptionScheme);
  4471. }
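// If the all-zeros fallback key ID above is wrong for a given stream, an
// application can, for example, supply the mapping itself:
//   player.configure('drm.clearKeys', {
//     // Hypothetical 32-hex-digit key ID mapped to a 32-hex-digit key:
//     'deadbeefdeadbeefdeadbeefdeadbeef': '02030507011013017019023029031037',
//   });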
  4472. };
  4473. /**
  4474. * @typedef {{
  4475. * stream: !shaka.extern.Stream,
  4476. * type: string,
  4477. * redirectUris: !Array.<string>,
  4478. * getUris: function():!Array.<string>,
  4479. * minTimestamp: number,
  4480. * maxTimestamp: number,
  4481. * mediaSequenceToStartTime: !Map.<number, number>,
  4482. * canSkipSegments: boolean,
  4483. * canBlockReload: boolean,
  4484. * hasEndList: boolean,
  4485. * firstSequenceNumber: number,
  4486. * nextMediaSequence: number,
  4487. * nextPart: number,
  4488. * loadedOnce: boolean
  4489. * }}
  4490. *
  4491. * @description
  4492. * Contains a stream and information about it.
  4493. *
  4494. * @property {!shaka.extern.Stream} stream
  4495. * The Stream itself.
  4496. * @property {string} type
  4497. * The type value. Could be 'video', 'audio', 'text', or 'image'.
  4498. * @property {!Array.<string>} redirectUris
  4499. * The redirect URIs.
  4500. * @property {function():!Array.<string>} getUris
4501. * The verbatim media playlist URIs, as they appeared in the master playlist.
  4502. * @property {number} minTimestamp
  4503. * The minimum timestamp found in the stream.
  4504. * @property {number} maxTimestamp
  4505. * The maximum timestamp found in the stream.
  4506. * @property {!Map.<number, number>} mediaSequenceToStartTime
  4507. * A map of media sequence numbers to media start times.
  4508. * Only used for VOD content.
  4509. * @property {boolean} canSkipSegments
  4510. * True if the server supports delta playlist updates, and we can send a
  4511. * request for a playlist that can skip older media segments.
  4512. * @property {boolean} canBlockReload
  4513. * True if the server supports blocking playlist reload, and we can send a
  4514. * request for a playlist that can block reload until some segments are
  4515. * present.
  4516. * @property {boolean} hasEndList
  4517. * True if the stream has an EXT-X-ENDLIST tag.
  4518. * @property {number} firstSequenceNumber
  4519. * The sequence number of the first reference. Only calculated if needed.
  4520. * @property {number} nextMediaSequence
  4521. * The next media sequence.
  4522. * @property {number} nextPart
  4523. * The next part.
  4524. * @property {boolean} loadedOnce
  4525. * True if the stream has been loaded at least once.
  4526. */
  4527. shaka.hls.HlsParser.StreamInfo;
  4528. /**
  4529. * @typedef {{
  4530. * audio: !Array.<shaka.hls.HlsParser.StreamInfo>,
  4531. * video: !Array.<shaka.hls.HlsParser.StreamInfo>
  4532. * }}
  4533. *
  4534. * @description Audio and video stream infos.
  4535. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} audio
  4536. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} video
  4537. */
  4538. shaka.hls.HlsParser.StreamInfos;
  4539. /**
  4540. * @const {!Object.<string, string>}
  4541. * @private
  4542. */
  4543. shaka.hls.HlsParser.RAW_FORMATS_TO_MIME_TYPES_ = {
  4544. 'aac': 'audio/aac',
  4545. 'ac3': 'audio/ac3',
  4546. 'ec3': 'audio/ec3',
  4547. 'mp3': 'audio/mpeg',
  4548. };
  4549. /**
  4550. * @const {!Object.<string, string>}
  4551. * @private
  4552. */
  4553. shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_ = {
  4554. 'mp4': 'audio/mp4',
  4555. 'mp4a': 'audio/mp4',
  4556. 'm4s': 'audio/mp4',
  4557. 'm4i': 'audio/mp4',
  4558. 'm4a': 'audio/mp4',
  4559. 'm4f': 'audio/mp4',
  4560. 'cmfa': 'audio/mp4',
  4561. // MPEG2-TS also uses video/ for audio: https://bit.ly/TsMse
  4562. 'ts': 'video/mp2t',
  4563. 'tsa': 'video/mp2t',
  4564. };
  4565. /**
  4566. * @const {!Object.<string, string>}
  4567. * @private
  4568. */
  4569. shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_ = {
  4570. 'mp4': 'video/mp4',
  4571. 'mp4v': 'video/mp4',
  4572. 'm4s': 'video/mp4',
  4573. 'm4i': 'video/mp4',
  4574. 'm4v': 'video/mp4',
  4575. 'm4f': 'video/mp4',
  4576. 'cmfv': 'video/mp4',
  4577. 'ts': 'video/mp2t',
  4578. 'tsv': 'video/mp2t',
  4579. };
  4580. /**
  4581. * @const {!Object.<string, string>}
  4582. * @private
  4583. */
  4584. shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_ = {
  4585. 'mp4': 'application/mp4',
  4586. 'm4s': 'application/mp4',
  4587. 'm4i': 'application/mp4',
  4588. 'm4f': 'application/mp4',
  4589. 'cmft': 'application/mp4',
  4590. 'vtt': 'text/vtt',
  4591. 'webvtt': 'text/vtt',
  4592. 'ttml': 'application/ttml+xml',
  4593. };
  4594. /**
  4595. * @const {!Object.<string, string>}
  4596. * @private
  4597. */
  4598. shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_ = {
  4599. 'jpg': 'image/jpeg',
  4600. 'png': 'image/png',
  4601. 'svg': 'image/svg+xml',
  4602. 'webp': 'image/webp',
  4603. 'avif': 'image/avif',
  4604. };
  4605. /**
  4606. * @const {!Object.<string, !Object.<string, string>>}
  4607. * @private
  4608. */
  4609. shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_ = {
  4610. 'audio': shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_,
  4611. 'video': shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_,
  4612. 'text': shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_,
  4613. 'image': shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_,
  4614. };
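// For example, an audio rendition whose segment URIs end in '.cmfa' maps to
// 'audio/mp4' through these tables, while a '.ts' extension maps to
// 'video/mp2t' for both audio and video content; extensions that appear in
// none of the tables fall through to the codec-based and network-based
// guessing above.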
  4615. /**
  4616. * MIME types without init segment.
  4617. *
  4618. * @const {!Set.<string>}
  4619. * @private
  4620. */
  4621. shaka.hls.HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_ = new Set([
  4622. 'video/mp2t',
  4623. // Containerless types
  4624. ...shaka.util.MimeUtils.RAW_FORMATS,
  4625. ]);
  4626. /**
  4627. * @typedef {function(!shaka.hls.Tag, string):?shaka.extern.DrmInfo}
  4628. * @private
  4629. */
  4630. shaka.hls.HlsParser.DrmParser_;
  4631. /**
  4632. * @const {!Object.<string, shaka.hls.HlsParser.DrmParser_>}
  4633. * @private
  4634. */
  4635. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_ = {
  4636. 'com.apple.streamingkeydelivery':
  4637. shaka.hls.HlsParser.fairplayDrmParser_,
  4638. 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed':
  4639. shaka.hls.HlsParser.widevineDrmParser_,
  4640. 'com.microsoft.playready':
  4641. shaka.hls.HlsParser.playreadyDrmParser_,
  4642. };
  4643. /**
  4644. * @enum {string}
  4645. * @private
  4646. */
  4647. shaka.hls.HlsParser.PresentationType_ = {
  4648. VOD: 'VOD',
  4649. EVENT: 'EVENT',
  4650. LIVE: 'LIVE',
  4651. };
  4652. shaka.media.ManifestParser.registerParserByMime(
  4653. 'application/x-mpegurl', () => new shaka.hls.HlsParser());
  4654. shaka.media.ManifestParser.registerParserByMime(
  4655. 'application/vnd.apple.mpegurl', () => new shaka.hls.HlsParser());
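// With these registrations in place, applications do not normally reference
// shaka.hls.HlsParser directly. For example (URI is illustrative), loading
// HLS content with an explicit MIME type selects this parser:
//   player.load('https://example.com/master.m3u8', /* startTime= */ null,
//       'application/x-mpegurl');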