Source: lib/hls/hls_parser.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.hls.HlsParser');
  7. goog.require('goog.Uri');
  8. goog.require('goog.asserts');
  9. goog.require('shaka.abr.Ewma');
  10. goog.require('shaka.hls.ManifestTextParser');
  11. goog.require('shaka.hls.Playlist');
  12. goog.require('shaka.hls.PlaylistType');
  13. goog.require('shaka.hls.Tag');
  14. goog.require('shaka.hls.Utils');
  15. goog.require('shaka.log');
  16. goog.require('shaka.media.DrmEngine');
  17. goog.require('shaka.media.InitSegmentReference');
  18. goog.require('shaka.media.ManifestParser');
  19. goog.require('shaka.media.PresentationTimeline');
  20. goog.require('shaka.media.SegmentIndex');
  21. goog.require('shaka.media.SegmentReference');
  22. goog.require('shaka.net.DataUriPlugin');
  23. goog.require('shaka.net.NetworkingEngine');
  24. goog.require('shaka.util.ArrayUtils');
  25. goog.require('shaka.util.BufferUtils');
  26. goog.require('shaka.util.ContentSteeringManager');
  27. goog.require('shaka.util.Error');
  28. goog.require('shaka.util.FakeEvent');
  29. goog.require('shaka.util.LanguageUtils');
  30. goog.require('shaka.util.ManifestParserUtils');
  31. goog.require('shaka.util.MimeUtils');
  32. goog.require('shaka.util.Networking');
  33. goog.require('shaka.util.OperationManager');
  34. goog.require('shaka.util.Pssh');
  35. goog.require('shaka.media.SegmentUtils');
  36. goog.require('shaka.util.Timer');
  37. goog.require('shaka.util.TXml');
  38. goog.require('shaka.util.Platform');
  39. goog.require('shaka.util.Uint8ArrayUtils');
  40. goog.requireType('shaka.hls.Segment');
  41. /**
  42. * HLS parser.
  43. *
  44. * @implements {shaka.extern.ManifestParser}
  45. * @export
  46. */
  47. shaka.hls.HlsParser = class {
  48. /**
  49. * Creates an Hls Parser object.
  50. */
  51. constructor() {
  52. /** @private {?shaka.extern.ManifestParser.PlayerInterface} */
  53. this.playerInterface_ = null;
  54. /** @private {?shaka.extern.ManifestConfiguration} */
  55. this.config_ = null;
  56. /** @private {number} */
  57. this.globalId_ = 1;
  58. /** @private {!Map.<string, string>} */
  59. this.globalVariables_ = new Map();
  60. /**
  61. * A map from group id to stream infos created from the media tags.
  62. * @private {!Map.<string, !Array.<?shaka.hls.HlsParser.StreamInfo>>}
  63. */
  64. this.groupIdToStreamInfosMap_ = new Map();
  65. /**
  66. * For media playlist lazy-loading to work in livestreams, we have to assume
  67. * that each stream of a type (video, audio, etc) has the same mappings of
  68. * sequence number to start time.
  69. * This map stores those relationships.
  70. * Only used during livestreams; we do not assume that VOD content is
  71. * aligned in that way.
  72. * @private {!Map.<string, !Map.<number, number>>}
  73. */
  74. this.mediaSequenceToStartTimeByType_ = new Map();
  75. // Set initial maps.
  76. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  77. this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map());
  78. this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map());
  79. this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map());
  80. this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map());
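// Illustrative example (hypothetical values, not from any real playlist):
// after parsing a live video playlist with 6-second segments, the VIDEO
// entry of this map might hold {100 -> 0, 101 -> 6, 102 -> 12}, i.e. media
// sequence number to segment start time in seconds.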
  81. /**
  82. * The values are strings of the form "<VIDEO URI> - <AUDIO URI>",
  83. * where the URIs are the verbatim media playlist URIs as they appeared in
  84. * the master playlist.
  85. *
  86. * Used to avoid duplicates that vary only in their text stream.
  87. *
  88. * @private {!Set.<string>}
  89. */
  90. this.variantUriSet_ = new Set();
  91. /**
  92. * A map from (verbatim) media playlist URI to stream infos representing the
  93. * playlists.
  94. *
  95. * On update, used to iterate through and update from media playlists.
  96. *
  97. * On initial parse, used to iterate through and determine minimum
  98. * timestamps, offsets, and to handle TS rollover.
  99. *
  100. * During parsing, used to avoid duplicates in the async methods
  101. * createStreamInfoFromMediaTags_, createStreamInfoFromImageTag_ and
  102. * createStreamInfoFromVariantTags_.
  103. *
  104. * @private {!Map.<string, shaka.hls.HlsParser.StreamInfo>}
  105. */
  106. this.uriToStreamInfosMap_ = new Map();
  107. /** @private {?shaka.media.PresentationTimeline} */
  108. this.presentationTimeline_ = null;
  109. /**
  110. * The master playlist URI, after redirects.
  111. *
  112. * @private {string}
  113. */
  114. this.masterPlaylistUri_ = '';
  115. /** @private {shaka.hls.ManifestTextParser} */
  116. this.manifestTextParser_ = new shaka.hls.ManifestTextParser();
  117. /**
  118. * The minimum sequence number for generated segments, when ignoring
  119. * EXT-X-PROGRAM-DATE-TIME.
  120. *
  121. * @private {number}
  122. */
  123. this.minSequenceNumber_ = -1;
  124. /**
  125. * The lowest time value for any of the streams, as defined by the
  126. * EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970.
  127. *
  128. * @private {number}
  129. */
  130. this.lowestSyncTime_ = Infinity;
  131. /**
  132. * Whether the streams have previously been "finalized"; that is to say,
  133. * whether we have loaded enough streams to know information about the asset
  134. * such as timing information, live status, etc.
  135. *
  136. * @private {boolean}
  137. */
  138. this.streamsFinalized_ = false;
  139. /**
140. * Whether the manifest specifies the codecs to use.
141. *
142. * @private {boolean}
  143. */
  144. this.codecInfoInManifest_ = false;
  145. /**
  146. * This timer is used to trigger the start of a manifest update. A manifest
  147. * update is async. Once the update is finished, the timer will be restarted
  148. * to trigger the next update. The timer will only be started if the content
  149. * is live content.
  150. *
  151. * @private {shaka.util.Timer}
  152. */
  153. this.updatePlaylistTimer_ = new shaka.util.Timer(() => {
  154. this.onUpdate_();
  155. });
  156. /** @private {shaka.hls.HlsParser.PresentationType_} */
  157. this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD;
  158. /** @private {?shaka.extern.Manifest} */
  159. this.manifest_ = null;
  160. /** @private {number} */
  161. this.maxTargetDuration_ = 0;
  162. /** @private {number} */
  163. this.lastTargetDuration_ = Infinity;
  164. /** Partial segments target duration.
  165. * @private {number}
  166. */
  167. this.partialTargetDuration_ = 0;
  168. /** @private {number} */
  169. this.presentationDelay_ = 0;
  170. /** @private {number} */
  171. this.lowLatencyPresentationDelay_ = 0;
  172. /** @private {shaka.util.OperationManager} */
  173. this.operationManager_ = new shaka.util.OperationManager();
  174. /** A map from closed captions' group id, to a map of closed captions info.
  175. * {group id -> {closed captions channel id -> language}}
  176. * @private {Map.<string, Map.<string, string>>}
  177. */
  178. this.groupIdToClosedCaptionsMap_ = new Map();
  179. /** @private {Map.<string, string>} */
  180. this.groupIdToCodecsMap_ = new Map();
  181. /** A cache mapping EXT-X-MAP tag info to the InitSegmentReference created
  182. * from the tag.
  183. * The key is a string combining the EXT-X-MAP tag's absolute uri, and
  184. * its BYTERANGE if available.
185. * @private {!Map.<string, !shaka.media.InitSegmentReference>} */
  186. this.mapTagToInitSegmentRefMap_ = new Map();
  187. /** @private {Map.<string, !shaka.extern.aesKey>} */
  188. this.aesKeyInfoMap_ = new Map();
  189. /** @private {Map.<string, !Promise.<shaka.extern.Response>>} */
  190. this.aesKeyMap_ = new Map();
  191. /** @private {Map.<string, !Promise.<shaka.extern.Response>>} */
  192. this.identityKeyMap_ = new Map();
  193. /** @private {Map.<!shaka.media.InitSegmentReference, ?string>} */
  194. this.identityKidMap_ = new Map();
  195. /** @private {boolean} */
  196. this.lowLatencyMode_ = false;
  197. /** @private {boolean} */
  198. this.lowLatencyByterangeOptimization_ = false;
  199. /**
  200. * An ewma that tracks how long updates take.
  201. * This is to mitigate issues caused by slow parsing on embedded devices.
  202. * @private {!shaka.abr.Ewma}
  203. */
  204. this.averageUpdateDuration_ = new shaka.abr.Ewma(5);
  205. /** @private {?shaka.util.ContentSteeringManager} */
  206. this.contentSteeringManager_ = null;
  207. }
  208. /**
  209. * @override
  210. * @exportInterface
  211. */
  212. configure(config) {
  213. this.config_ = config;
  214. if (this.contentSteeringManager_) {
  215. this.contentSteeringManager_.configure(this.config_);
  216. }
  217. }
  218. /**
  219. * @override
  220. * @exportInterface
  221. */
  222. async start(uri, playerInterface) {
  223. goog.asserts.assert(this.config_, 'Must call configure() before start()!');
  224. this.playerInterface_ = playerInterface;
  225. this.lowLatencyMode_ = playerInterface.isLowLatencyMode();
  226. const response = await this.requestManifest_([uri]);
  227. // Record the master playlist URI after redirects.
  228. this.masterPlaylistUri_ = response.uri;
  229. goog.asserts.assert(response.data, 'Response data should be non-null!');
  230. await this.parseManifest_(response.data, uri);
  231. goog.asserts.assert(this.manifest_, 'Manifest should be non-null');
  232. return this.manifest_;
  233. }
  234. /**
  235. * @override
  236. * @exportInterface
  237. */
  238. stop() {
  239. // Make sure we don't update the manifest again. Even if the timer is not
  240. // running, this is safe to call.
  241. if (this.updatePlaylistTimer_) {
  242. this.updatePlaylistTimer_.stop();
  243. this.updatePlaylistTimer_ = null;
  244. }
  245. /** @type {!Array.<!Promise>} */
  246. const pending = [];
  247. if (this.operationManager_) {
  248. pending.push(this.operationManager_.destroy());
  249. this.operationManager_ = null;
  250. }
  251. this.playerInterface_ = null;
  252. this.config_ = null;
  253. this.variantUriSet_.clear();
  254. this.manifest_ = null;
  255. this.uriToStreamInfosMap_.clear();
  256. this.groupIdToStreamInfosMap_.clear();
  257. this.groupIdToCodecsMap_.clear();
  258. this.globalVariables_.clear();
  259. this.mapTagToInitSegmentRefMap_.clear();
  260. this.aesKeyInfoMap_.clear();
  261. this.aesKeyMap_.clear();
  262. this.identityKeyMap_.clear();
  263. this.identityKidMap_.clear();
  264. if (this.contentSteeringManager_) {
  265. this.contentSteeringManager_.destroy();
  266. }
  267. return Promise.all(pending);
  268. }
  269. /**
  270. * @override
  271. * @exportInterface
  272. */
  273. async update() {
  274. if (!this.isLive_()) {
  275. return;
  276. }
  277. /** @type {!Array.<!Promise>} */
  278. const updates = [];
  279. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
280. // This is necessary to correctly calculate the update time.
  281. this.lastTargetDuration_ = Infinity;
  282. // Only update active streams.
  283. const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex);
  284. for (const streamInfo of activeStreamInfos) {
  285. updates.push(this.updateStream_(streamInfo));
  286. }
  287. await Promise.all(updates);
  288. // Now that streams have been updated, notify the presentation timeline.
  289. this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream));
  290. // If any hasEndList is false, the stream is still live.
  291. const stillLive = activeStreamInfos.some((s) => s.hasEndList == false);
  292. if (activeStreamInfos.length && !stillLive) {
  293. // Convert the presentation to VOD and set the duration.
  294. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  295. this.setPresentationType_(PresentationType.VOD);
  296. // The duration is the minimum of the end times of all active streams.
  297. // Non-active streams are not guaranteed to have useful maxTimestamp
  298. // values, due to the lazy-loading system, so they are ignored.
  299. const maxTimestamps = activeStreamInfos.map((s) => s.maxTimestamp);
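// Illustrative example (hypothetical values): if the active audio stream
// ends at 600.2 and the active video stream at 600.0, the VOD duration
// becomes 600.0, the minimum of the two.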
  301. this.presentationTimeline_.setDuration(Math.min(...maxTimestamps));
  302. this.playerInterface_.updateDuration();
  303. }
  304. if (stillLive) {
  305. this.determineDuration_();
  306. }
  307. }
  308. /**
  309. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  310. * @return {!Map.<number, number>}
  311. * @private
  312. */
  313. getMediaSequenceToStartTimeFor_(streamInfo) {
  314. if (this.isLive_()) {
  315. return this.mediaSequenceToStartTimeByType_.get(streamInfo.type);
  316. } else {
  317. return streamInfo.mediaSequenceToStartTime;
  318. }
  319. }
  320. /**
  321. * Updates a stream.
  322. *
  323. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  324. * @return {!Promise}
  325. * @private
  326. */
  327. async updateStream_(streamInfo) {
  328. const manifestUris = [];
  329. for (const uri of streamInfo.getUris()) {
  330. const uriObj = new goog.Uri(uri);
  331. const queryData = uriObj.getQueryData();
  332. if (streamInfo.canBlockReload) {
  333. if (streamInfo.nextMediaSequence >= 0) {
  334. // Indicates that the server must hold the request until a Playlist
335. // contains a Media Segment with that Media Sequence Number or later.
  336. queryData.add('_HLS_msn', String(streamInfo.nextMediaSequence));
  337. }
  338. if (streamInfo.nextPart >= 0) {
  339. // Indicates, in combination with _HLS_msn, that the server must hold
  340. // the request until a Playlist contains Partial Segment N of Media
  341. // Sequence Number M or later.
  342. queryData.add('_HLS_part', String(streamInfo.nextPart));
  343. }
  344. }
  345. if (streamInfo.canSkipSegments) {
346. // Enable delta updates. This will replace older segments with an
  347. // 'EXT-X-SKIP' tag in the media playlist.
  348. queryData.add('_HLS_skip', 'YES');
  349. }
  350. if (queryData.getCount()) {
  351. uriObj.setQueryData(queryData);
  352. }
  353. manifestUris.push(uriObj.toString());
  354. }
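// Illustrative example (hypothetical URI): with blocking reload and delta
// updates enabled, the resulting low-latency HLS request could look like
// https://example.com/video.m3u8?_HLS_msn=273&_HLS_part=2&_HLS_skip=YES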
  355. const response =
  356. await this.requestManifest_(manifestUris, /* isPlaylist= */ true);
  357. if (!streamInfo.stream.segmentIndex) {
  358. // The stream was closed since the update was first requested.
  359. return;
  360. }
  361. /** @type {shaka.hls.Playlist} */
  362. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  363. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  364. throw new shaka.util.Error(
  365. shaka.util.Error.Severity.CRITICAL,
  366. shaka.util.Error.Category.MANIFEST,
  367. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  368. }
  369. // Record the final URI after redirects.
  370. const responseUri = response.uri;
  371. if (responseUri != response.originalUri &&
  372. !streamInfo.getUris().includes(responseUri)) {
  373. streamInfo.redirectUris.push(responseUri);
  374. }
  375. /** @type {!Array.<!shaka.hls.Tag>} */
  376. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  377. 'EXT-X-DEFINE');
  378. const mediaVariables = this.parseMediaVariables_(
  379. variablesTags, responseUri);
  380. const stream = streamInfo.stream;
  381. const mediaSequenceToStartTime =
  382. this.getMediaSequenceToStartTimeFor_(streamInfo);
  383. const {keyIds, drmInfos} = await this.parseDrmInfo_(
  384. playlist, stream.mimeType, streamInfo.getUris, mediaVariables);
  385. const keysAreEqual =
  386. (a, b) => a.size === b.size && [...a].every((value) => b.has(value));
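// For example, keysAreEqual(new Set(['a', 'b']), new Set(['b', 'a'])) is
// true; the comparison is order-insensitive, so newDrmInfo is only raised
// below when the set of key IDs actually changes.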
  387. if (!keysAreEqual(stream.keyIds, keyIds)) {
  388. stream.keyIds = keyIds;
  389. stream.drmInfos = drmInfos;
  390. this.playerInterface_.newDrmInfo(stream);
  391. }
  392. const {segments, bandwidth} = this.createSegments_(
  393. playlist, stream, mediaSequenceToStartTime, mediaVariables,
  394. streamInfo.getUris, streamInfo.type);
  395. if (bandwidth) {
  396. stream.bandwidth = bandwidth;
  397. }
  398. stream.segmentIndex.mergeAndEvict(
  399. segments, this.presentationTimeline_.getSegmentAvailabilityStart());
  400. if (segments.length) {
  401. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  402. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  403. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  404. playlist.tags, 'EXT-X-SKIP');
  405. const skippedSegments =
  406. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  407. const {nextMediaSequence, nextPart} =
  408. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  409. streamInfo.nextMediaSequence = nextMediaSequence + skippedSegments;
  410. streamInfo.nextPart = nextPart;
  411. const playlistStartTime = mediaSequenceToStartTime.get(
  412. mediaSequenceNumber);
  413. stream.segmentIndex.evict(playlistStartTime);
  414. }
  415. const oldSegment = stream.segmentIndex.earliestReference();
  416. goog.asserts.assert(oldSegment, 'Should have segments!');
  417. streamInfo.minTimestamp = oldSegment.startTime;
  418. const newestSegment = segments[segments.length - 1];
  419. goog.asserts.assert(newestSegment, 'Should have segments!');
  420. streamInfo.maxTimestamp = newestSegment.endTime;
  421. // Once the last segment has been added to the playlist,
  422. // #EXT-X-ENDLIST tag will be appended.
  423. // If that happened, treat the rest of the EVENT presentation as VOD.
  424. const endListTag =
  425. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  426. if (endListTag) {
  427. // Flag this for later. We don't convert the whole presentation into VOD
  428. // until we've seen the ENDLIST tag for all active playlists.
  429. streamInfo.hasEndList = true;
  430. }
  431. this.determineLastTargetDuration_(playlist);
  432. }
  433. /**
  434. * @override
  435. * @exportInterface
  436. */
  437. onExpirationUpdated(sessionId, expiration) {
  438. // No-op
  439. }
  440. /**
  441. * @override
  442. * @exportInterface
  443. */
  444. onInitialVariantChosen(variant) {
  445. // No-op
  446. }
  447. /**
  448. * @override
  449. * @exportInterface
  450. */
  451. banLocation(uri) {
  452. if (this.contentSteeringManager_) {
  453. this.contentSteeringManager_.banLocation(uri);
  454. }
  455. }
  456. /**
  457. * Align the streams by sequence number by dropping early segments. Then
  458. * offset the streams to begin at presentation time 0.
  459. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  460. * @private
  461. */
  462. syncStreamsWithSequenceNumber_(streamInfos) {
463. // We assume that, when this is first called, we have enough info to
464. // determine how to sync the streams by sequence number (e.g. we have
465. // both a video and an audio, and all other videos and audios match
466. // those). Thus, we only need to calculate this once.
  467. const updateMinSequenceNumber = this.minSequenceNumber_ == -1;
  468. // Sync using media sequence number. Find the highest starting sequence
  469. // number among all streams. Later, we will drop any references to
  470. // earlier segments in other streams, then offset everything back to 0.
  471. for (const streamInfo of streamInfos) {
  472. const segmentIndex = streamInfo.stream.segmentIndex;
  473. goog.asserts.assert(segmentIndex,
  474. 'Only loaded streams should be synced');
  475. const mediaSequenceToStartTime =
  476. this.getMediaSequenceToStartTimeFor_(streamInfo);
  477. const segment0 = segmentIndex.earliestReference();
  478. if (segment0) {
  479. // This looks inefficient, but iteration order is insertion order.
  480. // So the very first entry should be the one we want.
  481. // We assert that this holds true so that we are alerted by debug
  482. // builds and tests if it changes. We still do a loop, though, so
  483. // that the code functions correctly in production no matter what.
  484. if (goog.DEBUG) {
  485. const firstSequenceStartTime =
  486. mediaSequenceToStartTime.values().next().value;
487. goog.asserts.assert(
  488. firstSequenceStartTime == segment0.startTime,
  489. 'Sequence number map is not ordered as expected!');
  490. }
  491. for (const [sequence, start] of mediaSequenceToStartTime) {
  492. if (start == segment0.startTime) {
  493. if (updateMinSequenceNumber) {
  494. this.minSequenceNumber_ = Math.max(
  495. this.minSequenceNumber_, sequence);
  496. }
  497. // Even if we already have decided on a value for
  498. // |this.minSequenceNumber_|, we still need to determine the first
  499. // sequence number for the stream, to offset it in the code below.
  500. streamInfo.firstSequenceNumber = sequence;
  501. break;
  502. }
  503. }
  504. }
  505. }
  506. if (this.minSequenceNumber_ < 0) {
  507. // Nothing to sync.
  508. return;
  509. }
  510. shaka.log.debug('Syncing HLS streams against base sequence number:',
  511. this.minSequenceNumber_);
  512. for (const streamInfo of streamInfos) {
  513. if (!this.ignoreManifestProgramDateTimeFor_(streamInfo.type)) {
  514. continue;
  515. }
  516. const segmentIndex = streamInfo.stream.segmentIndex;
  517. if (segmentIndex) {
  518. // Drop any earlier references.
  519. const numSegmentsToDrop =
  520. this.minSequenceNumber_ - streamInfo.firstSequenceNumber;
  521. if (numSegmentsToDrop > 0) {
  522. segmentIndex.dropFirstReferences(numSegmentsToDrop);
  523. // Now adjust timestamps back to begin at 0.
  524. const segmentN = segmentIndex.earliestReference();
  525. if (segmentN) {
  526. const streamOffset = -segmentN.startTime;
  527. // Modify all SegmentReferences equally.
  528. streamInfo.stream.segmentIndex.offset(streamOffset);
  529. // Update other parts of streamInfo the same way.
  530. this.offsetStreamInfo_(streamInfo, streamOffset);
  531. }
  532. }
  533. }
  534. }
  535. }
  536. /**
  537. * Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their
  538. * segments. Also normalizes segment times so that the earliest segment in
  539. * any stream is at time 0.
  540. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  541. * @private
  542. */
  543. syncStreamsWithProgramDateTime_(streamInfos) {
  544. // We assume that, when this is first called, we have enough info to
  545. // determine how to use the program date times (e.g. we have both a video
  546. // and an audio, and all other videos and audios match those).
  547. // Thus, we only need to calculate this once.
  548. if (this.lowestSyncTime_ == Infinity) {
  549. for (const streamInfo of streamInfos) {
  550. const segmentIndex = streamInfo.stream.segmentIndex;
  551. goog.asserts.assert(segmentIndex,
  552. 'Only loaded streams should be synced');
  553. const segment0 = segmentIndex.earliestReference();
  554. if (segment0 != null && segment0.syncTime != null) {
  555. this.lowestSyncTime_ =
  556. Math.min(this.lowestSyncTime_, segment0.syncTime);
  557. }
  558. }
  559. }
  560. const lowestSyncTime = this.lowestSyncTime_;
  561. if (lowestSyncTime == Infinity) {
  562. // Nothing to sync.
  563. return;
  564. }
  565. shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime);
  566. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  567. if (this.ignoreManifestProgramDateTimeFor_(streamInfo.type)) {
  568. continue;
  569. }
  570. const segmentIndex = streamInfo.stream.segmentIndex;
  571. if (segmentIndex != null) {
  572. // A segment's startTime should be based on its syncTime vs the lowest
  573. // syncTime across all streams. The earliest segment sync time from
  574. // any stream will become presentation time 0. If two streams start
  575. // e.g. 6 seconds apart in syncTime, then their first segments will
  576. // also start 6 seconds apart in presentation time.
  577. const segment0 = segmentIndex.earliestReference();
  578. if (segment0.syncTime == null) {
  579. shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream',
  580. streamInfo.getUris(),
  581. 'Expect AV sync issues!');
  582. } else {
  583. // Stream metadata are offset by a fixed amount based on the
  584. // first segment.
  585. const segment0TargetTime = segment0.syncTime - lowestSyncTime;
  586. const streamOffset = segment0TargetTime - segment0.startTime;
  587. this.offsetStreamInfo_(streamInfo, streamOffset);
  588. // This is computed across all segments separately to manage
  589. // accumulated drift in durations.
  590. for (const segment of segmentIndex) {
  591. segment.syncAgainst(lowestSyncTime);
  592. }
  593. }
  594. }
  595. }
  596. }
  597. /**
  598. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  599. * @param {number} offset
  600. * @private
  601. */
  602. offsetStreamInfo_(streamInfo, offset) {
  603. // Adjust our accounting of the minimum timestamp.
  604. streamInfo.minTimestamp += offset;
  605. // Adjust our accounting of the maximum timestamp.
  606. streamInfo.maxTimestamp += offset;
  607. goog.asserts.assert(streamInfo.maxTimestamp >= 0,
  608. 'Negative maxTimestamp after adjustment!');
  609. // Update our map from sequence number to start time.
  610. const mediaSequenceToStartTime =
  611. this.getMediaSequenceToStartTimeFor_(streamInfo);
  612. for (const [key, value] of mediaSequenceToStartTime) {
  613. mediaSequenceToStartTime.set(key, value + offset);
  614. }
  615. shaka.log.debug('Offset', offset, 'applied to',
  616. streamInfo.getUris());
  617. }
  618. /**
  619. * Parses the manifest.
  620. *
  621. * @param {BufferSource} data
  622. * @param {string} uri
  623. * @return {!Promise}
  624. * @private
  625. */
  626. async parseManifest_(data, uri) {
  627. const Utils = shaka.hls.Utils;
  628. goog.asserts.assert(this.masterPlaylistUri_,
  629. 'Master playlist URI must be set before calling parseManifest_!');
  630. const playlist = this.manifestTextParser_.parsePlaylist(data);
  631. /** @type {!Array.<!shaka.hls.Tag>} */
  632. const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE');
  633. /** @type {!Array.<!shaka.extern.Variant>} */
  634. let variants = [];
  635. /** @type {!Array.<!shaka.extern.Stream>} */
  636. let textStreams = [];
  637. /** @type {!Array.<!shaka.extern.Stream>} */
  638. let imageStreams = [];
  639. // This assert is our own sanity check.
  640. goog.asserts.assert(this.presentationTimeline_ == null,
  641. 'Presentation timeline created early!');
  642. // We don't know if the presentation is VOD or live until we parse at least
  643. // one media playlist, so make a VOD-style presentation timeline for now
  644. // and change the type later if we discover this is live.
  645. // Since the player will load the first variant chosen early in the process,
  646. // there isn't a window during playback where the live-ness is unknown.
  647. this.presentationTimeline_ = new shaka.media.PresentationTimeline(
  648. /* presentationStartTime= */ null, /* delay= */ 0);
  649. this.presentationTimeline_.setStatic(true);
  650. const getUris = () => {
  651. return [uri];
  652. };
  653. // Parsing a media playlist results in a single-variant stream.
  654. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  655. // Get necessary info for this stream. These are things we would normally
  656. // find from the master playlist (e.g. from values on EXT-X-MEDIA tags).
  657. const basicInfo =
  658. await this.getMediaPlaylistBasicInfo_(playlist, getUris);
  659. const type = basicInfo.type;
  660. const mimeType = basicInfo.mimeType;
  661. const codecs = basicInfo.codecs;
  662. const languageValue = basicInfo.language;
  663. const height = basicInfo.height;
  664. const width = basicInfo.width;
  665. const channelsCount = basicInfo.channelCount;
  666. const sampleRate = basicInfo.sampleRate;
  667. const closedCaptions = basicInfo.closedCaptions;
  668. const videoRange = basicInfo.videoRange;
  669. // Some values we cannot figure out, and aren't important enough to ask
  670. // the user to provide through config values. A lot of these are only
  671. // relevant to ABR, which isn't necessary if there's only one variant.
  672. // So these unknowns should be set to false or null, largely.
  673. const spatialAudio = false;
  674. const characteristics = null;
  675. const forced = false; // Only relevant for text.
  676. const primary = true; // This is the only stream!
  677. const name = 'Media Playlist';
  678. // Make the stream info, with those values.
  679. const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  680. this.globalId_++, playlist, getUris, uri, codecs, type,
  681. languageValue, primary, name, channelsCount, closedCaptions,
  682. characteristics, forced, sampleRate, spatialAudio, mimeType);
  683. this.uriToStreamInfosMap_.set(uri, streamInfo);
  684. if (type == 'video') {
  685. this.addVideoAttributes_(streamInfo.stream, width, height,
  686. /* frameRate= */ null, videoRange, /* videoLayout= */ null);
  687. }
  688. // Wrap the stream from that stream info with a variant.
  689. variants.push({
  690. id: 0,
  691. language: this.getLanguage_(languageValue),
  692. disabledUntilTime: 0,
  693. primary: true,
  694. audio: type == 'audio' ? streamInfo.stream : null,
  695. video: type == 'video' ? streamInfo.stream : null,
  696. bandwidth: streamInfo.stream.bandwidth || 0,
  697. allowedByApplication: true,
  698. allowedByKeySystem: true,
  699. decodingInfos: [],
  700. });
  701. } else {
  702. this.parseMasterVariables_(variablesTags);
  703. /** @type {!Array.<!shaka.hls.Tag>} */
  704. const mediaTags = Utils.filterTagsByName(
  705. playlist.tags, 'EXT-X-MEDIA');
  706. /** @type {!Array.<!shaka.hls.Tag>} */
  707. const variantTags = Utils.filterTagsByName(
  708. playlist.tags, 'EXT-X-STREAM-INF');
  709. /** @type {!Array.<!shaka.hls.Tag>} */
  710. const imageTags = Utils.filterTagsByName(
  711. playlist.tags, 'EXT-X-IMAGE-STREAM-INF');
  712. /** @type {!Array.<!shaka.hls.Tag>} */
  713. const iFrameTags = Utils.filterTagsByName(
  714. playlist.tags, 'EXT-X-I-FRAME-STREAM-INF');
  715. /** @type {!Array.<!shaka.hls.Tag>} */
  716. const sessionKeyTags = Utils.filterTagsByName(
  717. playlist.tags, 'EXT-X-SESSION-KEY');
  718. /** @type {!Array.<!shaka.hls.Tag>} */
  719. const sessionDataTags = Utils.filterTagsByName(
  720. playlist.tags, 'EXT-X-SESSION-DATA');
  721. /** @type {!Array.<!shaka.hls.Tag>} */
  722. const contentSteeringTags = Utils.filterTagsByName(
  723. playlist.tags, 'EXT-X-CONTENT-STEERING');
  724. this.processSessionData_(sessionDataTags);
  725. await this.processContentSteering_(contentSteeringTags);
  726. this.parseCodecs_(variantTags);
  727. this.parseClosedCaptions_(mediaTags);
  728. variants = await this.createVariantsForTags_(
  729. variantTags, sessionKeyTags, mediaTags, getUris,
  730. this.globalVariables_);
  731. textStreams = this.parseTexts_(mediaTags);
  732. imageStreams = await this.parseImages_(imageTags, iFrameTags);
  733. }
  734. // Make sure that the parser has not been destroyed.
  735. if (!this.playerInterface_) {
  736. throw new shaka.util.Error(
  737. shaka.util.Error.Severity.CRITICAL,
  738. shaka.util.Error.Category.PLAYER,
  739. shaka.util.Error.Code.OPERATION_ABORTED);
  740. }
  741. // Single-variant streams aren't lazy-loaded, so for them we already have
  742. // enough info here to determine the presentation type and duration.
  743. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  744. if (this.isLive_()) {
  745. this.changePresentationTimelineToLive_(playlist);
  746. const delay = this.getUpdatePlaylistDelay_();
  747. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  748. }
  749. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  750. this.finalizeStreams_(streamInfos);
  751. this.determineDuration_();
  752. }
  753. this.manifest_ = {
  754. presentationTimeline: this.presentationTimeline_,
  755. variants,
  756. textStreams,
  757. imageStreams,
  758. offlineSessionIds: [],
  759. minBufferTime: 0,
  760. sequenceMode: this.config_.hls.sequenceMode,
  761. ignoreManifestTimestampsInSegmentsMode:
  762. this.config_.hls.ignoreManifestTimestampsInSegmentsMode,
  763. type: shaka.media.ManifestParser.HLS,
  764. serviceDescription: null,
  765. };
  766. // If there is no 'CODECS' attribute in the manifest and codec guessing is
  767. // disabled, we need to create the segment indexes now so that missing info
  768. // can be parsed from the media data and added to the stream objects.
  769. if (!this.codecInfoInManifest_ && this.config_.hls.disableCodecGuessing) {
  770. const createIndexes = [];
  771. for (const variant of this.manifest_.variants) {
  772. if (variant.audio && variant.audio.codecs === '') {
  773. createIndexes.push(variant.audio.createSegmentIndex());
  774. }
  775. if (variant.video && variant.video.codecs === '') {
  776. createIndexes.push(variant.video.createSegmentIndex());
  777. }
  778. }
  779. await Promise.all(createIndexes);
  780. }
  781. this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_);
  782. if (variants.length == 1) {
  783. const createSegmentIndexPromises = [];
  784. const variant = variants[0];
  785. for (const stream of [variant.video, variant.audio]) {
  786. if (stream && !stream.segmentIndex) {
  787. createSegmentIndexPromises.push(stream.createSegmentIndex());
  788. }
  789. }
  790. if (createSegmentIndexPromises.length > 0) {
  791. await Promise.all(createSegmentIndexPromises);
  792. }
  793. }
  794. }
  795. /**
  796. * @param {shaka.hls.Playlist} playlist
  797. * @param {function():!Array.<string>} getUris
  798. * @return {!Promise.<shaka.media.SegmentUtils.BasicInfo>}
  799. * @private
  800. */
  801. async getMediaPlaylistBasicInfo_(playlist, getUris) {
  802. const HlsParser = shaka.hls.HlsParser;
  803. const defaultBasicInfo = shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  804. this.config_.hls.mediaPlaylistFullMimeType);
  805. if (!playlist.segments.length) {
  806. return defaultBasicInfo;
  807. }
  808. const firstSegment = playlist.segments[0];
  809. const firstSegmentUris = shaka.hls.Utils.constructSegmentUris(
  810. getUris(),
  811. firstSegment.verbatimSegmentUri);
  812. const firstSegmentUri = firstSegmentUris[0];
  813. const parsedUri = new goog.Uri(firstSegmentUri);
  814. const extension = parsedUri.getPath().split('.').pop();
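// Illustrative example (hypothetical URI): for
// "https://cdn.example.com/audio/0001.aac?v=2" the path-based extension is
// "aac", which may map to a raw-format MIME type in the lookup below.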
  815. const rawMimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  816. if (rawMimeType) {
  817. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  818. rawMimeType);
  819. }
  820. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  821. let initData = null;
  822. const initSegmentRef = this.getInitSegmentReference_(
  823. playlist, firstSegment.tags, getUris);
  824. this.mapTagToInitSegmentRefMap_.clear();
  825. if (initSegmentRef) {
  826. const initSegmentRequest = shaka.util.Networking.createSegmentRequest(
  827. initSegmentRef.getUris(),
  828. initSegmentRef.getStartByte(),
  829. initSegmentRef.getEndByte(),
  830. this.config_.retryParameters);
  831. const initType =
  832. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  833. const initResponse = await this.makeNetworkRequest_(
  834. initSegmentRequest, requestType, {type: initType});
  835. initData = initResponse.data;
  836. }
  837. let startByte = 0;
  838. let endByte = null;
  839. const byterangeTag = shaka.hls.Utils.getFirstTagWithName(
  840. firstSegment.tags, 'EXT-X-BYTERANGE');
  841. if (byterangeTag) {
  842. [startByte, endByte] = this.parseByteRange_(
  843. /* previousReference= */ null, byterangeTag.value);
  844. }
  845. const segmentRequest = shaka.util.Networking.createSegmentRequest(
  846. firstSegmentUris, startByte, endByte, this.config_.retryParameters);
  847. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  848. const response = await this.makeNetworkRequest_(
  849. segmentRequest, requestType, {type});
  850. let contentMimeType = response.headers['content-type'];
  851. if (contentMimeType) {
  852. // Split the MIME type in case the server sent additional parameters.
  853. contentMimeType = contentMimeType.split(';')[0].toLowerCase();
  854. }
  855. if (extension == 'ts' || contentMimeType == 'video/mp2t') {
  856. const basicInfo =
  857. shaka.media.SegmentUtils.getBasicInfoFromTs(response.data);
  858. if (basicInfo) {
  859. return basicInfo;
  860. }
  861. } else if (extension == 'mp4' || extension == 'cmfv' ||
  862. extension == 'm4s' || extension == 'fmp4' ||
  863. contentMimeType == 'video/mp4' ||
  864. contentMimeType == 'audio/mp4' ||
  865. contentMimeType == 'video/iso.segment') {
  866. const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromMp4(
  867. initData, response.data);
  868. if (basicInfo) {
  869. return basicInfo;
  870. }
  871. }
  872. return defaultBasicInfo;
  873. }
  874. /** @private */
  875. determineDuration_() {
  876. goog.asserts.assert(this.presentationTimeline_,
  877. 'Presentation timeline not created!');
  878. if (this.isLive_()) {
  879. // The spec says nothing much about seeking in live content, but Safari's
  880. // built-in HLS implementation does not allow it. Therefore we will set
  881. // the availability window equal to the presentation delay. The player
  882. // will be able to buffer ahead three segments, but the seek window will
  883. // be zero-sized.
  884. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  885. if (this.presentationType_ == PresentationType.LIVE) {
  886. let segmentAvailabilityDuration = this.getLiveDuration_();
  887. // This defaults to the presentation delay, which has the effect of
  888. // making the live stream unseekable. This is consistent with Apple's
  889. // HLS implementation.
  890. if (this.config_.hls.useSafariBehaviorForLive) {
  891. segmentAvailabilityDuration = this.presentationTimeline_.getDelay();
  892. }
  893. // The app can override that with a longer duration, to allow seeking.
  894. if (!isNaN(this.config_.availabilityWindowOverride)) {
  895. segmentAvailabilityDuration = this.config_.availabilityWindowOverride;
  896. }
  897. this.presentationTimeline_.setSegmentAvailabilityDuration(
  898. segmentAvailabilityDuration);
  899. }
  900. } else {
  901. // Use the minimum duration as the presentation duration.
  902. this.presentationTimeline_.setDuration(this.getMinDuration_());
  903. }
  904. if (!this.presentationTimeline_.isStartTimeLocked()) {
  905. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  906. if (!streamInfo.stream.segmentIndex) {
  907. continue; // Not active.
  908. }
  909. if (streamInfo.type != 'audio' && streamInfo.type != 'video') {
  910. continue;
  911. }
  912. const firstReference = streamInfo.stream.segmentIndex.get(0);
  913. if (firstReference && firstReference.syncTime) {
  914. const syncTime = firstReference.syncTime;
  915. this.presentationTimeline_.setInitialProgramDateTime(syncTime);
  916. }
  917. }
  918. }
  919. // This is the first point where we have a meaningful presentation start
  920. // time, and we need to tell PresentationTimeline that so that it can
  921. // maintain consistency from here on.
  922. this.presentationTimeline_.lockStartTime();
  923. // This asserts that the live edge is being calculated from segment times.
  924. // For VOD and event streams, this check should still pass.
  925. goog.asserts.assert(
  926. !this.presentationTimeline_.usingPresentationStartTime(),
  927. 'We should not be using the presentation start time in HLS!');
  928. }
  929. /**
930. * Get the variables from the EXT-X-DEFINE tags, and store them in a map.
931. * @param {!Array.<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
  932. * @private
  933. */
  934. parseMasterVariables_(tags) {
  935. const queryParams = new goog.Uri(this.masterPlaylistUri_).getQueryData();
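// Illustrative EXT-X-DEFINE tags (hypothetical names and values):
//   #EXT-X-DEFINE:NAME="region",VALUE="eu-west" stores ("region", "eu-west");
//   #EXT-X-DEFINE:QUERYPARAM="token" stores ("token", <value of ?token=...
//   on the master playlist URI>).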
  936. for (const variableTag of tags) {
  937. const name = variableTag.getAttributeValue('NAME');
  938. const value = variableTag.getAttributeValue('VALUE');
  939. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  940. if (name && value) {
  941. if (!this.globalVariables_.has(name)) {
  942. this.globalVariables_.set(name, value);
  943. }
  944. }
  945. if (queryParam) {
  946. const queryParamValue = queryParams.get(queryParam)[0];
  947. if (queryParamValue && !this.globalVariables_.has(queryParamValue)) {
  948. this.globalVariables_.set(queryParam, queryParamValue);
  949. }
  950. }
  951. }
  952. }
  953. /**
954. * Get the variables from the EXT-X-DEFINE tags, and store them in a map.
955. * @param {!Array.<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
  956. * @param {string} uri Media playlist URI.
  957. * @return {!Map.<string, string>}
  958. * @private
  959. */
  960. parseMediaVariables_(tags, uri) {
  961. const queryParams = new goog.Uri(uri).getQueryData();
  962. const mediaVariables = new Map();
  963. for (const variableTag of tags) {
  964. const name = variableTag.getAttributeValue('NAME');
  965. const value = variableTag.getAttributeValue('VALUE');
  966. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  967. const mediaImport = variableTag.getAttributeValue('IMPORT');
  968. if (name && value) {
  969. if (!mediaVariables.has(name)) {
  970. mediaVariables.set(name, value);
  971. }
  972. }
  973. if (queryParam) {
  974. const queryParamValue = queryParams.get(queryParam)[0];
  975. if (queryParamValue && !mediaVariables.has(queryParamValue)) {
  976. mediaVariables.set(queryParam, queryParamValue);
  977. }
  978. }
  979. if (mediaImport) {
  980. const globalValue = this.globalVariables_.get(mediaImport);
  981. if (globalValue) {
  982. mediaVariables.set(mediaImport, globalValue);
  983. }
  984. }
  985. }
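// Illustrative example (hypothetical name): #EXT-X-DEFINE:IMPORT="region"
// copies the master playlist's "region" variable into this media playlist's
// variable map, per the IMPORT branch above.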
  986. return mediaVariables;
  987. }
  988. /**
  989. * Get the codecs of each variant tag, and store in a map from
990. * audio/video/subtitle group id to the codecs string.
  991. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  992. * @private
  993. */
  994. parseCodecs_(tags) {
  995. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  996. for (const variantTag of tags) {
  997. const audioGroupId = variantTag.getAttributeValue('AUDIO');
  998. const videoGroupId = variantTag.getAttributeValue('VIDEO');
  999. const subGroupId = variantTag.getAttributeValue('SUBTITLES');
  1000. const allCodecs = this.getCodecsForVariantTag_(variantTag);
  1001. if (subGroupId) {
  1002. const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1003. ContentType.TEXT, allCodecs);
  1004. goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.');
  1005. this.groupIdToCodecsMap_.set(subGroupId, textCodecs);
  1006. shaka.util.ArrayUtils.remove(allCodecs, textCodecs);
  1007. }
  1008. if (audioGroupId) {
  1009. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1010. ContentType.AUDIO, allCodecs);
  1011. if (!codecs) {
  1012. codecs = this.config_.hls.defaultAudioCodec;
  1013. }
  1014. this.groupIdToCodecsMap_.set(audioGroupId, codecs);
  1015. }
  1016. if (videoGroupId) {
  1017. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1018. ContentType.VIDEO, allCodecs);
  1019. if (!codecs) {
  1020. codecs = this.config_.hls.defaultVideoCodec;
  1021. }
  1022. this.groupIdToCodecsMap_.set(videoGroupId, codecs);
  1023. }
  1024. }
  1025. }
  1026. /**
  1027. * Process EXT-X-SESSION-DATA tags.
  1028. *
  1029. * @param {!Array.<!shaka.hls.Tag>} tags
  1030. * @private
  1031. */
  1032. processSessionData_(tags) {
  1033. for (const tag of tags) {
  1034. const id = tag.getAttributeValue('DATA-ID');
  1035. const uri = tag.getAttributeValue('URI');
  1036. const language = tag.getAttributeValue('LANGUAGE');
  1037. const value = tag.getAttributeValue('VALUE');
  1038. const data = (new Map()).set('id', id);
  1039. if (uri) {
  1040. data.set('uri', shaka.hls.Utils.constructSegmentUris(
  1041. [this.masterPlaylistUri_], uri, this.globalVariables_)[0]);
  1042. }
  1043. if (language) {
  1044. data.set('language', language);
  1045. }
  1046. if (value) {
  1047. data.set('value', value);
  1048. }
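// Illustrative tag (hypothetical values):
// #EXT-X-SESSION-DATA:DATA-ID="com.example.title",VALUE="Example",LANGUAGE="en"
// surfaces to the application as a 'sessiondata' event whose data map
// carries the id, value, and language entries set above.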
  1049. const event = new shaka.util.FakeEvent('sessiondata', data);
  1050. if (this.playerInterface_) {
  1051. this.playerInterface_.onEvent(event);
  1052. }
  1053. }
  1054. }
  1055. /**
  1056. * Process EXT-X-CONTENT-STEERING tags.
  1057. *
  1058. * @param {!Array.<!shaka.hls.Tag>} tags
  1059. * @return {!Promise}
  1060. * @private
  1061. */
  1062. async processContentSteering_(tags) {
  1063. if (!this.playerInterface_ || !this.config_) {
  1064. return;
  1065. }
  1066. let contentSteeringPromise;
  1067. for (const tag of tags) {
  1068. const defaultPathwayId = tag.getAttributeValue('PATHWAY-ID');
  1069. const uri = tag.getAttributeValue('SERVER-URI');
  1070. if (!defaultPathwayId || !uri) {
  1071. continue;
  1072. }
  1073. this.contentSteeringManager_ =
  1074. new shaka.util.ContentSteeringManager(this.playerInterface_);
  1075. this.contentSteeringManager_.configure(this.config_);
  1076. this.contentSteeringManager_.setBaseUris([this.masterPlaylistUri_]);
  1077. this.contentSteeringManager_.setManifestType(
  1078. shaka.media.ManifestParser.HLS);
  1079. this.contentSteeringManager_.setDefaultPathwayId(defaultPathwayId);
  1080. contentSteeringPromise =
  1081. this.contentSteeringManager_.requestInfo(uri);
  1082. break;
  1083. }
  1084. await contentSteeringPromise;
  1085. }
  1086. /**
  1087. * Parse Subtitles and Closed Captions from 'EXT-X-MEDIA' tags.
  1088. * Create text streams for Subtitles, but not Closed Captions.
  1089. *
  1090. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1091. * @return {!Array.<!shaka.extern.Stream>}
  1092. * @private
  1093. */
  1094. parseTexts_(mediaTags) {
  1095. // Create text stream for each Subtitle media tag.
  1096. const subtitleTags =
  1097. shaka.hls.Utils.filterTagsByType(mediaTags, 'SUBTITLES');
  1098. const textStreams = subtitleTags.map((tag) => {
  1099. const disableText = this.config_.disableText;
  1100. if (disableText) {
  1101. return null;
  1102. }
  1103. try {
  1104. return this.createStreamInfoFromMediaTags_([tag], new Map()).stream;
  1105. } catch (e) {
  1106. if (this.config_.hls.ignoreTextStreamFailures) {
  1107. return null;
  1108. }
  1109. throw e;
  1110. }
  1111. });
  1112. const type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1113. // Set the codecs for text streams.
  1114. for (const tag of subtitleTags) {
  1115. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1116. const codecs = this.groupIdToCodecsMap_.get(groupId);
  1117. if (codecs) {
  1118. const textStreamInfos = this.groupIdToStreamInfosMap_.get(groupId);
  1119. if (textStreamInfos) {
  1120. for (const textStreamInfo of textStreamInfos) {
  1121. textStreamInfo.stream.codecs = codecs;
  1122. textStreamInfo.stream.mimeType =
  1123. this.guessMimeTypeBeforeLoading_(type, codecs) ||
  1124. this.guessMimeTypeFallback_(type);
  1125. this.setFullTypeForStream_(textStreamInfo.stream);
  1126. }
  1127. }
  1128. }
  1129. }
  1130. // Do not create text streams for Closed captions.
  1131. return textStreams.filter((s) => s);
  1132. }
  1133. /**
  1134. * @param {!shaka.extern.Stream} stream
  1135. * @private
  1136. */
  1137. setFullTypeForStream_(stream) {
  1138. stream.fullMimeTypes = new Set([shaka.util.MimeUtils.getFullType(
  1139. stream.mimeType, stream.codecs)]);
  1140. }
  1141. /**
1142. * @param {!Array.<!shaka.hls.Tag>} imageTags Image tags from the playlist.
1143. * @param {!Array.<!shaka.hls.Tag>} iFrameTags I-frame tags from the playlist.
  1144. * @return {!Promise.<!Array.<!shaka.extern.Stream>>}
  1145. * @private
  1146. */
  1147. async parseImages_(imageTags, iFrameTags) {
  1148. // Create image stream for each image tag.
  1149. const imageStreamPromises = imageTags.map(async (tag) => {
  1150. const disableThumbnails = this.config_.disableThumbnails;
  1151. if (disableThumbnails) {
  1152. return null;
  1153. }
  1154. try {
  1155. const streamInfo = await this.createStreamInfoFromImageTag_(tag);
  1156. return streamInfo.stream;
  1157. } catch (e) {
  1158. if (this.config_.hls.ignoreImageStreamFailures) {
  1159. return null;
  1160. }
  1161. throw e;
  1162. }
  1163. }).concat(iFrameTags.map((tag) => {
  1164. const disableThumbnails = this.config_.disableThumbnails;
  1165. if (disableThumbnails) {
  1166. return null;
  1167. }
  1168. try {
  1169. const streamInfo = this.createStreamInfoFromIframeTag_(tag);
  1170. if (streamInfo.stream.codecs !== 'mjpg') {
  1171. return null;
  1172. }
  1173. return streamInfo.stream;
  1174. } catch (e) {
  1175. if (this.config_.hls.ignoreImageStreamFailures) {
  1176. return null;
  1177. }
  1178. throw e;
  1179. }
  1180. }));
  1181. const imageStreams = await Promise.all(imageStreamPromises);
  1182. return imageStreams.filter((s) => s);
  1183. }
  1184. /**
  1185. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1186. * @param {!Map.<string, string>} groupIdPathwayIdMapping
  1187. * @private
  1188. */
  1189. createStreamInfosFromMediaTags_(mediaTags, groupIdPathwayIdMapping) {
  1190. // Filter out subtitles and media tags without uri.
  1191. mediaTags = mediaTags.filter((tag) => {
  1192. const uri = tag.getAttributeValue('URI') || '';
  1193. const type = tag.getAttributeValue('TYPE');
  1194. return type != 'SUBTITLES' && uri != '';
  1195. });
  1196. const groupedTags = {};
  1197. for (const tag of mediaTags) {
  1198. const key = tag.getTagKey();
  1199. if (!groupedTags[key]) {
  1200. groupedTags[key] = [tag];
  1201. } else {
  1202. groupedTags[key].push(tag);
  1203. }
  1204. }
  1205. for (const key in groupedTags) {
  1206. // Create stream info for each audio / video media grouped tag.
  1207. this.createStreamInfoFromMediaTags_(
  1208. groupedTags[key], groupIdPathwayIdMapping);
  1209. }
  1210. }
  1211. /**
  1212. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1213. * @param {!Array.<!shaka.hls.Tag>} sessionKeyTags EXT-X-SESSION-KEY tags
  1214. * from the playlist.
  1215. * @param {!Array.<!shaka.hls.Tag>} mediaTags EXT-X-MEDIA tags from the
  1216. * playlist.
  1217. * @param {function():!Array.<string>} getUris
  1218. * @param {?Map.<string, string>=} variables
  1219. * @return {!Promise.<!Array.<!shaka.extern.Variant>>}
  1220. * @private
  1221. */
  1222. async createVariantsForTags_(tags, sessionKeyTags, mediaTags, getUris,
  1223. variables) {
  1224. // EXT-X-SESSION-KEY processing
  1225. const drmInfos = [];
  1226. const keyIds = new Set();
  1227. if (sessionKeyTags.length > 0) {
  1228. for (const drmTag of sessionKeyTags) {
  1229. const method = drmTag.getRequiredAttrValue('METHOD');
  1230. // According to the HLS spec, KEYFORMAT is optional and implicitly
  1231. // defaults to "identity".
  1232. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  1233. const keyFormat =
  1234. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  1235. let drmInfo = null;
  1236. if (method == 'NONE' || this.isAesMethod_(method)) {
  1237. continue;
  1238. } else if (keyFormat == 'identity') {
  1239. // eslint-disable-next-line no-await-in-loop
  1240. drmInfo = await this.identityDrmParser_(
  1241. drmTag, /* mimeType= */ '', getUris,
  1242. /* initSegmentRef= */ null, variables);
  1243. } else {
  1244. const drmParser =
  1245. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  1246. drmInfo = drmParser ?
  1247. drmParser(drmTag, /* mimeType= */ '') : null;
  1248. }
  1249. if (drmInfo) {
  1250. if (drmInfo.keyIds) {
  1251. for (const keyId of drmInfo.keyIds) {
  1252. keyIds.add(keyId);
  1253. }
  1254. }
  1255. drmInfos.push(drmInfo);
  1256. } else {
  1257. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  1258. }
  1259. }
  1260. }
  1261. const groupedTags = {};
  1262. for (const tag of tags) {
  1263. const key = tag.getTagKey();
  1264. if (!groupedTags[key]) {
  1265. groupedTags[key] = [tag];
  1266. } else {
  1267. groupedTags[key].push(tag);
  1268. }
  1269. }
  1270. const allVariants = [];
1271. // Create variants for each group of variant tags.
  1272. for (const key in groupedTags) {
  1273. const tags = groupedTags[key];
  1274. const firstTag = tags[0];
  1275. const frameRate = firstTag.getAttributeValue('FRAME-RATE');
  1276. const bandwidth =
  1277. Number(firstTag.getAttributeValue('AVERAGE-BANDWIDTH')) ||
  1278. Number(firstTag.getRequiredAttrValue('BANDWIDTH'));
  1279. const resolution = firstTag.getAttributeValue('RESOLUTION');
  1280. const [width, height] = resolution ? resolution.split('x') : [null, null];
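// For example, RESOLUTION=1280x720 yields width "1280" and height "720"
// (still strings at this point).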
  1281. const videoRange = firstTag.getAttributeValue('VIDEO-RANGE');
  1282. let videoLayout = firstTag.getAttributeValue('REQ-VIDEO-LAYOUT');
  1283. if (videoLayout && videoLayout.includes(',')) {
  1284. // If multiple video layout strings are present, pick the first valid
  1285. // one.
  1286. const layoutStrings = videoLayout.split(',').filter((layoutString) => {
  1287. return layoutString == 'CH-STEREO' || layoutString == 'CH-MONO';
  1288. });
  1289. videoLayout = layoutStrings[0];
  1290. }
  1291. // According to the HLS spec:
  1292. // By default a video variant is monoscopic, so an attribute
  1293. // consisting entirely of REQ-VIDEO-LAYOUT="CH-MONO" is unnecessary
  1294. // and SHOULD NOT be present.
  1295. videoLayout = videoLayout || 'CH-MONO';
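// For example, REQ-VIDEO-LAYOUT="CH-STEREO,CH-MONO" resolves to "CH-STEREO"
// above, and a variant with no REQ-VIDEO-LAYOUT attribute defaults to
// "CH-MONO" (monoscopic).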
  1296. const streamInfos = this.createStreamInfosForVariantTags_(tags,
  1297. mediaTags, resolution, frameRate, bandwidth);
  1298. goog.asserts.assert(streamInfos.audio.length ||
  1299. streamInfos.video.length, 'We should have created a stream!');
  1300. allVariants.push(...this.createVariants_(
  1301. streamInfos.audio,
  1302. streamInfos.video,
  1303. bandwidth,
  1304. width,
  1305. height,
  1306. frameRate,
  1307. videoRange,
  1308. videoLayout,
  1309. drmInfos,
  1310. keyIds));
  1311. }
  1312. return allVariants.filter((variant) => variant != null);
  1313. }
  1314. /**
  1315. * Create audio and video streamInfos from an 'EXT-X-STREAM-INF' tag and its
  1316. * related media tags.
  1317. *
  1318. * @param {!Array.<!shaka.hls.Tag>} tags
  1319. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1320. * @param {?string} resolution
  1321. * @param {?string} frameRate
  1322. * @param {number} bandwidth
  1323. * @return {!shaka.hls.HlsParser.StreamInfos}
  1324. * @private
  1325. */
  1326. createStreamInfosForVariantTags_(
  1327. tags, mediaTags, resolution, frameRate, bandwidth) {
  1328. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1329. /** @type {shaka.hls.HlsParser.StreamInfos} */
  1330. const res = {
  1331. audio: [],
  1332. video: [],
  1333. };
  1334. const groupIdPathwayIdMapping = new Map();
  1335. const globalGroupIds = [];
  1336. let isAudioGroup = false;
  1337. let isVideoGroup = false;
  1338. for (const tag of tags) {
  1339. const audioGroupId = tag.getAttributeValue('AUDIO');
  1340. const videoGroupId = tag.getAttributeValue('VIDEO');
  1341. goog.asserts.assert(audioGroupId == null || videoGroupId == null,
  1342. 'Unexpected: both video and audio described by media tags!');
  1343. const groupId = audioGroupId || videoGroupId;
  1344. if (!groupId) {
  1345. continue;
  1346. }
  1347. if (!globalGroupIds.includes(groupId)) {
  1348. globalGroupIds.push(groupId);
  1349. }
  1350. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  1351. if (pathwayId) {
  1352. groupIdPathwayIdMapping.set(groupId, pathwayId);
  1353. }
  1354. if (audioGroupId) {
  1355. isAudioGroup = true;
  1356. } else if (videoGroupId) {
  1357. isVideoGroup = true;
  1358. }
  1359. // Make an educated guess about the stream type.
  1360. shaka.log.debug('Guessing stream type for', tag.toString());
  1361. }
  1362. if (globalGroupIds.length && mediaTags.length) {
  1363. const mediaTagsForVariant = mediaTags.filter((tag) => {
  1364. return globalGroupIds.includes(tag.getRequiredAttrValue('GROUP-ID'));
  1365. });
  1366. this.createStreamInfosFromMediaTags_(
  1367. mediaTagsForVariant, groupIdPathwayIdMapping);
  1368. }
  1369. const globalGroupId = globalGroupIds.sort().join(',');
  1370. const streamInfos =
  1371. (globalGroupId && this.groupIdToStreamInfosMap_.has(globalGroupId)) ?
  1372. this.groupIdToStreamInfosMap_.get(globalGroupId) : [];
  1373. if (isAudioGroup) {
  1374. res.audio.push(...streamInfos);
  1375. } else if (isVideoGroup) {
  1376. res.video.push(...streamInfos);
  1377. }
  1378. let type;
  1379. let ignoreStream = false;
  1380. // The Microsoft HLS manifest generators will make audio-only variants
  1381. // that link to their URI both directly and through an audio tag.
  1382. // In that case, ignore the local URI and use the version in the
  1383. // AUDIO tag, so you inherit its language.
  1384. // As an example, see the manifest linked in issue #860.
  1385. const allStreamUris = tags.map((tag) => tag.getRequiredAttrValue('URI'));
  1386. const hasSameUri = res.audio.find((audio) => {
  1387. return audio && audio.getUris().find((uri) => {
  1388. return allStreamUris.includes(uri);
  1389. });
  1390. });
  1391. /** @type {!Array.<string>} */
  1392. let allCodecs = this.getCodecsForVariantTag_(tags[0]);
  1393. const videoCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1394. ContentType.VIDEO, allCodecs);
  1395. const audioCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1396. ContentType.AUDIO, allCodecs);
  1397. if (audioCodecs && !videoCodecs) {
1398. // There are no associated media tags, and there is only an audio
1399. // codec and no video codec, so this should be audio.
  1400. type = ContentType.AUDIO;
  1401. shaka.log.debug('Guessing audio-only.');
  1402. ignoreStream = res.audio.length > 0;
  1403. } else if (!res.audio.length && !res.video.length &&
  1404. audioCodecs && videoCodecs) {
  1405. // There are both audio and video codecs, so assume multiplexed content.
1406. // Note that the default codecs used when CODECS is missing include
1407. // both audio and video (and therefore assume multiplexed content).
  1408. // Recombine the codec strings into one so that MediaSource isn't
  1409. // lied to later. (That would trigger an error in Chrome.)
  1410. shaka.log.debug('Guessing multiplexed audio+video.');
  1411. type = ContentType.VIDEO;
  1412. allCodecs = [[videoCodecs, audioCodecs].join(',')];
  1413. } else if (res.audio.length && hasSameUri) {
  1414. shaka.log.debug('Guessing audio-only.');
  1415. type = ContentType.AUDIO;
  1416. ignoreStream = true;
  1417. } else if (res.video.length && !res.audio.length) {
  1418. // There are associated video streams. Assume this is audio.
  1419. shaka.log.debug('Guessing audio-only.');
  1420. type = ContentType.AUDIO;
  1421. } else {
  1422. shaka.log.debug('Guessing video-only.');
  1423. type = ContentType.VIDEO;
  1424. }
  1425. if (!ignoreStream) {
  1426. let language = null;
  1427. let name = null;
  1428. let channelsCount = null;
  1429. let spatialAudio = false;
  1430. let characteristics = null;
  1431. let sampleRate = null;
  1432. if (!streamInfos.length) {
  1433. const mediaTag = mediaTags.find((tag) => {
  1434. const uri = tag.getAttributeValue('URI') || '';
  1435. const type = tag.getAttributeValue('TYPE');
  1436. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1437. return type != 'SUBTITLES' && uri == '' &&
  1438. globalGroupIds.includes(groupId);
  1439. });
  1440. if (mediaTag) {
  1441. language = mediaTag.getAttributeValue('LANGUAGE');
  1442. name = mediaTag.getAttributeValue('NAME');
  1443. channelsCount = this.getChannelsCount_(mediaTag);
  1444. spatialAudio = this.isSpatialAudio_(mediaTag);
  1445. characteristics = mediaTag.getAttributeValue('CHARACTERISTICS');
  1446. sampleRate = this.getSampleRate_(mediaTag);
  1447. }
  1448. }
  1449. const streamInfo = this.createStreamInfoFromVariantTags_(
  1450. tags, allCodecs, type, language, name, channelsCount,
  1451. characteristics, sampleRate, spatialAudio);
  1452. if (globalGroupId) {
  1453. streamInfo.stream.groupId = globalGroupId;
  1454. }
  1455. if (!streamInfos.length) {
  1456. streamInfo.stream.bandwidth = bandwidth;
  1457. }
  1458. res[streamInfo.stream.type] = [streamInfo];
  1459. }
  1460. return res;
  1461. }
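// Examples of the type guessing above (hypothetical manifests): a variant
// with CODECS="mp4a.40.2" and no video codec is treated as audio-only; a
// variant with CODECS="avc1.4d401f,mp4a.40.2" and no previously-created
// audio or video streams is treated as multiplexed video, with the codecs
// recombined into the single entry 'avc1.4d401f,mp4a.40.2'.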
  1462. /**
  1463. * Get the codecs from the 'EXT-X-STREAM-INF' tag.
  1464. *
  1465. * @param {!shaka.hls.Tag} tag
  1466. * @return {!Array.<string>} codecs
  1467. * @private
  1468. */
  1469. getCodecsForVariantTag_(tag) {
  1470. let codecsString = tag.getAttributeValue('CODECS') || '';
  1471. const supplementalCodecsString =
  1472. tag.getAttributeValue('SUPPLEMENTAL-CODECS');
  1473. this.codecInfoInManifest_ = codecsString.length > 0;
  1474. if (!this.codecInfoInManifest_ && !this.config_.hls.disableCodecGuessing) {
  1475. // These are the default codecs to assume if none are specified.
  1476. const defaultCodecsArray = [];
  1477. if (!this.config_.disableVideo) {
  1478. defaultCodecsArray.push(this.config_.hls.defaultVideoCodec);
  1479. }
  1480. if (!this.config_.disableAudio) {
  1481. defaultCodecsArray.push(this.config_.hls.defaultAudioCodec);
  1482. }
  1483. codecsString = defaultCodecsArray.join(',');
  1484. }
  1485. // Strip out internal whitespace while splitting on commas:
  1486. /** @type {!Array.<string>} */
  1487. const codecs = codecsString.split(/\s*,\s*/);
  1488. if (supplementalCodecsString) {
  1489. const supplementalCodecs = supplementalCodecsString.split(/\s*,\s*/)
  1490. .map((codec) => {
  1491. return codec.split('/')[0];
  1492. });
  1493. codecs.push(...supplementalCodecs);
  1494. }
  1495. return shaka.media.SegmentUtils.codecsFiltering(codecs);
  1496. }
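// For example (hypothetical attribute values): with
// CODECS="hvc1.2.4.L120.B0,ec-3" and SUPPLEMENTAL-CODECS="dvh1.08.07/db4h",
// the list passed to codecsFiltering() is
// ['hvc1.2.4.L120.B0', 'ec-3', 'dvh1.08.07'], since only the part before
// the '/' of each supplemental codec is kept.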
  1497. /**
  1498. * Get the channel count information for an HLS audio track.
  1499. * CHANNELS specifies an ordered, "/" separated list of parameters.
  1500. * If the type is audio, the first parameter will be a decimal integer
  1501. * specifying the number of independent, simultaneous audio channels.
  1502. * No other channels parameters are currently defined.
  1503. *
  1504. * @param {!shaka.hls.Tag} tag
  1505. * @return {?number}
  1506. * @private
  1507. */
  1508. getChannelsCount_(tag) {
  1509. const channels = tag.getAttributeValue('CHANNELS');
  1510. if (!channels) {
  1511. return null;
  1512. }
1513. const channelCountString = channels.split('/')[0];
1514. const count = parseInt(channelCountString, 10);
  1515. return count;
  1516. }
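// For example (hypothetical attribute values): CHANNELS="2" yields 2 and
// CHANNELS="16/JOC" yields 16, since only the first "/"-separated parameter
// is parsed as the channel count.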
  1517. /**
  1518. * Get the sample rate information for an HLS audio track.
  1519. *
  1520. * @param {!shaka.hls.Tag} tag
  1521. * @return {?number}
  1522. * @private
  1523. */
  1524. getSampleRate_(tag) {
  1525. const sampleRate = tag.getAttributeValue('SAMPLE-RATE');
  1526. if (!sampleRate) {
  1527. return null;
  1528. }
  1529. return parseInt(sampleRate, 10);
  1530. }
  1531. /**
  1532. * Get the spatial audio information for an HLS audio track.
  1533. * In HLS the channels field indicates the number of audio channels that the
  1534. * stream has (eg: 2). In the case of Dolby Atmos, the complexity is
  1535. * expressed with the number of channels followed by the word JOC
1536. * (eg: 16/JOC), so 16 would be the number of channels (eg: 9.1.6 layout),
  1537. * and JOC indicates that the stream has spatial audio.
  1538. * @see https://developer.apple.com/documentation/http_live_streaming/hls_authoring_specification_for_apple_devices/hls_authoring_specification_for_apple_devices_appendixes
  1539. *
  1540. * @param {!shaka.hls.Tag} tag
  1541. * @return {boolean}
  1542. * @private
  1543. */
  1544. isSpatialAudio_(tag) {
  1545. const channels = tag.getAttributeValue('CHANNELS');
  1546. if (!channels) {
  1547. return false;
  1548. }
  1549. return channels.includes('/JOC');
  1550. }
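// For example (hypothetical attribute values): CHANNELS="16/JOC" returns
// true (Dolby Atmos), while CHANNELS="6" returns false.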
  1551. /**
  1552. * Get the closed captions map information for the EXT-X-STREAM-INF tag, to
  1553. * create the stream info.
  1554. * @param {!shaka.hls.Tag} tag
  1555. * @param {string} type
  1556. * @return {Map.<string, string>} closedCaptions
  1557. * @private
  1558. */
  1559. getClosedCaptions_(tag, type) {
  1560. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1561. // The attribute of closed captions is optional, and the value may be
  1562. // 'NONE'.
  1563. const closedCaptionsAttr = tag.getAttributeValue('CLOSED-CAPTIONS');
  1564. // EXT-X-STREAM-INF tags may have CLOSED-CAPTIONS attributes.
  1565. // The value can be either a quoted-string or an enumerated-string with
  1566. // the value NONE. If the value is a quoted-string, it MUST match the
  1567. // value of the GROUP-ID attribute of an EXT-X-MEDIA tag elsewhere in the
  1568. // Playlist whose TYPE attribute is CLOSED-CAPTIONS.
  1569. if (type == ContentType.VIDEO) {
  1570. if (closedCaptionsAttr && closedCaptionsAttr != 'NONE') {
  1571. return this.groupIdToClosedCaptionsMap_.get(closedCaptionsAttr);
  1572. } else if (!closedCaptionsAttr &&
  1573. this.groupIdToClosedCaptionsMap_.size) {
  1574. for (const key of this.groupIdToClosedCaptionsMap_.keys()) {
  1575. return this.groupIdToClosedCaptionsMap_.get(key);
  1576. }
  1577. }
  1578. }
  1579. return null;
  1580. }
  1581. /**
  1582. * Get the normalized language value.
  1583. *
  1584. * @param {?string} languageValue
  1585. * @return {string}
  1586. * @private
  1587. */
  1588. getLanguage_(languageValue) {
  1589. const LanguageUtils = shaka.util.LanguageUtils;
  1590. return LanguageUtils.normalize(languageValue || 'und');
  1591. }
  1592. /**
  1593. * Get the type value.
  1594. * Shaka recognizes the content types 'audio', 'video', 'text', and 'image'.
  1595. * The HLS 'subtitles' type needs to be mapped to 'text'.
  1596. * @param {!shaka.hls.Tag} tag
  1597. * @return {string}
  1598. * @private
  1599. */
  1600. getType_(tag) {
  1601. let type = tag.getRequiredAttrValue('TYPE').toLowerCase();
  1602. if (type == 'subtitles') {
  1603. type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1604. }
  1605. return type;
  1606. }
  1607. /**
  1608. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} audioInfos
  1609. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} videoInfos
  1610. * @param {number} bandwidth
  1611. * @param {?string} width
  1612. * @param {?string} height
  1613. * @param {?string} frameRate
  1614. * @param {?string} videoRange
  1615. * @param {?string} videoLayout
  1616. * @param {!Array.<shaka.extern.DrmInfo>} drmInfos
  1617. * @param {!Set.<string>} keyIds
  1618. * @return {!Array.<!shaka.extern.Variant>}
  1619. * @private
  1620. */
  1621. createVariants_(
  1622. audioInfos, videoInfos, bandwidth, width, height, frameRate, videoRange,
  1623. videoLayout, drmInfos, keyIds) {
  1624. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1625. const DrmEngine = shaka.media.DrmEngine;
  1626. for (const info of videoInfos) {
  1627. this.addVideoAttributes_(
  1628. info.stream, width, height, frameRate, videoRange, videoLayout);
  1629. }
  1630. // In case of audio-only or video-only content or the audio/video is
  1631. // disabled by the config, we create an array of one item containing
  1632. // a null. This way, the double-loop works for all kinds of content.
  1633. // NOTE: we currently don't have support for audio-only content.
  1634. const disableAudio = this.config_.disableAudio;
  1635. if (!audioInfos.length || disableAudio) {
  1636. audioInfos = [null];
  1637. }
  1638. const disableVideo = this.config_.disableVideo;
  1639. if (!videoInfos.length || disableVideo) {
  1640. videoInfos = [null];
  1641. }
  1642. const variants = [];
  1643. for (const audioInfo of audioInfos) {
  1644. for (const videoInfo of videoInfos) {
  1645. const audioStream = audioInfo ? audioInfo.stream : null;
  1646. if (audioStream) {
  1647. audioStream.drmInfos = drmInfos;
  1648. audioStream.keyIds = keyIds;
  1649. }
  1650. const videoStream = videoInfo ? videoInfo.stream : null;
  1651. if (videoStream) {
  1652. videoStream.drmInfos = drmInfos;
  1653. videoStream.keyIds = keyIds;
  1654. }
  1655. const audioDrmInfos = audioInfo ? audioInfo.stream.drmInfos : null;
  1656. const videoDrmInfos = videoInfo ? videoInfo.stream.drmInfos : null;
  1657. const videoStreamUri =
  1658. videoInfo ? videoInfo.getUris().sort().join(',') : '';
  1659. const audioStreamUri =
  1660. audioInfo ? audioInfo.getUris().sort().join(',') : '';
  1661. const variantUriKey = videoStreamUri + ' - ' + audioStreamUri;
  1662. if (audioStream && videoStream) {
  1663. if (!DrmEngine.areDrmCompatible(audioDrmInfos, videoDrmInfos)) {
  1664. shaka.log.warning(
  1665. 'Incompatible DRM info in HLS variant. Skipping.');
  1666. continue;
  1667. }
  1668. }
  1669. if (this.variantUriSet_.has(variantUriKey)) {
  1670. // This happens when two variants only differ in their text streams.
  1671. shaka.log.debug(
  1672. 'Skipping variant which only differs in text streams.');
  1673. continue;
  1674. }
  1675. // Since both audio and video are of the same type, this assertion will
  1676. // catch certain mistakes at runtime that the compiler would miss.
  1677. goog.asserts.assert(!audioStream ||
  1678. audioStream.type == ContentType.AUDIO, 'Audio parameter mismatch!');
  1679. goog.asserts.assert(!videoStream ||
  1680. videoStream.type == ContentType.VIDEO, 'Video parameter mismatch!');
  1681. const variant = {
  1682. id: this.globalId_++,
  1683. language: audioStream ? audioStream.language : 'und',
  1684. disabledUntilTime: 0,
  1685. primary: (!!audioStream && audioStream.primary) ||
  1686. (!!videoStream && videoStream.primary),
  1687. audio: audioStream,
  1688. video: videoStream,
  1689. bandwidth,
  1690. allowedByApplication: true,
  1691. allowedByKeySystem: true,
  1692. decodingInfos: [],
  1693. };
  1694. variants.push(variant);
  1695. this.variantUriSet_.add(variantUriKey);
  1696. }
  1697. }
  1698. return variants;
  1699. }
  1700. /**
  1701. * Parses an array of EXT-X-MEDIA tags, then stores the values of all tags
  1702. * with TYPE="CLOSED-CAPTIONS" into a map of group id to closed captions.
  1703. *
  1704. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1705. * @private
  1706. */
  1707. parseClosedCaptions_(mediaTags) {
  1708. const closedCaptionsTags =
  1709. shaka.hls.Utils.filterTagsByType(mediaTags, 'CLOSED-CAPTIONS');
  1710. for (const tag of closedCaptionsTags) {
  1711. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1712. 'Should only be called on media tags!');
  1713. const languageValue = tag.getAttributeValue('LANGUAGE');
  1714. let language = this.getLanguage_(languageValue);
  1715. if (!languageValue) {
  1716. const nameValue = tag.getAttributeValue('NAME');
  1717. if (nameValue) {
  1718. language = nameValue;
  1719. }
  1720. }
  1721. // The GROUP-ID value is a quoted-string that specifies the group to which
  1722. // the Rendition belongs.
  1723. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1724. // The value of INSTREAM-ID is a quoted-string that specifies a Rendition
  1725. // within the segments in the Media Playlist. This attribute is REQUIRED
  1726. // if the TYPE attribute is CLOSED-CAPTIONS.
1727. // We need to replace the SERVICE string with our internal svc string.
  1728. const instreamId = tag.getRequiredAttrValue('INSTREAM-ID')
  1729. .replace('SERVICE', 'svc');
  1730. if (!this.groupIdToClosedCaptionsMap_.get(groupId)) {
  1731. this.groupIdToClosedCaptionsMap_.set(groupId, new Map());
  1732. }
  1733. this.groupIdToClosedCaptionsMap_.get(groupId).set(instreamId, language);
  1734. }
  1735. }
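// For example, a hypothetical rendition such as
//   #EXT-X-MEDIA:TYPE=CLOSED-CAPTIONS,GROUP-ID="cc1",LANGUAGE="en",INSTREAM-ID="SERVICE1"
// results in groupIdToClosedCaptionsMap_ mapping 'cc1' to an inner map with
// the entry 'svc1' -> 'en' (the SERVICE prefix is rewritten to 'svc').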
  1736. /**
  1737. * Parse EXT-X-MEDIA media tag into a Stream object.
  1738. *
  1739. * @param {!Array.<!shaka.hls.Tag>} tags
  1740. * @param {!Map.<string, string>} groupIdPathwayIdMapping
  1741. * @return {!shaka.hls.HlsParser.StreamInfo}
  1742. * @private
  1743. */
  1744. createStreamInfoFromMediaTags_(tags, groupIdPathwayIdMapping) {
  1745. const verbatimMediaPlaylistUris = [];
  1746. const globalGroupIds = [];
1747. const groupIdUriMapping = new Map();
  1748. for (const tag of tags) {
  1749. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1750. 'Should only be called on media tags!');
  1751. const uri = tag.getRequiredAttrValue('URI');
  1752. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1753. verbatimMediaPlaylistUris.push(uri);
  1754. globalGroupIds.push(groupId);
1755. groupIdUriMapping.set(groupId, uri);
  1756. }
  1757. const globalGroupId = globalGroupIds.sort().join(',');
  1758. const firstTag = tags[0];
  1759. let codecs = '';
  1760. /** @type {string} */
  1761. const type = this.getType_(firstTag);
  1762. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  1763. codecs = firstTag.getAttributeValue('CODECS') || '';
  1764. } else {
  1765. for (const groupId of globalGroupIds) {
  1766. if (this.groupIdToCodecsMap_.has(groupId)) {
  1767. codecs = this.groupIdToCodecsMap_.get(groupId);
  1768. break;
  1769. }
  1770. }
  1771. }
  1772. // Check if the stream has already been created as part of another Variant
  1773. // and return it if it has.
  1774. const key = verbatimMediaPlaylistUris.sort().join(',');
  1775. if (this.uriToStreamInfosMap_.has(key)) {
  1776. return this.uriToStreamInfosMap_.get(key);
  1777. }
  1778. const streamId = this.globalId_++;
  1779. if (this.contentSteeringManager_) {
1780. for (const [groupId, uri] of groupIdUriMapping) {
  1781. const pathwayId = groupIdPathwayIdMapping.get(groupId);
  1782. if (pathwayId) {
  1783. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  1784. }
  1785. }
  1786. }
  1787. const language = firstTag.getAttributeValue('LANGUAGE');
  1788. const name = firstTag.getAttributeValue('NAME');
  1789. // NOTE: According to the HLS spec, "DEFAULT=YES" requires "AUTOSELECT=YES".
  1790. // However, we don't bother to validate "AUTOSELECT", since we don't
  1791. // actually use it in our streaming model, and we treat everything as
  1792. // "AUTOSELECT=YES". A value of "AUTOSELECT=NO" would imply that it may
  1793. // only be selected explicitly by the user, and we don't have a way to
  1794. // represent that in our model.
  1795. const defaultAttrValue = firstTag.getAttributeValue('DEFAULT');
  1796. const primary = defaultAttrValue == 'YES';
  1797. const channelsCount =
  1798. type == 'audio' ? this.getChannelsCount_(firstTag) : null;
  1799. const spatialAudio =
  1800. type == 'audio' ? this.isSpatialAudio_(firstTag) : false;
  1801. const characteristics = firstTag.getAttributeValue('CHARACTERISTICS');
  1802. const forcedAttrValue = firstTag.getAttributeValue('FORCED');
  1803. const forced = forcedAttrValue == 'YES';
  1804. const sampleRate = type == 'audio' ? this.getSampleRate_(firstTag) : null;
  1805. // TODO: Should we take into account some of the currently ignored
  1806. // attributes: INSTREAM-ID, Attribute descriptions: https://bit.ly/2lpjOhj
  1807. const streamInfo = this.createStreamInfo_(
  1808. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  1809. primary, name, channelsCount, /* closedCaptions= */ null,
  1810. characteristics, forced, sampleRate, spatialAudio);
  1811. if (streamInfo.stream) {
  1812. streamInfo.stream.groupId = globalGroupId;
  1813. }
  1814. if (this.groupIdToStreamInfosMap_.has(globalGroupId)) {
  1815. this.groupIdToStreamInfosMap_.get(globalGroupId).push(streamInfo);
  1816. } else {
  1817. this.groupIdToStreamInfosMap_.set(globalGroupId, [streamInfo]);
  1818. }
  1819. this.uriToStreamInfosMap_.set(key, streamInfo);
  1820. return streamInfo;
  1821. }
  1822. /**
  1823. * Parse EXT-X-IMAGE-STREAM-INF media tag into a Stream object.
  1824. *
  1825. * @param {shaka.hls.Tag} tag
  1826. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  1827. * @private
  1828. */
  1829. async createStreamInfoFromImageTag_(tag) {
  1830. goog.asserts.assert(tag.name == 'EXT-X-IMAGE-STREAM-INF',
  1831. 'Should only be called on image tags!');
  1832. /** @type {string} */
  1833. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  1834. const verbatimImagePlaylistUri = tag.getRequiredAttrValue('URI');
  1835. const codecs = tag.getAttributeValue('CODECS', 'jpeg') || '';
  1836. // Check if the stream has already been created as part of another Variant
  1837. // and return it if it has.
  1838. if (this.uriToStreamInfosMap_.has(verbatimImagePlaylistUri)) {
  1839. return this.uriToStreamInfosMap_.get(verbatimImagePlaylistUri);
  1840. }
  1841. const language = tag.getAttributeValue('LANGUAGE');
  1842. const name = tag.getAttributeValue('NAME');
  1843. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  1844. const streamInfo = this.createStreamInfo_(
  1845. this.globalId_++, [verbatimImagePlaylistUri], codecs, type, language,
  1846. /* primary= */ false, name, /* channelsCount= */ null,
  1847. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  1848. /* sampleRate= */ null, /* spatialAudio= */ false);
  1849. // Parse misc attributes.
  1850. const resolution = tag.getAttributeValue('RESOLUTION');
  1851. if (resolution) {
  1852. // The RESOLUTION tag represents the resolution of a single thumbnail, not
  1853. // of the entire sheet at once (like we expect in the output).
  1854. // So multiply by the layout size.
  1855. // Since we need to have generated the segment index for this, we can't
  1856. // lazy-load in this situation.
  1857. await streamInfo.stream.createSegmentIndex();
  1858. const reference = streamInfo.stream.segmentIndex.get(0);
  1859. const layout = reference.getTilesLayout();
  1860. if (layout) {
  1861. streamInfo.stream.width =
  1862. Number(resolution.split('x')[0]) * Number(layout.split('x')[0]);
  1863. streamInfo.stream.height =
  1864. Number(resolution.split('x')[1]) * Number(layout.split('x')[1]);
  1865. // TODO: What happens if there are multiple grids, with different
  1866. // layout sizes, inside this image stream?
  1867. }
  1868. }
  1869. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  1870. if (bandwidth) {
  1871. streamInfo.stream.bandwidth = Number(bandwidth);
  1872. }
  1873. this.uriToStreamInfosMap_.set(verbatimImagePlaylistUri, streamInfo);
  1874. return streamInfo;
  1875. }
  1876. /**
  1877. * Parse EXT-X-I-FRAME-STREAM-INF media tag into a Stream object.
  1878. *
  1879. * @param {shaka.hls.Tag} tag
  1880. * @return {!shaka.hls.HlsParser.StreamInfo}
  1881. * @private
  1882. */
  1883. createStreamInfoFromIframeTag_(tag) {
  1884. goog.asserts.assert(tag.name == 'EXT-X-I-FRAME-STREAM-INF',
  1885. 'Should only be called on iframe tags!');
  1886. /** @type {string} */
  1887. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  1888. const verbatimIFramePlaylistUri = tag.getRequiredAttrValue('URI');
  1889. const codecs = tag.getAttributeValue('CODECS') || '';
  1890. // Check if the stream has already been created as part of another Variant
  1891. // and return it if it has.
  1892. if (this.uriToStreamInfosMap_.has(verbatimIFramePlaylistUri)) {
  1893. return this.uriToStreamInfosMap_.get(verbatimIFramePlaylistUri);
  1894. }
  1895. const language = tag.getAttributeValue('LANGUAGE');
  1896. const name = tag.getAttributeValue('NAME');
  1897. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  1898. const streamInfo = this.createStreamInfo_(
  1899. this.globalId_++, [verbatimIFramePlaylistUri], codecs, type, language,
  1900. /* primary= */ false, name, /* channelsCount= */ null,
  1901. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  1902. /* sampleRate= */ null, /* spatialAudio= */ false);
  1903. // Parse misc attributes.
  1904. const resolution = tag.getAttributeValue('RESOLUTION');
  1905. const [width, height] = resolution ? resolution.split('x') : [null, null];
  1906. streamInfo.stream.width = Number(width) || undefined;
  1907. streamInfo.stream.height = Number(height) || undefined;
  1908. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  1909. if (bandwidth) {
  1910. streamInfo.stream.bandwidth = Number(bandwidth);
  1911. }
  1912. this.uriToStreamInfosMap_.set(verbatimIFramePlaylistUri, streamInfo);
  1913. return streamInfo;
  1914. }
  1915. /**
  1916. * Parse an EXT-X-STREAM-INF media tag into a Stream object.
  1917. *
  1918. * @param {!Array.<!shaka.hls.Tag>} tags
  1919. * @param {!Array.<string>} allCodecs
  1920. * @param {string} type
  1921. * @param {?string} language
  1922. * @param {?string} name
  1923. * @param {?number} channelsCount
  1924. * @param {?string} characteristics
  1925. * @param {?number} sampleRate
  1926. * @param {boolean} spatialAudio
  1927. * @return {!shaka.hls.HlsParser.StreamInfo}
  1928. * @private
  1929. */
  1930. createStreamInfoFromVariantTags_(tags, allCodecs, type, language, name,
  1931. channelsCount, characteristics, sampleRate, spatialAudio) {
  1932. const streamId = this.globalId_++;
  1933. const verbatimMediaPlaylistUris = [];
  1934. for (const tag of tags) {
  1935. goog.asserts.assert(tag.name == 'EXT-X-STREAM-INF',
  1936. 'Should only be called on variant tags!');
  1937. const uri = tag.getRequiredAttrValue('URI');
  1938. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  1939. if (this.contentSteeringManager_ && pathwayId) {
  1940. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  1941. }
  1942. verbatimMediaPlaylistUris.push(uri);
  1943. }
  1944. const key = verbatimMediaPlaylistUris.sort().join(',');
  1945. if (this.uriToStreamInfosMap_.has(key)) {
  1946. return this.uriToStreamInfosMap_.get(key);
  1947. }
  1948. const closedCaptions = this.getClosedCaptions_(tags[0], type);
  1949. const codecs = shaka.util.ManifestParserUtils.guessCodecs(type, allCodecs);
  1950. const streamInfo = this.createStreamInfo_(
  1951. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  1952. /* primary= */ false, name, channelsCount, closedCaptions,
  1953. characteristics, /* forced= */ false, sampleRate,
1954. spatialAudio);
  1955. this.uriToStreamInfosMap_.set(key, streamInfo);
  1956. return streamInfo;
  1957. }
  1958. /**
  1959. * @param {number} streamId
  1960. * @param {!Array.<string>} verbatimMediaPlaylistUris
  1961. * @param {string} codecs
  1962. * @param {string} type
  1963. * @param {?string} languageValue
  1964. * @param {boolean} primary
  1965. * @param {?string} name
  1966. * @param {?number} channelsCount
  1967. * @param {Map.<string, string>} closedCaptions
  1968. * @param {?string} characteristics
  1969. * @param {boolean} forced
  1970. * @param {?number} sampleRate
  1971. * @param {boolean} spatialAudio
  1972. * @return {!shaka.hls.HlsParser.StreamInfo}
  1973. * @private
  1974. */
  1975. createStreamInfo_(streamId, verbatimMediaPlaylistUris, codecs, type,
  1976. languageValue, primary, name, channelsCount, closedCaptions,
  1977. characteristics, forced, sampleRate, spatialAudio) {
  1978. // TODO: Refactor, too many parameters
  1979. // This stream is lazy-loaded inside the createSegmentIndex function.
  1980. // So we start out with a stream object that does not contain the actual
  1981. // segment index, then download when createSegmentIndex is called.
  1982. const stream = this.makeStreamObject_(streamId, codecs, type,
  1983. languageValue, primary, name, channelsCount, closedCaptions,
  1984. characteristics, forced, sampleRate, spatialAudio);
  1985. const redirectUris = [];
  1986. const getUris = () => {
  1987. if (this.contentSteeringManager_ &&
  1988. verbatimMediaPlaylistUris.length > 1) {
  1989. return this.contentSteeringManager_.getLocations(streamId);
  1990. }
  1991. return redirectUris.concat(shaka.hls.Utils.constructUris(
  1992. [this.masterPlaylistUri_], verbatimMediaPlaylistUris,
  1993. this.globalVariables_));
  1994. };
  1995. const streamInfo = {
  1996. stream,
  1997. type,
  1998. redirectUris,
  1999. getUris,
  2000. // These values are filled out or updated after lazy-loading:
  2001. minTimestamp: 0,
  2002. maxTimestamp: 0,
  2003. mediaSequenceToStartTime: new Map(),
  2004. canSkipSegments: false,
  2005. canBlockReload: false,
  2006. hasEndList: false,
  2007. firstSequenceNumber: -1,
  2008. nextMediaSequence: -1,
  2009. nextPart: -1,
  2010. loadedOnce: false,
  2011. };
  2012. /** @param {!AbortSignal} abortSignal */
  2013. const downloadSegmentIndex = async (abortSignal) => {
  2014. const uris = streamInfo.getUris();
  2015. // Download the actual manifest.
  2016. const response = await this.requestManifest_(
  2017. streamInfo.getUris(), /* isPlaylist= */ true);
  2018. if (abortSignal.aborted) {
  2019. return;
  2020. }
  2021. // Record the final URI after redirects.
  2022. const responseUri = response.uri;
  2023. if (responseUri != response.originalUri && !uris.includes(responseUri)) {
  2024. redirectUris.push(responseUri);
  2025. }
  2026. // Record the redirected, final URI of this media playlist when we parse
  2027. // it.
  2028. /** @type {!shaka.hls.Playlist} */
  2029. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  2030. let mimeType = undefined;
  2031. // If no codec info was provided in the manifest and codec guessing is
  2032. // disabled we try to get necessary info from the media data.
  2033. if (!this.codecInfoInManifest_ && this.config_.hls.disableCodecGuessing) {
  2034. const basicInfo =
  2035. await this.getMediaPlaylistBasicInfo_(playlist, getUris);
  2036. goog.asserts.assert(
  2037. type === basicInfo.type, 'Media types should match!');
  2038. mimeType = basicInfo.mimeType;
  2039. codecs = basicInfo.codecs;
  2040. }
  2041. const wasLive = this.isLive_();
  2042. const realStreamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  2043. streamId, playlist, getUris, responseUri, codecs,
  2044. type, languageValue, primary, name, channelsCount, closedCaptions,
  2045. characteristics, forced, sampleRate, spatialAudio, mimeType);
  2046. if (abortSignal.aborted) {
  2047. return;
  2048. }
  2049. const realStream = realStreamInfo.stream;
  2050. if (this.isLive_() && !wasLive) {
  2051. // Now that we know that the presentation is live, convert the timeline
  2052. // to live.
  2053. this.changePresentationTimelineToLive_(playlist);
  2054. }
  2055. // Copy values from the real stream info to our initial one.
  2056. streamInfo.minTimestamp = realStreamInfo.minTimestamp;
  2057. streamInfo.maxTimestamp = realStreamInfo.maxTimestamp;
  2058. streamInfo.canSkipSegments = realStreamInfo.canSkipSegments;
  2059. streamInfo.canBlockReload = realStreamInfo.canBlockReload;
  2060. streamInfo.hasEndList = realStreamInfo.hasEndList;
  2061. streamInfo.mediaSequenceToStartTime =
  2062. realStreamInfo.mediaSequenceToStartTime;
  2063. streamInfo.nextMediaSequence = realStreamInfo.nextMediaSequence;
  2064. streamInfo.nextPart = realStreamInfo.nextPart;
  2065. streamInfo.loadedOnce = true;
  2066. stream.segmentIndex = realStream.segmentIndex;
  2067. stream.encrypted = realStream.encrypted;
  2068. stream.drmInfos = realStream.drmInfos;
  2069. stream.keyIds = realStream.keyIds;
  2070. stream.mimeType = realStream.mimeType;
  2071. stream.bandwidth = stream.bandwidth || realStream.bandwidth;
  2072. stream.codecs = stream.codecs || realStream.codecs;
  2073. this.setFullTypeForStream_(stream);
  2074. // Since we lazy-loaded this content, the player may need to create new
  2075. // sessions for the DRM info in this stream.
  2076. if (stream.drmInfos.length) {
  2077. this.playerInterface_.newDrmInfo(stream);
  2078. }
  2079. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2080. if (type == ContentType.VIDEO || type == ContentType.AUDIO) {
  2081. for (const otherStreamInfo of this.uriToStreamInfosMap_.values()) {
  2082. if (!otherStreamInfo.loadedOnce && otherStreamInfo.type == type) {
  2083. // To aid manifest filtering, assume before loading that all video
  2084. // renditions have the same MIME type. (And likewise for audio.)
  2085. otherStreamInfo.stream.mimeType = realStream.mimeType;
  2086. this.setFullTypeForStream_(otherStreamInfo.stream);
  2087. }
  2088. }
  2089. }
  2090. if (type == ContentType.TEXT) {
  2091. const firstSegment = realStream.segmentIndex.get(0);
  2092. if (firstSegment && firstSegment.initSegmentReference) {
  2093. stream.mimeType = 'application/mp4';
  2094. this.setFullTypeForStream_(stream);
  2095. }
  2096. }
  2097. // Add finishing touches to the stream that can only be done once we have
  2098. // more full context on the media as a whole.
  2099. if (this.hasEnoughInfoToFinalizeStreams_()) {
  2100. if (!this.streamsFinalized_) {
  2101. // Mark this manifest as having been finalized, so we don't go through
  2102. // this whole process of finishing touches a second time.
  2103. this.streamsFinalized_ = true;
  2104. // Finalize all of the currently-loaded streams.
  2105. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  2106. const activeStreamInfos =
  2107. streamInfos.filter((s) => s.stream.segmentIndex);
  2108. this.finalizeStreams_(activeStreamInfos);
  2109. // With the addition of this new stream, we now have enough info to
  2110. // figure out how long the streams should be. So process all streams
  2111. // we have downloaded up until this point.
  2112. this.determineDuration_();
  2113. // Finally, start the update timer, if this asset has been determined
  2114. // to be a livestream.
  2115. const delay = this.getUpdatePlaylistDelay_();
  2116. if (delay > 0) {
  2117. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  2118. }
  2119. } else {
  2120. // We don't need to go through the full process; just finalize this
  2121. // single stream.
  2122. this.finalizeStreams_([streamInfo]);
  2123. }
  2124. }
  2125. };
  2126. /** @type {Promise} */
  2127. let creationPromise = null;
  2128. /** @type {!AbortController} */
  2129. let abortController = new AbortController();
  2130. const safeCreateSegmentIndex = () => {
  2131. // An operation is already in progress. The second and subsequent
  2132. // callers receive the same Promise as the first caller, and only one
  2133. // download operation will occur.
  2134. if (creationPromise) {
  2135. return creationPromise;
  2136. }
  2137. // Create a new AbortController to be able to cancel this specific
  2138. // download.
  2139. abortController = new AbortController();
  2140. // Create a Promise tied to the outcome of downloadSegmentIndex(). If
  2141. // downloadSegmentIndex is rejected, creationPromise will also be
  2142. // rejected.
  2143. creationPromise = new Promise((resolve) => {
  2144. resolve(downloadSegmentIndex(abortController.signal));
  2145. });
  2146. return creationPromise;
  2147. };
  2148. stream.createSegmentIndex = safeCreateSegmentIndex;
  2149. stream.closeSegmentIndex = () => {
  2150. // If we're mid-creation, cancel it.
  2151. if (creationPromise && !stream.segmentIndex) {
  2152. abortController.abort();
  2153. }
  2154. // If we have a segment index, release it.
  2155. if (stream.segmentIndex) {
  2156. stream.segmentIndex.release();
  2157. stream.segmentIndex = null;
  2158. }
  2159. // Clear the creation Promise so that a new operation can begin.
  2160. creationPromise = null;
  2161. };
  2162. return streamInfo;
  2163. }
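// Illustrative usage of the lazy-loading contract above (hypothetical
// caller code): repeated calls share one download, and closing aborts an
// in-flight request.
//
//   const p1 = streamInfo.stream.createSegmentIndex();
//   const p2 = streamInfo.stream.createSegmentIndex();  // same Promise as p1
//   streamInfo.stream.closeSegmentIndex();  // aborts if still downloading,
//                                           // otherwise releases the index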
  2164. /**
  2165. * @return {number}
  2166. * @private
  2167. */
  2168. getMinDuration_() {
  2169. let minDuration = Infinity;
  2170. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2171. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  2172. // Since everything is already offset to 0 (either by sync or by being
  2173. // VOD), only maxTimestamp is necessary to compute the duration.
  2174. minDuration = Math.min(minDuration, streamInfo.maxTimestamp);
  2175. }
  2176. }
  2177. return minDuration;
  2178. }
  2179. /**
  2180. * @return {number}
  2181. * @private
  2182. */
  2183. getLiveDuration_() {
  2184. let maxTimestamp = Infinity;
  2185. let minTimestamp = Infinity;
  2186. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2187. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  2188. maxTimestamp = Math.min(maxTimestamp, streamInfo.maxTimestamp);
  2189. minTimestamp = Math.min(minTimestamp, streamInfo.minTimestamp);
  2190. }
  2191. }
  2192. return maxTimestamp - minTimestamp;
  2193. }
  2194. /**
  2195. * @param {!Array.<!shaka.extern.Stream>} streams
  2196. * @private
  2197. */
  2198. notifySegmentsForStreams_(streams) {
  2199. const references = [];
  2200. for (const stream of streams) {
  2201. if (!stream.segmentIndex) {
  2202. // The stream was closed since the list of streams was built.
  2203. continue;
  2204. }
  2205. stream.segmentIndex.forEachTopLevelReference((reference) => {
  2206. references.push(reference);
  2207. });
  2208. }
  2209. this.presentationTimeline_.notifySegments(references);
  2210. }
  2211. /**
  2212. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  2213. * @private
  2214. */
  2215. finalizeStreams_(streamInfos) {
  2216. if (!this.isLive_()) {
  2217. const minDuration = this.getMinDuration_();
  2218. for (const streamInfo of streamInfos) {
  2219. streamInfo.stream.segmentIndex.fit(/* periodStart= */ 0, minDuration);
  2220. }
  2221. }
  2222. this.notifySegmentsForStreams_(streamInfos.map((s) => s.stream));
  2223. if (this.config_.hls.ignoreManifestProgramDateTime) {
  2224. this.syncStreamsWithSequenceNumber_(streamInfos);
  2225. } else {
  2226. this.syncStreamsWithProgramDateTime_(streamInfos);
  2227. if (this.config_.hls.ignoreManifestProgramDateTimeForTypes.length > 0) {
  2228. this.syncStreamsWithSequenceNumber_(streamInfos);
  2229. }
  2230. }
  2231. }
  2232. /**
  2233. * @param {string} type
  2234. * @return {boolean}
  2235. * @private
  2236. */
  2237. ignoreManifestProgramDateTimeFor_(type) {
  2238. if (this.config_.hls.ignoreManifestProgramDateTime) {
  2239. return true;
  2240. }
  2241. const forTypes = this.config_.hls.ignoreManifestProgramDateTimeForTypes;
  2242. return forTypes.includes(type);
  2243. }
  2244. /**
  2245. * There are some values on streams that can only be set once we know about
  2246. * both the video and audio content, if present.
  2247. * This checks if there is at least one video downloaded (if the media has
  2248. * video), and that there is at least one audio downloaded (if the media has
  2249. * audio).
  2250. * @return {boolean}
  2251. * @private
  2252. */
  2253. hasEnoughInfoToFinalizeStreams_() {
  2254. if (!this.manifest_) {
  2255. return false;
  2256. }
  2257. const videos = [];
  2258. const audios = [];
  2259. for (const variant of this.manifest_.variants) {
  2260. if (variant.video) {
  2261. videos.push(variant.video);
  2262. }
  2263. if (variant.audio) {
  2264. audios.push(variant.audio);
  2265. }
  2266. }
  2267. if (videos.length > 0 && !videos.some((stream) => stream.segmentIndex)) {
  2268. return false;
  2269. }
  2270. if (audios.length > 0 && !audios.some((stream) => stream.segmentIndex)) {
  2271. return false;
  2272. }
  2273. return true;
  2274. }
  2275. /**
  2276. * @param {number} streamId
  2277. * @param {!shaka.hls.Playlist} playlist
  2278. * @param {function():!Array.<string>} getUris
  2279. * @param {string} responseUri
  2280. * @param {string} codecs
  2281. * @param {string} type
  2282. * @param {?string} languageValue
  2283. * @param {boolean} primary
  2284. * @param {?string} name
  2285. * @param {?number} channelsCount
  2286. * @param {Map.<string, string>} closedCaptions
  2287. * @param {?string} characteristics
  2288. * @param {boolean} forced
  2289. * @param {?number} sampleRate
  2290. * @param {boolean} spatialAudio
  2291. * @param {(string|undefined)} mimeType
  2292. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  2293. * @private
  2294. */
  2295. async convertParsedPlaylistIntoStreamInfo_(streamId, playlist,
  2296. getUris, responseUri, codecs, type, languageValue, primary, name,
  2297. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2298. spatialAudio, mimeType = undefined) {
  2299. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  2300. // EXT-X-MEDIA and EXT-X-IMAGE-STREAM-INF tags should point to media
  2301. // playlists.
  2302. throw new shaka.util.Error(
  2303. shaka.util.Error.Severity.CRITICAL,
  2304. shaka.util.Error.Category.MANIFEST,
  2305. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  2306. }
  2307. /** @type {!Array.<!shaka.hls.Tag>} */
  2308. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  2309. 'EXT-X-DEFINE');
  2310. const mediaVariables =
  2311. this.parseMediaVariables_(variablesTags, responseUri);
  2312. goog.asserts.assert(playlist.segments != null,
  2313. 'Media playlist should have segments!');
  2314. this.determinePresentationType_(playlist);
  2315. if (this.isLive_()) {
  2316. this.determineLastTargetDuration_(playlist);
  2317. }
  2318. if (!mimeType) {
  2319. mimeType = await this.guessMimeType_(type, codecs, playlist,
  2320. mediaVariables, getUris);
  2321. }
  2322. const {drmInfos, keyIds, encrypted, aesEncrypted} =
  2323. await this.parseDrmInfo_(playlist, mimeType, getUris, mediaVariables);
  2324. if (encrypted && !drmInfos.length && !aesEncrypted) {
  2325. throw new shaka.util.Error(
  2326. shaka.util.Error.Severity.CRITICAL,
  2327. shaka.util.Error.Category.MANIFEST,
  2328. shaka.util.Error.Code.HLS_KEYFORMATS_NOT_SUPPORTED);
  2329. }
  2330. const stream = this.makeStreamObject_(streamId, codecs, type,
  2331. languageValue, primary, name, channelsCount, closedCaptions,
  2332. characteristics, forced, sampleRate, spatialAudio);
  2333. stream.encrypted = encrypted;
  2334. stream.drmInfos = drmInfos;
  2335. stream.keyIds = keyIds;
  2336. stream.mimeType = mimeType;
  2337. this.setFullTypeForStream_(stream);
  2338. const mediaSequenceToStartTime = this.isLive_() ?
  2339. this.mediaSequenceToStartTimeByType_.get(type) : new Map();
  2340. const {segments, bandwidth} = this.createSegments_(
  2341. playlist, stream, mediaSequenceToStartTime, mediaVariables, getUris,
  2342. type);
  2343. if (bandwidth) {
  2344. stream.bandwidth = bandwidth;
  2345. }
  2346. // This new calculation is necessary for Low Latency streams.
  2347. if (this.isLive_()) {
  2348. this.determineLastTargetDuration_(playlist);
  2349. }
  2350. const firstStartTime = segments[0].startTime;
  2351. const lastSegment = segments[segments.length - 1];
  2352. const lastEndTime = lastSegment.endTime;
  2353. /** @type {!shaka.media.SegmentIndex} */
  2354. const segmentIndex = new shaka.media.SegmentIndex(segments);
  2355. stream.segmentIndex = segmentIndex;
  2356. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  2357. playlist.tags, 'EXT-X-SERVER-CONTROL');
  2358. const canSkipSegments = serverControlTag ?
  2359. serverControlTag.getAttribute('CAN-SKIP-UNTIL') != null : false;
  2360. const canBlockReload = serverControlTag ?
  2361. serverControlTag.getAttribute('CAN-BLOCK-RELOAD') != null : false;
  2362. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2363. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2364. const {nextMediaSequence, nextPart} =
  2365. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  2366. return {
  2367. stream,
  2368. type,
  2369. redirectUris: [],
  2370. getUris,
  2371. minTimestamp: firstStartTime,
  2372. maxTimestamp: lastEndTime,
  2373. canSkipSegments,
  2374. canBlockReload,
  2375. hasEndList: false,
  2376. firstSequenceNumber: -1,
  2377. nextMediaSequence,
  2378. nextPart,
  2379. mediaSequenceToStartTime,
  2380. loadedOnce: false,
  2381. };
  2382. }
  2383. /**
  2384. * Get the next msn and part
  2385. *
  2386. * @param {number} mediaSequenceNumber
  2387. * @param {!Array.<!shaka.media.SegmentReference>} segments
2388. * @return {{nextMediaSequence: number, nextPart: number}}
  2389. * @private
  2390. */
  2391. getNextMediaSequenceAndPart_(mediaSequenceNumber, segments) {
  2392. const currentMediaSequence = mediaSequenceNumber + segments.length - 1;
  2393. let nextMediaSequence = currentMediaSequence;
  2394. let nextPart = -1;
  2395. if (!segments.length) {
  2396. nextMediaSequence++;
  2397. return {
  2398. nextMediaSequence,
  2399. nextPart,
  2400. };
  2401. }
  2402. const lastSegment = segments[segments.length - 1];
  2403. const partialReferences = lastSegment.partialReferences;
  2404. if (!lastSegment.partialReferences.length) {
  2405. nextMediaSequence++;
  2406. if (lastSegment.hasByterangeOptimization()) {
  2407. nextPart = 0;
  2408. }
  2409. return {
  2410. nextMediaSequence,
  2411. nextPart,
  2412. };
  2413. }
  2414. nextPart = partialReferences.length - 1;
  2415. const lastPartialReference =
  2416. partialReferences[partialReferences.length - 1];
  2417. if (!lastPartialReference.isPreload()) {
  2418. nextMediaSequence++;
  2419. nextPart = 0;
  2420. }
  2421. return {
  2422. nextMediaSequence,
  2423. nextPart,
  2424. };
  2425. }
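// Worked example (hypothetical values): with EXT-X-MEDIA-SEQUENCE=100 and 5
// parsed segments whose last segment has no partial references,
// currentMediaSequence is 104 and the method returns
// {nextMediaSequence: 105, nextPart: -1} (nextPart: 0 when the byterange
// optimization is in use). If the last segment ends in a preload partial
// reference, nextMediaSequence stays 104 and nextPart points at that part.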
  2426. /**
  2427. * Creates a stream object with the given parameters.
  2428. * The parameters that are passed into here are only the things that can be
  2429. * known without downloading the media playlist; other values must be set
  2430. * manually on the object after creation.
  2431. * @param {number} id
  2432. * @param {string} codecs
  2433. * @param {string} type
  2434. * @param {?string} languageValue
  2435. * @param {boolean} primary
  2436. * @param {?string} name
  2437. * @param {?number} channelsCount
  2438. * @param {Map.<string, string>} closedCaptions
  2439. * @param {?string} characteristics
  2440. * @param {boolean} forced
  2441. * @param {?number} sampleRate
  2442. * @param {boolean} spatialAudio
  2443. * @return {!shaka.extern.Stream}
  2444. * @private
  2445. */
  2446. makeStreamObject_(id, codecs, type, languageValue, primary, name,
  2447. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2448. spatialAudio) {
  2449. // Fill out a "best-guess" mimeType, for now. It will be replaced once the
  2450. // stream is lazy-loaded.
  2451. const mimeType = this.guessMimeTypeBeforeLoading_(type, codecs) ||
  2452. this.guessMimeTypeFallback_(type);
  2453. const roles = [];
  2454. if (characteristics) {
  2455. for (const characteristic of characteristics.split(',')) {
  2456. roles.push(characteristic);
  2457. }
  2458. }
  2459. let kind = undefined;
  2460. let accessibilityPurpose = null;
  2461. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2462. if (roles.includes('public.accessibility.transcribes-spoken-dialog') &&
  2463. roles.includes('public.accessibility.describes-music-and-sound')) {
  2464. kind = shaka.util.ManifestParserUtils.TextStreamKind.CLOSED_CAPTION;
  2465. } else {
  2466. kind = shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE;
  2467. }
  2468. } else {
  2469. if (roles.includes('public.accessibility.describes-video')) {
  2470. accessibilityPurpose =
  2471. shaka.media.ManifestParser.AccessibilityPurpose.VISUALLY_IMPAIRED;
  2472. }
  2473. }
  2474. // If there are no roles, and we have defaulted to the subtitle "kind" for
  2475. // this track, add the implied subtitle role.
  2476. if (!roles.length &&
  2477. kind === shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE) {
  2478. roles.push(shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE);
  2479. }
  2480. const stream = {
2481. id,
  2482. originalId: name,
  2483. groupId: null,
  2484. createSegmentIndex: () => Promise.resolve(),
  2485. segmentIndex: null,
  2486. mimeType,
  2487. codecs,
2488. kind,
  2490. encrypted: false,
  2491. drmInfos: [],
  2492. keyIds: new Set(),
  2493. language: this.getLanguage_(languageValue),
  2494. originalLanguage: languageValue,
  2495. label: name, // For historical reasons, since before "originalId".
  2496. type,
  2497. primary,
  2498. // TODO: trick mode
  2499. trickModeVideo: null,
  2500. emsgSchemeIdUris: null,
  2501. frameRate: undefined,
  2502. pixelAspectRatio: undefined,
  2503. width: undefined,
  2504. height: undefined,
  2505. bandwidth: undefined,
  2506. roles,
  2507. forced,
  2508. channelsCount,
  2509. audioSamplingRate: sampleRate,
  2510. spatialAudio,
  2511. closedCaptions,
  2512. hdr: undefined,
  2513. videoLayout: undefined,
  2514. tilesLayout: undefined,
  2515. accessibilityPurpose: accessibilityPurpose,
  2516. external: false,
  2517. fastSwitching: false,
  2518. fullMimeTypes: new Set(),
  2519. };
  2520. this.setFullTypeForStream_(stream);
  2521. return stream;
  2522. }
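// For example (hypothetical attribute values): a text rendition with
// CHARACTERISTICS="public.accessibility.transcribes-spoken-dialog,
// public.accessibility.describes-music-and-sound" gets both strings as roles
// and kind CLOSED_CAPTION; a text rendition with no CHARACTERISTICS gets
// kind SUBTITLE plus the implied 'subtitle' role.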
  2523. /**
  2524. * @param {!shaka.hls.Playlist} playlist
  2525. * @param {string} mimeType
  2526. * @param {function():!Array.<string>} getUris
  2527. * @param {?Map.<string, string>=} variables
  2528. * @return {Promise.<{
  2529. * drmInfos: !Array.<shaka.extern.DrmInfo>,
  2530. * keyIds: !Set.<string>,
  2531. * encrypted: boolean,
  2532. * aesEncrypted: boolean
  2533. * }>}
  2534. * @private
  2535. */
  2536. async parseDrmInfo_(playlist, mimeType, getUris, variables) {
  2537. /** @type {!Map<!shaka.hls.Tag, ?shaka.media.InitSegmentReference>} */
  2538. const drmTagsMap = new Map();
  2539. if (playlist.segments) {
  2540. for (const segment of playlist.segments) {
  2541. const segmentKeyTags = shaka.hls.Utils.filterTagsByName(segment.tags,
  2542. 'EXT-X-KEY');
  2543. let initSegmentRef = null;
  2544. if (segmentKeyTags.length) {
  2545. initSegmentRef = this.getInitSegmentReference_(playlist,
  2546. segment.tags, getUris, variables);
  2547. for (const segmentKeyTag of segmentKeyTags) {
  2548. drmTagsMap.set(segmentKeyTag, initSegmentRef);
  2549. }
  2550. }
  2551. }
  2552. }
  2553. let encrypted = false;
  2554. let aesEncrypted = false;
  2555. /** @type {!Array.<shaka.extern.DrmInfo>}*/
  2556. const drmInfos = [];
  2557. const keyIds = new Set();
  2558. for (const [key, value] of drmTagsMap) {
  2559. const drmTag = /** @type {!shaka.hls.Tag} */ (key);
  2560. const initSegmentRef =
  2561. /** @type {?shaka.media.InitSegmentReference} */ (value);
  2562. const method = drmTag.getRequiredAttrValue('METHOD');
  2563. if (method != 'NONE') {
  2564. encrypted = true;
  2565. // According to the HLS spec, KEYFORMAT is optional and implicitly
  2566. // defaults to "identity".
  2567. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  2568. const keyFormat =
  2569. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  2570. let drmInfo = null;
  2571. if (this.isAesMethod_(method)) {
  2572. // These keys are handled separately.
  2573. aesEncrypted = true;
  2574. continue;
  2575. } else if (keyFormat == 'identity') {
  2576. // eslint-disable-next-line no-await-in-loop
  2577. drmInfo = await this.identityDrmParser_(
  2578. drmTag, mimeType, getUris, initSegmentRef, variables);
  2579. } else {
  2580. const drmParser =
  2581. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  2582. drmInfo = drmParser ? drmParser(drmTag, mimeType) : null;
  2583. }
  2584. if (drmInfo) {
  2585. if (drmInfo.keyIds) {
  2586. for (const keyId of drmInfo.keyIds) {
  2587. keyIds.add(keyId);
  2588. }
  2589. }
  2590. drmInfos.push(drmInfo);
  2591. } else {
  2592. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  2593. }
  2594. }
  2595. }
  2596. return {drmInfos, keyIds, encrypted, aesEncrypted};
  2597. }
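// For example (hypothetical tags): METHOD=NONE leaves encrypted unset for
// that tag; a whole-segment AES method such as AES-128 only sets
// aesEncrypted here (the key itself is fetched per segment later); any other
// method with no KEYFORMAT attribute defaults to "identity" and is routed
// through identityDrmParser_.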
  2598. /**
  2599. * @param {!shaka.hls.Tag} drmTag
  2600. * @param {!shaka.hls.Playlist} playlist
  2601. * @param {function():!Array.<string>} getUris
  2602. * @param {?Map.<string, string>=} variables
  2603. * @return {!shaka.extern.aesKey}
  2604. * @private
  2605. */
  2606. parseAESDrmTag_(drmTag, playlist, getUris, variables) {
  2607. // Check if the Web Crypto API is available.
  2608. if (!window.crypto || !window.crypto.subtle) {
  2609. shaka.log.alwaysWarn('Web Crypto API is not available to decrypt ' +
  2610. 'AES. (Web Crypto only exists in secure origins like https)');
  2611. throw new shaka.util.Error(
  2612. shaka.util.Error.Severity.CRITICAL,
  2613. shaka.util.Error.Category.MANIFEST,
  2614. shaka.util.Error.Code.NO_WEB_CRYPTO_API);
  2615. }
  2616. // HLS RFC 8216 Section 5.2:
  2617. // An EXT-X-KEY tag with a KEYFORMAT of "identity" that does not have an IV
  2618. // attribute indicates that the Media Sequence Number is to be used as the
  2619. // IV when decrypting a Media Segment, by putting its big-endian binary
  2620. // representation into a 16-octet (128-bit) buffer and padding (on the left)
  2621. // with zeros.
  2622. let firstMediaSequenceNumber = 0;
  2623. let iv;
  2624. const ivHex = drmTag.getAttributeValue('IV', '');
  2625. if (!ivHex) {
  2626. // Media Sequence Number will be used as IV.
  2627. firstMediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2628. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2629. } else {
  2630. // Exclude 0x at the start of string.
  2631. iv = shaka.util.Uint8ArrayUtils.fromHex(ivHex.substr(2));
  2632. if (iv.byteLength != 16) {
  2633. throw new shaka.util.Error(
  2634. shaka.util.Error.Severity.CRITICAL,
  2635. shaka.util.Error.Category.MANIFEST,
  2636. shaka.util.Error.Code.AES_128_INVALID_IV_LENGTH);
  2637. }
  2638. }
  2639. const aesKeyInfoKey = `${drmTag.toString()}-${firstMediaSequenceNumber}`;
  2640. if (!this.aesKeyInfoMap_.has(aesKeyInfoKey)) {
  2641. // Default AES-128
  2642. const keyInfo = {
  2643. bitsKey: 128,
  2644. blockCipherMode: 'CBC',
  2645. iv,
  2646. firstMediaSequenceNumber,
  2647. };
  2648. const method = drmTag.getRequiredAttrValue('METHOD');
  2649. switch (method) {
  2650. case 'AES-256':
  2651. keyInfo.bitsKey = 256;
  2652. break;
  2653. case 'AES-256-CTR':
  2654. keyInfo.bitsKey = 256;
  2655. keyInfo.blockCipherMode = 'CTR';
  2656. break;
  2657. }
  2658. // Don't download the key object until the segment is parsed, to avoid a
  2659. // startup delay for long manifests with lots of keys.
  2660. keyInfo.fetchKey = async () => {
  2661. const keyUris = shaka.hls.Utils.constructSegmentUris(
  2662. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  2663. const keyMapKey = keyUris.sort().join('');
  2664. if (!this.aesKeyMap_.has(keyMapKey)) {
  2665. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  2666. const request = shaka.net.NetworkingEngine.makeRequest(
  2667. keyUris, this.config_.retryParameters);
  2668. const keyResponse = this.makeNetworkRequest_(request, requestType);
  2669. this.aesKeyMap_.set(keyMapKey, keyResponse);
  2670. }
  2671. const keyResponse = await this.aesKeyMap_.get(keyMapKey);
  2672. // keyResponse.status is undefined when URI is "data:text/plain;base64,"
  2673. if (!keyResponse.data ||
  2674. keyResponse.data.byteLength != (keyInfo.bitsKey / 8)) {
  2675. throw new shaka.util.Error(
  2676. shaka.util.Error.Severity.CRITICAL,
  2677. shaka.util.Error.Category.MANIFEST,
  2678. shaka.util.Error.Code.AES_128_INVALID_KEY_LENGTH);
  2679. }
  2680. const algorithm = {
  2681. name: keyInfo.blockCipherMode == 'CTR' ? 'AES-CTR' : 'AES-CBC',
  2682. length: keyInfo.bitsKey,
  2683. };
  2684. keyInfo.cryptoKey = await window.crypto.subtle.importKey(
  2685. 'raw', keyResponse.data, algorithm, true, ['decrypt']);
  2686. keyInfo.fetchKey = undefined; // No longer needed.
  2687. };
  2688. this.aesKeyInfoMap_.set(aesKeyInfoKey, keyInfo);
  2689. }
  2690. return this.aesKeyInfoMap_.get(aesKeyInfoKey);
  2691. }
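// For example, a hypothetical tag such as
//   #EXT-X-KEY:METHOD=AES-128,URI="key.bin",IV=0x000102030405060708090A0B0C0D0E0F
// supplies the 16-byte IV directly from the hex string; when IV is omitted,
// the playlist's EXT-X-MEDIA-SEQUENCE value is recorded and each segment's
// media sequence number is later used as the IV, as described in RFC 8216.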
  2692. /**
  2693. * @param {!shaka.hls.Playlist} playlist
  2694. * @private
  2695. */
  2696. determinePresentationType_(playlist) {
  2697. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  2698. const presentationTypeTag =
  2699. shaka.hls.Utils.getFirstTagWithName(playlist.tags,
  2700. 'EXT-X-PLAYLIST-TYPE');
  2701. const endListTag =
  2702. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  2703. const isVod = (presentationTypeTag && presentationTypeTag.value == 'VOD') ||
  2704. endListTag;
  2705. const isEvent = presentationTypeTag &&
  2706. presentationTypeTag.value == 'EVENT' && !isVod;
  2707. const isLive = !isVod && !isEvent;
  2708. if (isVod) {
  2709. this.setPresentationType_(PresentationType.VOD);
  2710. } else {
  2711. // If it's not VOD, it must be presentation type LIVE or an ongoing EVENT.
  2712. if (isLive) {
  2713. this.setPresentationType_(PresentationType.LIVE);
  2714. } else {
  2715. this.setPresentationType_(PresentationType.EVENT);
  2716. }
  2717. }
  2718. }
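// For example (hypothetical playlists): a playlist containing EXT-X-ENDLIST
// (or EXT-X-PLAYLIST-TYPE:VOD) is treated as VOD; one with
// EXT-X-PLAYLIST-TYPE:EVENT and no EXT-X-ENDLIST is treated as EVENT; a
// playlist with neither tag is treated as LIVE.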
  2719. /**
  2720. * @param {!shaka.hls.Playlist} playlist
  2721. * @private
  2722. */
  2723. determineLastTargetDuration_(playlist) {
  2724. let lastTargetDuration = Infinity;
  2725. const segments = playlist.segments;
  2726. if (segments.length) {
  2727. let segmentIndex = segments.length - 1;
  2728. while (segmentIndex >= 0) {
  2729. const segment = segments[segmentIndex];
  2730. const extinfTag =
  2731. shaka.hls.Utils.getFirstTagWithName(segment.tags, 'EXTINF');
  2732. if (extinfTag) {
  2733. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  2734. // We're interested in the duration part.
  2735. const extinfValues = extinfTag.value.split(',');
  2736. lastTargetDuration = Number(extinfValues[0]);
  2737. break;
  2738. }
  2739. segmentIndex--;
  2740. }
  2741. }
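// Worked example (hypothetical tag): '#EXTINF:6.006,segment-42' splits into
// ['6.006', 'segment-42'], so lastTargetDuration becomes 6.006 seconds.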
  2742. const targetDurationTag = this.getRequiredTag_(playlist.tags,
  2743. 'EXT-X-TARGETDURATION');
  2744. const targetDuration = Number(targetDurationTag.value);
  2745. const partialTargetDurationTag =
  2746. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-PART-INF');
  2747. if (partialTargetDurationTag) {
  2748. this.partialTargetDuration_ = Number(
  2749. partialTargetDurationTag.getRequiredAttrValue('PART-TARGET'));
  2750. }
  2751. // Get the server-recommended min distance from the live edge.
  2752. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  2753. playlist.tags, 'EXT-X-SERVER-CONTROL');
  2754. // According to the HLS spec, updates should not happen more often than
  2755. // once in targetDuration. It also requires us to only update the active
  2756. // variant. We might implement that later, but for now every variant
  2757. // will be updated. To get the update period, choose the smallest
  2758. // targetDuration value across all playlists.
  2759. // 1. Update the shortest one to use as update period and segment
  2760. // availability time (for LIVE).
  2761. if (this.lowLatencyMode_ && this.partialTargetDuration_) {
  2762. // For low latency streaming, use the partial segment target duration.
  2763. if (this.lowLatencyByterangeOptimization_) {
  2764. // We always have at least 1 partial segment part, and most servers
  2765. // allow you to make a request with _HLS_msn=X&_HLS_part=0 with a
  2766. // distance of 4 partial segments. With this we ensure that we
  2767. // obtain the minimum latency in this type of case.
  2768. if (this.partialTargetDuration_ * 5 <= lastTargetDuration) {
  2769. this.lastTargetDuration_ = Math.min(
  2770. this.partialTargetDuration_, this.lastTargetDuration_);
  2771. } else {
  2772. this.lastTargetDuration_ = Math.min(
  2773. lastTargetDuration, this.lastTargetDuration_);
  2774. }
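// Worked example (hypothetical values): with PART-TARGET=1s and a 6s last
// segment, 1 * 5 <= 6, so the 1s partial duration drives the update period;
// with PART-TARGET=2s and the same 6s segment, 2 * 5 > 6, so the full
// segment duration is used instead.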
  2775. } else {
  2776. this.lastTargetDuration_ = Math.min(
  2777. this.partialTargetDuration_, this.lastTargetDuration_);
  2778. }
  2779. // Use 'PART-HOLD-BACK' as the presentation delay for low latency mode.
  2780. this.lowLatencyPresentationDelay_ = serverControlTag ? Number(
  2781. serverControlTag.getRequiredAttrValue('PART-HOLD-BACK')) : 0;
  2782. } else {
  2783. this.lastTargetDuration_ = Math.min(
  2784. lastTargetDuration, this.lastTargetDuration_);
2785. // Use 'HOLD-BACK' as the default presentation delay, if defined.
  2786. const holdBack = serverControlTag ?
  2787. serverControlTag.getAttribute('HOLD-BACK') : null;
  2788. this.presentationDelay_ = holdBack ? Number(holdBack.value) : 0;
  2789. }
2790. // 2. Update the longest target duration, if needed, to use as a
2791. // presentation delay later.
  2792. this.maxTargetDuration_ = Math.max(
  2793. targetDuration, this.maxTargetDuration_);
  2794. }
  2795. /**
  2796. * @param {!shaka.hls.Playlist} playlist
  2797. * @private
  2798. */
  2799. changePresentationTimelineToLive_(playlist) {
  2800. // The live edge will be calculated from segments, so we don't need to
  2801. // set a presentation start time. We will assert later that this is
  2802. // working as expected.
  2803. // The HLS spec (RFC 8216) states in 6.3.3:
  2804. //
  2805. // "The client SHALL choose which Media Segment to play first ... the
  2806. // client SHOULD NOT choose a segment that starts less than three target
  2807. // durations from the end of the Playlist file. Doing so can trigger
  2808. // playback stalls."
  2809. //
  2810. // We accomplish this in our DASH-y model by setting a presentation
2811. // delay of the configured value, or 3 segment durations if not configured.
  2812. // This will be the "live edge" of the presentation.
  2813. let presentationDelay;
  2814. if (this.config_.defaultPresentationDelay) {
  2815. presentationDelay = this.config_.defaultPresentationDelay;
  2816. } else if (this.lowLatencyPresentationDelay_) {
  2817. presentationDelay = this.lowLatencyPresentationDelay_;
  2818. } else if (this.presentationDelay_) {
  2819. presentationDelay = this.presentationDelay_;
  2820. } else {
  2821. const playlistSegments = playlist.segments.length;
  2822. let delaySegments = this.config_.hls.liveSegmentsDelay;
  2823. if (delaySegments > (playlistSegments - 2)) {
  2824. delaySegments = Math.max(1, playlistSegments - 2);
  2825. }
  2826. presentationDelay = this.maxTargetDuration_ * delaySegments;
  2827. }
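// Example (hypothetical values): with hls.liveSegmentsDelay = 3, a
// 10-segment playlist and a 6s max target duration, the delay is 3 * 6 =
// 18s; with only 3 segments, delaySegments is clamped to 1, giving 6s.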
  2828. this.presentationTimeline_.setPresentationStartTime(0);
  2829. this.presentationTimeline_.setDelay(presentationDelay);
  2830. this.presentationTimeline_.setStatic(false);
  2831. }
  2832. /**
2833. * Get the InitSegmentReference for a segment if it has an EXT-X-MAP tag.
  2834. * @param {!shaka.hls.Playlist} playlist
  2835. * @param {!Array.<!shaka.hls.Tag>} tags Segment tags
  2836. * @param {function():!Array.<string>} getUris
  2837. * @param {?Map.<string, string>=} variables
  2838. * @return {shaka.media.InitSegmentReference}
  2839. * @private
  2840. */
  2841. getInitSegmentReference_(playlist, tags, getUris, variables) {
  2842. /** @type {?shaka.hls.Tag} */
  2843. const mapTag = shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-MAP');
  2844. if (!mapTag) {
  2845. return null;
  2846. }
  2847. // Map tag example: #EXT-X-MAP:URI="main.mp4",BYTERANGE="720@0"
  2848. const verbatimInitSegmentUri = mapTag.getRequiredAttrValue('URI');
  2849. const absoluteInitSegmentUris = shaka.hls.Utils.constructSegmentUris(
  2850. getUris(), verbatimInitSegmentUri, variables);
  2851. const mapTagKey = [
  2852. absoluteInitSegmentUris.toString(),
  2853. mapTag.getAttributeValue('BYTERANGE', ''),
  2854. ].join('-');
  2855. if (!this.mapTagToInitSegmentRefMap_.has(mapTagKey)) {
  2856. /** @type {shaka.extern.aesKey|undefined} */
  2857. let aesKey = undefined;
  2858. let byteRangeTag = null;
  2859. for (const tag of tags) {
  2860. if (tag.name == 'EXT-X-KEY') {
  2861. if (this.isAesMethod_(tag.getRequiredAttrValue('METHOD')) &&
  2862. tag.id < mapTag.id) {
  2863. aesKey =
  2864. this.parseAESDrmTag_(tag, playlist, getUris, variables);
  2865. }
  2866. } else if (tag.name == 'EXT-X-BYTERANGE' && tag.id < mapTag.id) {
  2867. byteRangeTag = tag;
  2868. }
  2869. }
  2870. const initSegmentRef = this.createInitSegmentReference_(
  2871. absoluteInitSegmentUris, mapTag, byteRangeTag, aesKey);
  2872. this.mapTagToInitSegmentRefMap_.set(mapTagKey, initSegmentRef);
  2873. }
  2874. return this.mapTagToInitSegmentRefMap_.get(mapTagKey);
  2875. }
  2876. /**
  2877. * Create an InitSegmentReference object for the EXT-X-MAP tag in the media
  2878. * playlist.
  2879. * @param {!Array.<string>} absoluteInitSegmentUris
  2880. * @param {!shaka.hls.Tag} mapTag EXT-X-MAP
  2881. * @param {shaka.hls.Tag=} byteRangeTag EXT-X-BYTERANGE
  2882. * @param {shaka.extern.aesKey=} aesKey
  2883. * @return {!shaka.media.InitSegmentReference}
  2884. * @private
  2885. */
  2886. createInitSegmentReference_(absoluteInitSegmentUris, mapTag, byteRangeTag,
  2887. aesKey) {
  2888. let startByte = 0;
  2889. let endByte = null;
  2890. let byterange = mapTag.getAttributeValue('BYTERANGE');
  2891. if (!byterange && byteRangeTag) {
  2892. byterange = byteRangeTag.value;
  2893. }
  2894. // If a BYTERANGE attribute is not specified, the segment consists
  2895. // of the entire resource.
  2896. if (byterange) {
  2897. const blocks = byterange.split('@');
  2898. const byteLength = Number(blocks[0]);
  2899. startByte = Number(blocks[1]);
  2900. endByte = startByte + byteLength - 1;
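// e.g. BYTERANGE="720@0" (the example tag above) gives byteLength 720,
// startByte 0 and endByte 719.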
  2901. if (aesKey) {
2902. // A MAP segment encrypted with an AES method, when served with
2903. // HTTP Range, has the unencrypted size specified in the range.
  2904. // See: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
  2905. const length = (endByte + 1) - startByte;
  2906. if (length % 16) {
  2907. endByte += (16 - (length % 16));
  2908. }
  2909. }
  2910. }
  2911. const initSegmentRef = new shaka.media.InitSegmentReference(
  2912. () => absoluteInitSegmentUris,
  2913. startByte,
  2914. endByte,
  2915. /* mediaQuality= */ null,
  2916. /* timescale= */ null,
  2917. /* segmentData= */ null,
  2918. aesKey);
  2919. return initSegmentRef;
  2920. }
  2921. /**
  2922. * Parses one shaka.hls.Segment object into a shaka.media.SegmentReference.
  2923. *
  2924. * @param {shaka.media.InitSegmentReference} initSegmentReference
  2925. * @param {shaka.media.SegmentReference} previousReference
  2926. * @param {!shaka.hls.Segment} hlsSegment
  2927. * @param {number} startTime
  2928. * @param {!Map.<string, string>} variables
  2929. * @param {!shaka.hls.Playlist} playlist
  2930. * @param {shaka.extern.Stream} stream
  2931. * @param {function():!Array.<string>} getUris
  2932. * @param {shaka.extern.aesKey=} aesKey
  2933. * @return {shaka.media.SegmentReference}
  2934. * @private
  2935. */
  2936. createSegmentReference_(
  2937. initSegmentReference, previousReference, hlsSegment, startTime,
  2938. variables, playlist, stream, getUris, aesKey) {
  2939. const tags = hlsSegment.tags;
  2940. const extinfTag =
  2941. shaka.hls.Utils.getFirstTagWithName(tags, 'EXTINF');
  2942. let endTime = 0;
  2943. let startByte = 0;
  2944. let endByte = null;
  2945. if (hlsSegment.partialSegments.length && !this.lowLatencyMode_) {
  2946. shaka.log.alwaysWarn('Low-latency HLS live stream detected, but ' +
  2947. 'low-latency streaming mode is not enabled in Shaka ' +
  2948. 'Player. Set streaming.lowLatencyMode configuration to ' +
  2949. 'true, and see https://bit.ly/3clctcj for details.');
  2950. }
  2951. let syncTime = null;
  2952. if (!this.config_.hls.ignoreManifestProgramDateTime) {
  2953. const dateTimeTag =
  2954. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-PROGRAM-DATE-TIME');
  2955. if (dateTimeTag && dateTimeTag.value) {
  2956. syncTime = shaka.util.TXml.parseDate(dateTimeTag.value);
  2957. goog.asserts.assert(syncTime != null,
  2958. 'EXT-X-PROGRAM-DATE-TIME format not valid');
  2959. }
  2960. }
  2961. let status = shaka.media.SegmentReference.Status.AVAILABLE;
  2962. if (shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-GAP')) {
  2963. status = shaka.media.SegmentReference.Status.MISSING;
  2964. }
  2965. if (!extinfTag) {
  2966. if (hlsSegment.partialSegments.length == 0) {
2967. // An EXTINF tag must be present if the segment has no partial segments.
  2968. throw new shaka.util.Error(
  2969. shaka.util.Error.Severity.CRITICAL,
  2970. shaka.util.Error.Category.MANIFEST,
  2971. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, 'EXTINF');
  2972. } else if (!this.lowLatencyMode_) {
  2973. // Without EXTINF and without low-latency mode, partial segments get
  2974. // ignored.
  2975. return null;
  2976. }
  2977. }
  2978. // Create SegmentReferences for the partial segments.
  2979. let partialSegmentRefs = [];
  2980. // Optimization for LL-HLS with byterange
  2981. // More info in https://tinyurl.com/hls-open-byte-range
  2982. let segmentWithByteRangeOptimization = false;
  2983. let getUrisOptimization = null;
  2984. let somePartialSegmentWithGap = false;
  2985. let isPreloadSegment = false;
  2986. if (this.lowLatencyMode_ && hlsSegment.partialSegments.length) {
  2987. const byterangeOptimizationSupport = (stream.mimeType == 'video/mp4' ||
  2988. stream.mimeType == 'audio/mp4') && window.ReadableStream &&
  2989. this.config_.hls.allowLowLatencyByteRangeOptimization;
  2990. let partialSyncTime = syncTime;
  2991. for (let i = 0; i < hlsSegment.partialSegments.length; i++) {
  2992. const item = hlsSegment.partialSegments[i];
  2993. const pPreviousReference = i == 0 ?
  2994. previousReference : partialSegmentRefs[partialSegmentRefs.length - 1];
  2995. const pStartTime = (i == 0) ? startTime : pPreviousReference.endTime;
  2996. // If DURATION is missing from this partial segment, use the target
  2997. // partial duration from the top of the playlist, which is a required
  2998. // attribute for content with partial segments.
  2999. const pDuration = Number(item.getAttributeValue('DURATION')) ||
  3000. this.partialTargetDuration_;
  3001. // If for some reason we have neither an explicit duration, nor a target
  3002. // partial duration, we should SKIP this partial segment to avoid
  3003. // duplicating content in the presentation timeline.
  3004. if (!pDuration) {
  3005. continue;
  3006. }
  3007. const pEndTime = pStartTime + pDuration;
  3008. let pStartByte = 0;
  3009. let pEndByte = null;
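// Illustrative (hypothetical) preload hint handled below:
//   #EXT-X-PRELOAD-HINT:TYPE=PART,URI="p7.mp4",BYTERANGE-START=2048,BYTERANGE-LENGTH=1024
// yields pStartByte 2048 and pEndByte 3071.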
  3010. if (item.name == 'EXT-X-PRELOAD-HINT') {
  3011. // A preload hinted partial segment may have byterange start info.
  3012. const pByterangeStart = item.getAttributeValue('BYTERANGE-START');
  3013. pStartByte = pByterangeStart ? Number(pByterangeStart) : 0;
  3014. // A preload hinted partial segment may have byterange length info.
  3015. const pByterangeLength = item.getAttributeValue('BYTERANGE-LENGTH');
  3016. if (pByterangeLength) {
  3017. pEndByte = pStartByte + Number(pByterangeLength) - 1;
  3018. } else if (pStartByte) {
  3019. // If we have a non-zero start byte, but no end byte, follow the
  3020. // recommendation of https://tinyurl.com/hls-open-byte-range and
  3021. // set the end byte explicitly to a large integer.
  3022. pEndByte = Number.MAX_SAFE_INTEGER;
  3023. }
  3024. } else {
  3025. const pByterange = item.getAttributeValue('BYTERANGE');
  3026. [pStartByte, pEndByte] =
  3027. this.parseByteRange_(pPreviousReference, pByterange);
  3028. }
  3029. const pUri = item.getAttributeValue('URI');
  3030. if (!pUri) {
  3031. continue;
  3032. }
  3033. let partialStatus = shaka.media.SegmentReference.Status.AVAILABLE;
  3034. if (item.getAttributeValue('GAP') == 'YES') {
  3035. partialStatus = shaka.media.SegmentReference.Status.MISSING;
  3036. somePartialSegmentWithGap = true;
  3037. }
  3038. let uris = null;
  3039. const getPartialUris = () => {
  3040. if (uris == null) {
  3041. goog.asserts.assert(pUri, 'Partial uri should be defined!');
  3042. uris = shaka.hls.Utils.constructSegmentUris(
  3043. getUris(), pUri, variables);
  3044. }
  3045. return uris;
  3046. };
  3047. if (byterangeOptimizationSupport &&
  3048. pStartByte >= 0 && pEndByte != null) {
  3049. getUrisOptimization = getPartialUris;
  3050. segmentWithByteRangeOptimization = true;
  3051. }
  3052. const partial = new shaka.media.SegmentReference(
  3053. pStartTime,
  3054. pEndTime,
  3055. getPartialUris,
  3056. pStartByte,
  3057. pEndByte,
  3058. initSegmentReference,
  3059. /* timestampOffset= */ 0,
  3060. /* appendWindowStart= */ 0,
  3061. /* appendWindowEnd= */ Infinity,
  3062. /* partialReferences= */ [],
  3063. /* tilesLayout= */ '',
  3064. /* tileDuration= */ null,
  3065. partialSyncTime,
  3066. partialStatus,
  3067. aesKey);
  3068. if (item.name == 'EXT-X-PRELOAD-HINT') {
  3069. partial.markAsPreload();
  3070. isPreloadSegment = true;
  3071. }
  3072. // The spec doesn't say that we can assume INDEPENDENT=YES for the
  3073. // first partial segment. It does call the flag "optional", though, and
3074. // in cases where there are no such flags on any partial segments, it
  3075. // is sensible to assume the first one is independent.
  3076. if (item.getAttributeValue('INDEPENDENT') != 'YES' && i > 0) {
  3077. partial.markAsNonIndependent();
  3078. }
  3079. partialSegmentRefs.push(partial);
  3080. if (partialSyncTime) {
  3081. partialSyncTime += pDuration;
  3082. }
  3083. } // for-loop of hlsSegment.partialSegments
  3084. }
3085. // If the segment has an EXTINF tag, set the segment's end time, start byte
  3086. // and end byte based on the duration and byterange information.
  3087. // Otherwise, calculate the end time, start / end byte based on its partial
  3088. // segments.
3089. // Note that the sum of partial segment durations may be slightly different
  3090. // from the parent segment's duration. In this case, use the duration from
  3091. // the parent segment tag.
  3092. if (extinfTag) {
  3093. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  3094. // We're interested in the duration part.
  3095. const extinfValues = extinfTag.value.split(',');
  3096. const duration = Number(extinfValues[0]);
  3097. // Skip segments without duration
  3098. if (duration == 0) {
  3099. return null;
  3100. }
  3101. endTime = startTime + duration;
  3102. } else if (partialSegmentRefs.length) {
  3103. endTime = partialSegmentRefs[partialSegmentRefs.length - 1].endTime;
  3104. } else {
3105. // Skip segments without duration and without partial segments
  3106. return null;
  3107. }
  3108. if (segmentWithByteRangeOptimization) {
  3109. // We cannot optimize segments with gaps, or with a start byte that is
  3110. // not 0.
  3111. if (somePartialSegmentWithGap || partialSegmentRefs[0].startByte != 0) {
  3112. segmentWithByteRangeOptimization = false;
  3113. getUrisOptimization = null;
  3114. } else {
  3115. partialSegmentRefs = [];
  3116. }
  3117. }
3118. // If the segment has an EXT-X-BYTERANGE tag, set the start byte and end
3119. // byte based on the byterange information. If the segment has no
3120. // EXT-X-BYTERANGE tag and has partial segments, set the start byte and
3121. // end byte based on the partial segments.
  3122. const byterangeTag =
  3123. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-BYTERANGE');
  3124. if (byterangeTag) {
  3125. [startByte, endByte] =
  3126. this.parseByteRange_(previousReference, byterangeTag.value);
  3127. } else if (partialSegmentRefs.length) {
  3128. startByte = partialSegmentRefs[0].startByte;
  3129. endByte = partialSegmentRefs[partialSegmentRefs.length - 1].endByte;
  3130. }
  3131. let tilesLayout = '';
  3132. let tileDuration = null;
  3133. if (stream.type == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  3134. // By default in HLS the tilesLayout is 1x1
  3135. tilesLayout = '1x1';
  3136. const tilesTag =
  3137. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-TILES');
  3138. if (tilesTag) {
  3139. tilesLayout = tilesTag.getRequiredAttrValue('LAYOUT');
  3140. const duration = tilesTag.getAttributeValue('DURATION');
  3141. if (duration) {
  3142. tileDuration = Number(duration);
  3143. }
  3144. }
  3145. }
  3146. let uris = null;
  3147. const getSegmentUris = () => {
  3148. if (getUrisOptimization) {
  3149. return getUrisOptimization();
  3150. }
  3151. if (uris == null) {
  3152. uris = shaka.hls.Utils.constructSegmentUris(getUris(),
  3153. hlsSegment.verbatimSegmentUri, variables);
  3154. }
  3155. return uris || [];
  3156. };
  3157. const allPartialSegments = partialSegmentRefs.length > 0 &&
  3158. !!hlsSegment.verbatimSegmentUri;
  3159. const reference = new shaka.media.SegmentReference(
  3160. startTime,
  3161. endTime,
  3162. getSegmentUris,
  3163. startByte,
  3164. endByte,
  3165. initSegmentReference,
  3166. /* timestampOffset= */ 0,
  3167. /* appendWindowStart= */ 0,
  3168. /* appendWindowEnd= */ Infinity,
  3169. partialSegmentRefs,
  3170. tilesLayout,
  3171. tileDuration,
  3172. syncTime,
  3173. status,
  3174. aesKey,
  3175. allPartialSegments,
  3176. );
  3177. if (segmentWithByteRangeOptimization) {
  3178. this.lowLatencyByterangeOptimization_ = true;
  3179. reference.markAsByterangeOptimization();
  3180. if (isPreloadSegment) {
  3181. reference.markAsPreload();
  3182. }
  3183. }
  3184. return reference;
  3185. }
  3186. /**
  3187. * Parse the startByte and endByte.
  3188. * @param {shaka.media.SegmentReference} previousReference
  3189. * @param {?string} byterange
  3190. * @return {!Array.<number>} An array with the start byte and end byte.
  3191. * @private
  3192. */
  3193. parseByteRange_(previousReference, byterange) {
  3194. let startByte = 0;
  3195. let endByte = null;
  3196. // If BYTERANGE is not specified, the segment consists of the entire
  3197. // resource.
  3198. if (byterange) {
  3199. const blocks = byterange.split('@');
  3200. const byteLength = Number(blocks[0]);
  3201. if (blocks[1]) {
  3202. startByte = Number(blocks[1]);
  3203. } else {
  3204. goog.asserts.assert(previousReference,
  3205. 'Cannot refer back to previous HLS segment!');
  3206. startByte = previousReference.endByte + 1;
  3207. }
  3208. endByte = startByte + byteLength - 1;
  3209. }
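// e.g. (hypothetical values) '500@1000' maps to bytes 1000-1499, while a
// bare '500' continues from the previous reference, so with
// previousReference.endByte == 999 it also maps to bytes 1000-1499.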
  3210. return [startByte, endByte];
  3211. }
  3212. /**
3213. * Parses shaka.hls.Segment objects into shaka.media.SegmentReferences and
3214. * gets the bandwidth necessary for these segments, if it's defined in the
3215. * playlist.
  3216. *
  3217. * @param {!shaka.hls.Playlist} playlist
  3218. * @param {shaka.extern.Stream} stream
  3219. * @param {!Map.<number, number>} mediaSequenceToStartTime
  3220. * @param {!Map.<string, string>} variables
  3221. * @param {function():!Array.<string>} getUris
  3222. * @param {string} type
  3223. * @return {{segments: !Array.<!shaka.media.SegmentReference>,
  3224. * bandwidth: (number|undefined)}}
  3225. * @private
  3226. */
  3227. createSegments_(playlist, stream, mediaSequenceToStartTime, variables,
  3228. getUris, type) {
  3229. /** @type {Array.<!shaka.hls.Segment>} */
  3230. const hlsSegments = playlist.segments;
  3231. goog.asserts.assert(hlsSegments.length, 'Playlist should have segments!');
  3232. /** @type {shaka.media.InitSegmentReference} */
  3233. let initSegmentRef;
  3234. /** @type {shaka.extern.aesKey|undefined} */
  3235. let aesKey = undefined;
  3236. let discontinuitySequence = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3237. playlist.tags, 'EXT-X-DISCONTINUITY-SEQUENCE', 0);
  3238. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3239. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  3240. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  3241. playlist.tags, 'EXT-X-SKIP');
  3242. const skippedSegments =
  3243. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  3244. let position = mediaSequenceNumber + skippedSegments;
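// e.g. (hypothetical tags) with #EXT-X-MEDIA-SEQUENCE:100 and
// #EXT-X-SKIP:SKIPPED-SEGMENTS=5, the first segment listed in this delta
// playlist has position 105.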
  3245. let firstStartTime = 0;
3246. // For live streams, use the cached value in the mediaSequenceToStartTime
  3247. // map if available.
  3248. if (this.isLive_() && mediaSequenceToStartTime.has(position)) {
  3249. firstStartTime = mediaSequenceToStartTime.get(position);
  3250. }
  3251. // This is for recovering from disconnects.
  3252. if (firstStartTime === 0 &&
  3253. this.presentationType_ == shaka.hls.HlsParser.PresentationType_.LIVE &&
  3254. mediaSequenceToStartTime.size > 0 &&
  3255. !mediaSequenceToStartTime.has(position)) {
  3256. firstStartTime = this.presentationTimeline_.getSegmentAvailabilityStart();
  3257. }
  3258. /** @type {!Array.<!shaka.media.SegmentReference>} */
  3259. const references = [];
  3260. let previousReference = null;
  3261. /** @type {!Array.<{bitrate: number, duration: number}>} */
  3262. const bitrates = [];
  3263. for (let i = 0; i < hlsSegments.length; i++) {
  3264. const item = hlsSegments[i];
  3265. const startTime =
  3266. (i == 0) ? firstStartTime : previousReference.endTime;
  3267. position = mediaSequenceNumber + skippedSegments + i;
  3268. const discontinuityTag = shaka.hls.Utils.getFirstTagWithName(
  3269. item.tags, 'EXT-X-DISCONTINUITY');
  3270. if (discontinuityTag) {
  3271. discontinuitySequence++;
  3272. }
3273. // Apply new AES key tags as we see them; the latest key applies until replaced.
  3274. for (const drmTag of item.tags) {
  3275. if (drmTag.name == 'EXT-X-KEY') {
  3276. if (this.isAesMethod_(drmTag.getRequiredAttrValue('METHOD'))) {
  3277. aesKey =
  3278. this.parseAESDrmTag_(drmTag, playlist, getUris, variables);
  3279. } else {
  3280. aesKey = undefined;
  3281. }
  3282. }
  3283. }
  3284. mediaSequenceToStartTime.set(position, startTime);
  3285. initSegmentRef = this.getInitSegmentReference_(playlist,
  3286. item.tags, getUris, variables);
3287. // If the stream is low-latency and the user has not enabled
3288. // lowLatencyMode, but the player is configured to activate
3289. // lowLatencyMode automatically when such a stream is detected,
3290. // enable it now.
  3291. if (!this.lowLatencyMode_) {
  3292. const autoLowLatencyMode = this.playerInterface_.isAutoLowLatencyMode();
  3293. if (autoLowLatencyMode) {
  3294. this.playerInterface_.enableLowLatencyMode();
  3295. this.lowLatencyMode_ = this.playerInterface_.isLowLatencyMode();
  3296. }
  3297. }
  3298. const reference = this.createSegmentReference_(
  3299. initSegmentRef,
  3300. previousReference,
  3301. item,
  3302. startTime,
  3303. variables,
  3304. playlist,
  3305. stream,
  3306. getUris,
  3307. aesKey);
  3308. if (reference) {
  3309. const bitrate = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3310. item.tags, 'EXT-X-BITRATE');
  3311. if (bitrate) {
  3312. bitrates.push({
  3313. bitrate,
  3314. duration: reference.endTime - reference.startTime,
  3315. });
  3316. } else if (bitrates.length) {
3317. // The EXT-X-BITRATE value applies to every segment between it and the
3318. // next EXT-X-BITRATE tag, so we reuse the latest bitrate value.
  3319. const prevBitrate = bitrates.pop();
  3320. prevBitrate.duration += reference.endTime - reference.startTime;
  3321. bitrates.push(prevBitrate);
  3322. }
  3323. previousReference = reference;
  3324. reference.discontinuitySequence = discontinuitySequence;
  3325. if (this.ignoreManifestProgramDateTimeFor_(type) &&
  3326. this.minSequenceNumber_ != null &&
  3327. position < this.minSequenceNumber_) {
  3328. // This segment is ignored as part of our fallback synchronization
  3329. // method.
  3330. } else {
  3331. references.push(reference);
  3332. }
  3333. }
  3334. }
  3335. let bandwidth = undefined;
  3336. if (bitrates.length) {
  3337. const duration = bitrates.reduce((sum, value) => {
  3338. return sum + value.duration;
  3339. }, 0);
  3340. bandwidth = Math.round(bitrates.reduce((sum, value) => {
  3341. return sum + value.bitrate * value.duration;
  3342. }, 0) / duration * 1000);
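// Worked example (hypothetical values): 4s of segments at 800 and 2s at
// 1400 (EXT-X-BITRATE is in kbit/s) give (800*4 + 1400*2) / 6 * 1000 =
// 1,000,000, i.e. roughly 1 Mbps.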
  3343. }
  3344. // If some segments have sync times, but not all, extrapolate the sync
  3345. // times of the ones with none.
  3346. const someSyncTime = references.some((ref) => ref.syncTime != null);
  3347. if (someSyncTime) {
  3348. for (let i = 0; i < references.length; i++) {
  3349. const reference = references[i];
  3350. if (reference.syncTime != null) {
  3351. // No need to extrapolate.
  3352. continue;
  3353. }
  3354. // Find the nearest segment with syncTime, in either direction.
3355. // This looks forward and backward simultaneously, keeping track of the
3356. // offset to apply to the syncTime it finds as it goes.
  3357. let forwardAdd = 0;
  3358. let forwardI = i;
  3359. /**
  3360. * Look forwards one reference at a time, summing all durations as we
  3361. * go, until we find a reference with a syncTime to use as a basis.
  3362. * This DOES count the original reference, but DOESN'T count the first
  3363. * reference with a syncTime (as we approach it from behind).
  3364. * @return {?number}
  3365. */
  3366. const lookForward = () => {
  3367. const other = references[forwardI];
  3368. if (other) {
  3369. if (other.syncTime != null) {
  3370. return other.syncTime + forwardAdd;
  3371. }
  3372. forwardAdd -= other.endTime - other.startTime;
  3373. forwardI += 1;
  3374. }
  3375. return null;
  3376. };
  3377. let backwardAdd = 0;
  3378. let backwardI = i;
  3379. /**
  3380. * Look backwards one reference at a time, summing all durations as we
  3381. * go, until we find a reference with a syncTime to use as a basis.
  3382. * This DOESN'T count the original reference, but DOES count the first
  3383. * reference with a syncTime (as we approach it from ahead).
  3384. * @return {?number}
  3385. */
  3386. const lookBackward = () => {
  3387. const other = references[backwardI];
  3388. if (other) {
  3389. if (other != reference) {
  3390. backwardAdd += other.endTime - other.startTime;
  3391. }
  3392. if (other.syncTime != null) {
  3393. return other.syncTime + backwardAdd;
  3394. }
  3395. backwardI -= 1;
  3396. }
  3397. return null;
  3398. };
  3399. while (reference.syncTime == null) {
  3400. reference.syncTime = lookBackward();
  3401. if (reference.syncTime == null) {
  3402. reference.syncTime = lookForward();
  3403. }
  3404. }
  3405. }
  3406. }
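// Example of the extrapolation above (hypothetical values): for 4s
// references A, B, C where only B has syncTime == 100, looking backward
// from C adds B's duration (C.syncTime = 104), and looking forward from A
// subtracts A's own duration (A.syncTime = 96).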
  3407. // Split the sync times properly among partial segments.
  3408. if (someSyncTime) {
  3409. for (const reference of references) {
  3410. let syncTime = reference.syncTime;
  3411. for (const partial of reference.partialReferences) {
  3412. partial.syncTime = syncTime;
  3413. syncTime += partial.endTime - partial.startTime;
  3414. }
  3415. }
  3416. }
  3417. // lowestSyncTime is a value from a previous playlist update. Use it to
  3418. // set reference start times. If this is the first playlist parse, we will
  3419. // skip this step, and wait until we have sync time across stream types.
  3420. const lowestSyncTime = this.lowestSyncTime_;
  3421. if (someSyncTime && lowestSyncTime != Infinity) {
  3422. if (!this.ignoreManifestProgramDateTimeFor_(type)) {
  3423. for (const reference of references) {
  3424. reference.syncAgainst(lowestSyncTime);
  3425. }
  3426. }
  3427. }
  3428. return {
  3429. segments: references,
  3430. bandwidth,
  3431. };
  3432. }
  3433. /**
3434. * Attempts to guess the stream's mime type based on content type and codecs.
  3435. *
  3436. * @param {string} contentType
  3437. * @param {string} codecs
  3438. * @return {?string}
  3439. * @private
  3440. */
  3441. guessMimeTypeBeforeLoading_(contentType, codecs) {
  3442. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  3443. if (codecs == 'vtt' || codecs == 'wvtt') {
3444. // If codecs is 'vtt' or 'wvtt', it's WebVTT.
  3445. return 'text/vtt';
  3446. } else if (codecs && codecs !== '') {
  3447. // Otherwise, assume MP4-embedded text, since text-based formats tend
  3448. // not to have a codecs string at all.
  3449. return 'application/mp4';
  3450. }
  3451. }
  3452. if (contentType == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  3453. if (!codecs || codecs == 'jpeg') {
  3454. return 'image/jpeg';
  3455. }
  3456. }
  3457. if (contentType == shaka.util.ManifestParserUtils.ContentType.AUDIO) {
  3458. // See: https://bugs.chromium.org/p/chromium/issues/detail?id=489520
  3459. if (codecs == 'mp4a.40.34') {
  3460. return 'audio/mpeg';
  3461. }
  3462. }
  3463. if (codecs == 'mjpg') {
  3464. return 'application/mp4';
  3465. }
  3466. // Not enough information to guess from the content type and codecs.
  3467. return null;
  3468. }
  3469. /**
  3470. * Get a fallback mime type for the content. Used if all the better methods
  3471. * for determining the mime type have failed.
  3472. *
  3473. * @param {string} contentType
  3474. * @return {string}
  3475. * @private
  3476. */
  3477. guessMimeTypeFallback_(contentType) {
  3478. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  3479. // If there was no codecs string and no content-type, assume HLS text
  3480. // streams are WebVTT.
  3481. return 'text/vtt';
  3482. }
  3483. // If the HLS content is lacking in both MIME type metadata and
  3484. // segment file extensions, we fall back to assuming it's MP4.
  3485. const map = shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  3486. return map['mp4'];
  3487. }
  3488. /**
  3489. * Attempts to guess stream's mime type based on content type, URI, and
  3490. * contents of the playlist.
  3491. *
  3492. * @param {string} contentType
  3493. * @param {string} codecs
  3494. * @param {!shaka.hls.Playlist} playlist
  3495. * @param {!Map.<string, string>} variables
  3496. * @param {function():!Array.<string>} getUris
  3497. * @return {!Promise.<string>}
  3498. * @private
  3499. */
  3500. async guessMimeType_(contentType, codecs, playlist, variables, getUris) {
  3501. const HlsParser = shaka.hls.HlsParser;
  3502. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  3503. // If you wait long enough, requesting the first segment can fail
3504. // because it has fallen off the left edge of the DVR window, so to be
3505. // safe, let's request the middle segment.
  3506. goog.asserts.assert(playlist.segments.length,
  3507. 'Playlist should have segments!');
  3508. const middleSegmentIdx = Math.trunc((playlist.segments.length - 1) / 2);
  3509. const middleSegment = playlist.segments[middleSegmentIdx];
  3510. if (shaka.hls.Utils.getFirstTagWithName(middleSegment.tags, 'EXT-X-GAP')) {
  3511. return this.guessMimeTypeFallback_(contentType);
  3512. }
  3513. const middleSegmentUris = shaka.hls.Utils.constructSegmentUris(
  3514. getUris(),
  3515. middleSegment.verbatimSegmentUri,
  3516. variables);
  3517. const parsedUri = new goog.Uri(middleSegmentUris[0]);
  3518. const extension = parsedUri.getPath().split('.').pop();
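// e.g. (hypothetical URI) 'https://example.com/seg/0042.m4s' gives the
// extension 'm4s', which maps to 'video/mp4' or 'audio/mp4' depending on
// the content type.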
  3519. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  3520. let mimeType = map[extension];
  3521. if (mimeType) {
  3522. return mimeType;
  3523. }
  3524. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  3525. if (mimeType) {
  3526. return mimeType;
  3527. }
  3528. // The extension map didn't work, so guess based on codecs.
  3529. mimeType = this.guessMimeTypeBeforeLoading_(contentType, codecs);
  3530. if (mimeType) {
  3531. return mimeType;
  3532. }
3533. // If we're unable to guess the mime type, request a segment and try to
3534. // get it from the response headers.
  3535. let contentMimeType;
  3536. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  3537. const headRequest = shaka.net.NetworkingEngine.makeRequest(
  3538. middleSegmentUris, this.config_.retryParameters);
  3539. try {
  3540. headRequest.method = 'HEAD';
  3541. const response = await this.makeNetworkRequest_(
  3542. headRequest, requestType, {type});
  3543. contentMimeType = response.headers['content-type'];
  3544. } catch (error) {
  3545. if (error &&
  3546. (error.code == shaka.util.Error.Code.HTTP_ERROR ||
  3547. error.code == shaka.util.Error.Code.BAD_HTTP_STATUS)) {
  3548. headRequest.method = 'GET';
  3549. const response = await this.makeNetworkRequest_(
  3550. headRequest, requestType, {type});
  3551. contentMimeType = response.headers['content-type'];
  3552. }
  3553. }
  3554. if (contentMimeType) {
  3555. // Split the MIME type in case the server sent additional parameters.
  3556. return contentMimeType.split(';')[0];
  3557. }
  3558. return this.guessMimeTypeFallback_(contentType);
  3559. }
  3560. /**
  3561. * Returns a tag with a given name.
3562. * Throws an error if the tag was not found.
  3563. *
  3564. * @param {!Array.<shaka.hls.Tag>} tags
  3565. * @param {string} tagName
  3566. * @return {!shaka.hls.Tag}
  3567. * @private
  3568. */
  3569. getRequiredTag_(tags, tagName) {
  3570. const tag = shaka.hls.Utils.getFirstTagWithName(tags, tagName);
  3571. if (!tag) {
  3572. throw new shaka.util.Error(
  3573. shaka.util.Error.Severity.CRITICAL,
  3574. shaka.util.Error.Category.MANIFEST,
  3575. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, tagName);
  3576. }
  3577. return tag;
  3578. }
  3579. /**
  3580. * @param {shaka.extern.Stream} stream
  3581. * @param {?string} width
  3582. * @param {?string} height
  3583. * @param {?string} frameRate
  3584. * @param {?string} videoRange
  3585. * @param {?string} videoLayout
  3586. * @private
  3587. */
  3588. addVideoAttributes_(stream, width, height, frameRate, videoRange,
  3589. videoLayout) {
  3590. if (stream) {
  3591. stream.width = Number(width) || undefined;
  3592. stream.height = Number(height) || undefined;
  3593. stream.frameRate = Number(frameRate) || undefined;
  3594. stream.hdr = videoRange || undefined;
  3595. stream.videoLayout = videoLayout || undefined;
  3596. }
  3597. }
  3598. /**
  3599. * Makes a network request for the manifest and returns a Promise
  3600. * with the resulting data.
  3601. *
  3602. * @param {!Array.<string>} uris
  3603. * @param {boolean=} isPlaylist
  3604. * @return {!Promise.<!shaka.extern.Response>}
  3605. * @private
  3606. */
  3607. requestManifest_(uris, isPlaylist) {
  3608. const requestType = shaka.net.NetworkingEngine.RequestType.MANIFEST;
  3609. const request = shaka.net.NetworkingEngine.makeRequest(
  3610. uris, this.config_.retryParameters);
  3611. const type = isPlaylist ?
  3612. shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_PLAYLIST :
  3613. shaka.net.NetworkingEngine.AdvancedRequestType.MASTER_PLAYLIST;
  3614. return this.makeNetworkRequest_(request, requestType, {type});
  3615. }
  3616. /**
  3617. * Called when the update timer ticks. Because parsing a manifest is async,
3618. * this method is async. To work with this, this method will schedule the
3619. * next update when it finishes instead of using a repeating timer.
  3620. *
  3621. * @return {!Promise}
  3622. * @private
  3623. */
  3624. async onUpdate_() {
  3625. shaka.log.info('Updating manifest...');
  3626. goog.asserts.assert(
  3627. this.getUpdatePlaylistDelay_() > 0,
3628. 'We should only call |onUpdate_| when we are supposed to be updating.');
  3629. // Detect a call to stop()
  3630. if (!this.playerInterface_) {
  3631. return;
  3632. }
  3633. try {
  3634. const startTime = Date.now();
  3635. await this.update();
3636. // Keep track of how long manifest updates take, on average.
  3637. const endTime = Date.now();
  3638. // This may have converted to VOD, in which case we stop updating.
  3639. if (this.isLive_()) {
  3640. const updateDuration = (endTime - startTime) / 1000.0;
  3641. this.averageUpdateDuration_.sample(1, updateDuration);
  3642. const delay = this.getUpdatePlaylistDelay_();
  3643. const finalDelay = Math.max(0,
  3644. delay - this.averageUpdateDuration_.getEstimate());
  3645. this.updatePlaylistTimer_.tickAfter(/* seconds= */ finalDelay);
  3646. }
  3647. } catch (error) {
  3648. // Detect a call to stop() during this.update()
  3649. if (!this.playerInterface_) {
  3650. return;
  3651. }
  3652. goog.asserts.assert(error instanceof shaka.util.Error,
  3653. 'Should only receive a Shaka error');
  3654. if (this.config_.raiseFatalErrorOnManifestUpdateRequestFailure) {
  3655. this.playerInterface_.onError(error);
  3656. return;
  3657. }
  3658. // We will retry updating, so override the severity of the error.
  3659. error.severity = shaka.util.Error.Severity.RECOVERABLE;
  3660. this.playerInterface_.onError(error);
  3661. // Try again very soon.
  3662. this.updatePlaylistTimer_.tickAfter(/* seconds= */ 0.1);
  3663. }
  3664. // Detect a call to stop()
  3665. if (!this.playerInterface_) {
  3666. return;
  3667. }
  3668. this.playerInterface_.onManifestUpdated();
  3669. }
  3670. /**
  3671. * @return {boolean}
  3672. * @private
  3673. */
  3674. isLive_() {
  3675. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  3676. return this.presentationType_ != PresentationType.VOD;
  3677. }
  3678. /**
  3679. * @return {number}
  3680. * @private
  3681. */
  3682. getUpdatePlaylistDelay_() {
  3683. // The HLS spec (RFC 8216) states in 6.3.4:
  3684. // "the client MUST wait for at least the target duration before
  3685. // attempting to reload the Playlist file again".
  3686. // For LL-HLS, the server must add a new partial segment to the Playlist
  3687. // every part target duration.
  3688. return this.lastTargetDuration_;
  3689. }
  3690. /**
  3691. * @param {shaka.hls.HlsParser.PresentationType_} type
  3692. * @private
  3693. */
  3694. setPresentationType_(type) {
  3695. this.presentationType_ = type;
  3696. if (this.presentationTimeline_) {
  3697. this.presentationTimeline_.setStatic(!this.isLive_());
  3698. }
  3699. // If this manifest is not for live content, then we have no reason to
  3700. // update it.
  3701. if (!this.isLive_()) {
  3702. this.updatePlaylistTimer_.stop();
  3703. }
  3704. }
  3705. /**
  3706. * Create a networking request. This will manage the request using the
  3707. * parser's operation manager. If the parser has already been stopped, the
  3708. * request will not be made.
  3709. *
  3710. * @param {shaka.extern.Request} request
  3711. * @param {shaka.net.NetworkingEngine.RequestType} type
  3712. * @param {shaka.extern.RequestContext=} context
  3713. * @return {!Promise.<shaka.extern.Response>}
  3714. * @private
  3715. */
  3716. makeNetworkRequest_(request, type, context) {
  3717. if (!this.operationManager_) {
  3718. throw new shaka.util.Error(
  3719. shaka.util.Error.Severity.CRITICAL,
  3720. shaka.util.Error.Category.PLAYER,
  3721. shaka.util.Error.Code.OPERATION_ABORTED);
  3722. }
  3723. const op = this.playerInterface_.networkingEngine.request(
  3724. type, request, context);
  3725. this.operationManager_.manage(op);
  3726. return op.promise;
  3727. }
  3728. /**
  3729. * @param {string} method
  3730. * @return {boolean}
  3731. * @private
  3732. */
  3733. isAesMethod_(method) {
  3734. return method == 'AES-128' ||
  3735. method == 'AES-256' ||
  3736. method == 'AES-256-CTR';
  3737. }
  3738. /**
  3739. * @param {!shaka.hls.Tag} drmTag
  3740. * @param {string} mimeType
  3741. * @return {?shaka.extern.DrmInfo}
  3742. * @private
  3743. */
  3744. static fairplayDrmParser_(drmTag, mimeType) {
  3745. if (mimeType == 'video/mp2t') {
  3746. throw new shaka.util.Error(
  3747. shaka.util.Error.Severity.CRITICAL,
  3748. shaka.util.Error.Category.MANIFEST,
  3749. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  3750. }
  3751. if (shaka.util.Platform.isMediaKeysPolyfilled()) {
  3752. throw new shaka.util.Error(
  3753. shaka.util.Error.Severity.CRITICAL,
  3754. shaka.util.Error.Category.MANIFEST,
  3755. shaka.util.Error.Code
  3756. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  3757. }
  3758. const method = drmTag.getRequiredAttrValue('METHOD');
  3759. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  3760. if (!VALID_METHODS.includes(method)) {
  3761. shaka.log.error('FairPlay in HLS is only supported with [',
  3762. VALID_METHODS.join(', '), '], not', method);
  3763. return null;
  3764. }
  3765. let encryptionScheme = 'cenc';
  3766. if (method == 'SAMPLE-AES') {
  3767. // It should be 'cbcs-1-9' but Safari doesn't support it.
  3768. // See: https://github.com/WebKit/WebKit/blob/main/Source/WebCore/Modules/encryptedmedia/MediaKeyEncryptionScheme.idl
  3769. encryptionScheme = 'cbcs';
  3770. }
  3771. /*
  3772. * Even if we're not able to construct initData through the HLS tag, adding
  3773. * a DRMInfo will allow DRM Engine to request a media key system access
  3774. * with the correct keySystem and initDataType
  3775. */
  3776. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  3777. 'com.apple.fps', encryptionScheme, [
  3778. {initDataType: 'sinf', initData: new Uint8Array(0), keyId: null},
  3779. ]);
  3780. return drmInfo;
  3781. }
  3782. /**
  3783. * @param {!shaka.hls.Tag} drmTag
  3784. * @return {?shaka.extern.DrmInfo}
  3785. * @private
  3786. */
  3787. static widevineDrmParser_(drmTag) {
  3788. const method = drmTag.getRequiredAttrValue('METHOD');
  3789. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  3790. if (!VALID_METHODS.includes(method)) {
  3791. shaka.log.error('Widevine in HLS is only supported with [',
  3792. VALID_METHODS.join(', '), '], not', method);
  3793. return null;
  3794. }
  3795. let encryptionScheme = 'cenc';
  3796. if (method == 'SAMPLE-AES') {
  3797. encryptionScheme = 'cbcs';
  3798. }
  3799. const uri = drmTag.getRequiredAttrValue('URI');
  3800. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
  3801. // The data encoded in the URI is a PSSH box to be used as init data.
  3802. const pssh = shaka.util.BufferUtils.toUint8(parsedData.data);
  3803. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  3804. 'com.widevine.alpha', encryptionScheme, [
  3805. {initDataType: 'cenc', initData: pssh},
  3806. ]);
  3807. const keyId = drmTag.getAttributeValue('KEYID');
  3808. if (keyId) {
  3809. const keyIdLowerCase = keyId.toLowerCase();
  3810. // This value should begin with '0x':
  3811. goog.asserts.assert(
  3812. keyIdLowerCase.startsWith('0x'), 'Incorrect KEYID format!');
  3813. // But the output should not contain the '0x':
  3814. drmInfo.keyIds = new Set([keyIdLowerCase.substr(2)]);
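// e.g. (hypothetical value) KEYID=0xDEADBEEFDEADBEEFDEADBEEFDEADBEEF is
// stored as 'deadbeefdeadbeefdeadbeefdeadbeef'.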
  3815. }
  3816. return drmInfo;
  3817. }
  3818. /**
  3819. * See: https://docs.microsoft.com/en-us/playready/packaging/mp4-based-formats-supported-by-playready-clients?tabs=case4
  3820. *
  3821. * @param {!shaka.hls.Tag} drmTag
  3822. * @return {?shaka.extern.DrmInfo}
  3823. * @private
  3824. */
  3825. static playreadyDrmParser_(drmTag) {
  3826. const method = drmTag.getRequiredAttrValue('METHOD');
  3827. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  3828. if (!VALID_METHODS.includes(method)) {
  3829. shaka.log.error('PlayReady in HLS is only supported with [',
  3830. VALID_METHODS.join(', '), '], not', method);
  3831. return null;
  3832. }
  3833. let encryptionScheme = 'cenc';
  3834. if (method == 'SAMPLE-AES') {
  3835. encryptionScheme = 'cbcs';
  3836. }
  3837. const uri = drmTag.getRequiredAttrValue('URI');
  3838. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
3839. // The data encoded in the URI is a PlayReady Pro Object, so we need to
3840. // convert it to a PSSH box.
  3841. const data = shaka.util.BufferUtils.toUint8(parsedData.data);
  3842. const systemId = new Uint8Array([
  3843. 0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86,
  3844. 0xab, 0x92, 0xe6, 0x5b, 0xe0, 0x88, 0x5f, 0x95,
  3845. ]);
  3846. const keyIds = new Set();
  3847. const psshVersion = 0;
  3848. const pssh =
  3849. shaka.util.Pssh.createPssh(data, systemId, keyIds, psshVersion);
  3850. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  3851. 'com.microsoft.playready', encryptionScheme, [
  3852. {initDataType: 'cenc', initData: pssh},
  3853. ]);
  3854. return drmInfo;
  3855. }
  3856. /**
  3857. * See: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-5.1
  3858. *
  3859. * @param {!shaka.hls.Tag} drmTag
  3860. * @param {string} mimeType
  3861. * @param {function():!Array.<string>} getUris
  3862. * @param {?shaka.media.InitSegmentReference} initSegmentRef
  3863. * @param {?Map.<string, string>=} variables
  3864. * @return {!Promise.<?shaka.extern.DrmInfo>}
  3865. * @private
  3866. */
  3867. async identityDrmParser_(drmTag, mimeType, getUris, initSegmentRef,
  3868. variables) {
  3869. if (mimeType == 'video/mp2t') {
  3870. throw new shaka.util.Error(
  3871. shaka.util.Error.Severity.CRITICAL,
  3872. shaka.util.Error.Category.MANIFEST,
  3873. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  3874. }
  3875. if (shaka.util.Platform.isMediaKeysPolyfilled()) {
  3876. throw new shaka.util.Error(
  3877. shaka.util.Error.Severity.CRITICAL,
  3878. shaka.util.Error.Category.MANIFEST,
  3879. shaka.util.Error.Code
  3880. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  3881. }
  3882. const method = drmTag.getRequiredAttrValue('METHOD');
  3883. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  3884. if (!VALID_METHODS.includes(method)) {
  3885. shaka.log.error('Identity (ClearKey) in HLS is only supported with [',
  3886. VALID_METHODS.join(', '), '], not', method);
  3887. return null;
  3888. }
  3889. const keyUris = shaka.hls.Utils.constructSegmentUris(
  3890. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  3891. let key;
  3892. if (keyUris[0].startsWith('data:text/plain;base64,')) {
  3893. key = shaka.util.Uint8ArrayUtils.toHex(
  3894. shaka.util.Uint8ArrayUtils.fromBase64(
  3895. keyUris[0].split('data:text/plain;base64,').pop()));
  3896. } else {
  3897. const keyMapKey = keyUris.sort().join('');
  3898. if (!this.identityKeyMap_.has(keyMapKey)) {
  3899. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  3900. const request = shaka.net.NetworkingEngine.makeRequest(
  3901. keyUris, this.config_.retryParameters);
  3902. const keyResponse = this.makeNetworkRequest_(request, requestType);
  3903. this.identityKeyMap_.set(keyMapKey, keyResponse);
  3904. }
  3905. const keyResponse = await this.identityKeyMap_.get(keyMapKey);
  3906. key = shaka.util.Uint8ArrayUtils.toHex(keyResponse.data);
  3907. }
  3908. // NOTE: The ClearKey CDM requires a key-id to key mapping. HLS doesn't
  3909. // provide a key ID anywhere. So although we could use the 'URI' attribute
  3910. // to fetch the actual 16-byte key, without a key ID, we can't provide this
  3911. // automatically to the ClearKey CDM. By default we assume that keyId is 0,
3912. // but we will try to get the key ID from the init segment.
3913. // If the application wants to override this behavior, it will have to use
  3914. // player.configure('drm.clearKeys', { ... }) to provide the key IDs
  3915. // and keys or player.configure('drm.servers.org\.w3\.clearkey', ...) to
  3916. // provide a ClearKey license server URI.
  3917. let keyId = '00000000000000000000000000000000';
  3918. if (initSegmentRef) {
  3919. let defaultKID;
  3920. if (this.identityKidMap_.has(initSegmentRef)) {
  3921. defaultKID = this.identityKidMap_.get(initSegmentRef);
  3922. } else {
  3923. const initSegmentRequest = shaka.util.Networking.createSegmentRequest(
  3924. initSegmentRef.getUris(),
  3925. initSegmentRef.getStartByte(),
  3926. initSegmentRef.getEndByte(),
  3927. this.config_.retryParameters);
  3928. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  3929. const initType =
  3930. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  3931. const initResponse = await this.makeNetworkRequest_(
  3932. initSegmentRequest, requestType, {type: initType});
  3933. defaultKID = shaka.media.SegmentUtils.getDefaultKID(
  3934. initResponse.data);
  3935. this.identityKidMap_.set(initSegmentRef, defaultKID);
  3936. }
  3937. if (defaultKID) {
  3938. keyId = defaultKID;
  3939. }
  3940. }
  3941. const clearkeys = new Map();
  3942. clearkeys.set(keyId, key);
  3943. let encryptionScheme = 'cenc';
  3944. if (method == 'SAMPLE-AES') {
  3945. encryptionScheme = 'cbcs';
  3946. }
  3947. return shaka.util.ManifestParserUtils.createDrmInfoFromClearKeys(
  3948. clearkeys, encryptionScheme);
  3949. }
  3950. };
  3951. /**
  3952. * @typedef {{
  3953. * stream: !shaka.extern.Stream,
  3954. * type: string,
  3955. * redirectUris: !Array.<string>,
  3956. * getUris: function():!Array.<string>,
  3957. * minTimestamp: number,
  3958. * maxTimestamp: number,
  3959. * mediaSequenceToStartTime: !Map.<number, number>,
  3960. * canSkipSegments: boolean,
  3961. * canBlockReload: boolean,
  3962. * hasEndList: boolean,
  3963. * firstSequenceNumber: number,
  3964. * nextMediaSequence: number,
  3965. * nextPart: number,
  3966. * loadedOnce: boolean
  3967. * }}
  3968. *
  3969. * @description
  3970. * Contains a stream and information about it.
  3971. *
  3972. * @property {!shaka.extern.Stream} stream
  3973. * The Stream itself.
  3974. * @property {string} type
  3975. * The type value. Could be 'video', 'audio', 'text', or 'image'.
  3976. * @property {!Array.<string>} redirectUris
  3977. * The redirect URIs.
  3978. * @property {function():!Array.<string>} getUris
3979. * The verbatim media playlist URIs, as they appeared in the master playlist.
  3980. * @property {number} minTimestamp
  3981. * The minimum timestamp found in the stream.
  3982. * @property {number} maxTimestamp
  3983. * The maximum timestamp found in the stream.
  3984. * @property {!Map.<number, number>} mediaSequenceToStartTime
  3985. * A map of media sequence numbers to media start times.
  3986. * Only used for VOD content.
  3987. * @property {boolean} canSkipSegments
  3988. * True if the server supports delta playlist updates, and we can send a
  3989. * request for a playlist that can skip older media segments.
  3990. * @property {boolean} canBlockReload
  3991. * True if the server supports blocking playlist reload, and we can send a
  3992. * request for a playlist that can block reload until some segments are
  3993. * present.
  3994. * @property {boolean} hasEndList
  3995. * True if the stream has an EXT-X-ENDLIST tag.
  3996. * @property {number} firstSequenceNumber
  3997. * The sequence number of the first reference. Only calculated if needed.
  3998. * @property {number} nextMediaSequence
  3999. * The next media sequence.
  4000. * @property {number} nextPart
  4001. * The next part.
  4002. * @property {boolean} loadedOnce
  4003. * True if the stream has been loaded at least once.
  4004. */
  4005. shaka.hls.HlsParser.StreamInfo;
  4006. /**
  4007. * @typedef {{
  4008. * audio: !Array.<shaka.hls.HlsParser.StreamInfo>,
  4009. * video: !Array.<shaka.hls.HlsParser.StreamInfo>
  4010. * }}
  4011. *
  4012. * @description Audio and video stream infos.
  4013. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} audio
  4014. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} video
  4015. */
  4016. shaka.hls.HlsParser.StreamInfos;
  4017. /**
  4018. * @const {!Object.<string, string>}
  4019. * @private
  4020. */
  4021. shaka.hls.HlsParser.RAW_FORMATS_TO_MIME_TYPES_ = {
  4022. 'aac': 'audio/aac',
  4023. 'ac3': 'audio/ac3',
  4024. 'ec3': 'audio/ec3',
  4025. 'mp3': 'audio/mpeg',
  4026. };
  4027. /**
  4028. * @const {!Object.<string, string>}
  4029. * @private
  4030. */
  4031. shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_ = {
  4032. 'mp4': 'audio/mp4',
  4033. 'mp4a': 'audio/mp4',
  4034. 'm4s': 'audio/mp4',
  4035. 'm4i': 'audio/mp4',
  4036. 'm4a': 'audio/mp4',
  4037. 'm4f': 'audio/mp4',
  4038. 'cmfa': 'audio/mp4',
  4039. // MPEG2-TS also uses video/ for audio: https://bit.ly/TsMse
  4040. 'ts': 'video/mp2t',
  4041. 'tsa': 'video/mp2t',
  4042. };
  4043. /**
  4044. * @const {!Object.<string, string>}
  4045. * @private
  4046. */
  4047. shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_ = {
  4048. 'mp4': 'video/mp4',
  4049. 'mp4v': 'video/mp4',
  4050. 'm4s': 'video/mp4',
  4051. 'm4i': 'video/mp4',
  4052. 'm4v': 'video/mp4',
  4053. 'm4f': 'video/mp4',
  4054. 'cmfv': 'video/mp4',
  4055. 'ts': 'video/mp2t',
  4056. 'tsv': 'video/mp2t',
  4057. };
  4058. /**
  4059. * @const {!Object.<string, string>}
  4060. * @private
  4061. */
  4062. shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_ = {
  4063. 'mp4': 'application/mp4',
  4064. 'm4s': 'application/mp4',
  4065. 'm4i': 'application/mp4',
  4066. 'm4f': 'application/mp4',
  4067. 'cmft': 'application/mp4',
  4068. 'vtt': 'text/vtt',
  4069. 'webvtt': 'text/vtt',
  4070. 'ttml': 'application/ttml+xml',
  4071. };
  4072. /**
  4073. * @const {!Object.<string, string>}
  4074. * @private
  4075. */
  4076. shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_ = {
  4077. 'jpg': 'image/jpeg',
  4078. 'png': 'image/png',
  4079. 'svg': 'image/svg+xml',
  4080. 'webp': 'image/webp',
  4081. 'avif': 'image/avif',
  4082. };
  4083. /**
  4084. * @const {!Object.<string, !Object.<string, string>>}
  4085. * @private
  4086. */
  4087. shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_ = {
  4088. 'audio': shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_,
  4089. 'video': shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_,
  4090. 'text': shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_,
  4091. 'image': shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_,
  4092. };
  4093. /**
  4094. * @typedef {function(!shaka.hls.Tag, string):?shaka.extern.DrmInfo}
  4095. * @private
  4096. */
  4097. shaka.hls.HlsParser.DrmParser_;
  4098. /**
  4099. * @const {!Object.<string, shaka.hls.HlsParser.DrmParser_>}
  4100. * @private
  4101. */
  4102. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_ = {
  4103. 'com.apple.streamingkeydelivery':
  4104. shaka.hls.HlsParser.fairplayDrmParser_,
  4105. 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed':
  4106. shaka.hls.HlsParser.widevineDrmParser_,
  4107. 'com.microsoft.playready':
  4108. shaka.hls.HlsParser.playreadyDrmParser_,
  4109. };
  4110. /**
  4111. * @enum {string}
  4112. * @private
  4113. */
  4114. shaka.hls.HlsParser.PresentationType_ = {
  4115. VOD: 'VOD',
  4116. EVENT: 'EVENT',
  4117. LIVE: 'LIVE',
  4118. };
  4119. shaka.media.ManifestParser.registerParserByMime(
  4120. 'application/x-mpegurl', () => new shaka.hls.HlsParser());
  4121. shaka.media.ManifestParser.registerParserByMime(
  4122. 'application/vnd.apple.mpegurl', () => new shaka.hls.HlsParser());
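// With these registrations, a manifest response served with either of the
// MIME types above is routed to HlsParser by shaka.media.ManifestParser;
// no additional configuration is needed.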