Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.device.DeviceFactory');
  11. goog.require('shaka.device.IDevice');
  12. goog.require('shaka.media.Capabilities');
  13. goog.require('shaka.media.ContentWorkarounds');
  14. goog.require('shaka.media.ClosedCaptionParser');
  15. goog.require('shaka.media.IClosedCaptionParser');
  16. goog.require('shaka.media.ManifestParser');
  17. goog.require('shaka.media.SegmentReference');
  18. goog.require('shaka.media.TimeRangesUtils');
  19. goog.require('shaka.text.TextEngine');
  20. goog.require('shaka.transmuxer.TransmuxerEngine');
  21. goog.require('shaka.util.BufferUtils');
  22. goog.require('shaka.util.Destroyer');
  23. goog.require('shaka.util.Dom');
  24. goog.require('shaka.util.Error');
  25. goog.require('shaka.util.EventManager');
  26. goog.require('shaka.util.FakeEvent');
  27. goog.require('shaka.util.Functional');
  28. goog.require('shaka.util.IDestroyable');
  29. goog.require('shaka.util.Id3Utils');
  30. goog.require('shaka.util.ManifestParserUtils');
  31. goog.require('shaka.util.MimeUtils');
  32. goog.require('shaka.util.Mp4BoxParsers');
  33. goog.require('shaka.util.Mp4Parser');
  34. goog.require('shaka.util.PublicPromise');
  35. goog.require('shaka.util.StreamUtils');
  36. goog.require('shaka.util.TimeUtils');
  37. goog.require('shaka.util.TsParser');
  38. goog.require('shaka.lcevc.Dec');
  39. /**
  40. * @summary
  41. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  42. * All asynchronous operations return a Promise, and all operations are
  43. * internally synchronized and serialized as needed. Operations that can
  44. * be done in parallel will be done in parallel.
  45. *
  46. * @implements {shaka.util.IDestroyable}
  47. */
  48. shaka.media.MediaSourceEngine = class {
  49. /**
  50. * @param {HTMLMediaElement} video The video element, whose source is tied to
  51. * MediaSource during the lifetime of the MediaSourceEngine.
  52. * @param {!shaka.extern.TextDisplayer} textDisplayer
  53. * The text displayer that will be used with the text engine.
  54. * MediaSourceEngine takes ownership of the displayer. When
  55. * MediaSourceEngine is destroyed, it will destroy the displayer.
  56. * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface
  57. * Interface for common player methods.
  58. * @param {shaka.extern.MediaSourceConfiguration} config
  59. * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
  60. */
  constructor(video, textDisplayer, playerInterface, config, lcevcDec) {
    /** @private {HTMLMediaElement} */
    this.video_ = video;
    /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */
    this.playerInterface_ = playerInterface;
    /** @private {?shaka.extern.MediaSourceConfiguration} */
    this.config_ = config;
    /** @private {shaka.extern.TextDisplayer} */
    this.textDisplayer_ = textDisplayer;
    /**
     * Active SourceBuffers, keyed by content type.
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType, SourceBuffer>}
     */
    this.sourceBuffers_ = new Map();
    /**
     * The full MIME type each SourceBuffer was created with.
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType, string>}
     */
    this.sourceBufferTypes_ = new Map();
    /**
     * Whether encrypted content is expected per content type (derived from
     * stream.drmInfos in initSourceBuffer_).
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
     *                boolean>}
     */
    this.expectedEncryption_ = new Map();
    /** @private {shaka.text.TextEngine} */
    this.textEngine_ = null;
    /** @private {boolean} */
    this.segmentRelativeVttTiming_ = false;
    /** @private {?shaka.lcevc.Dec} */
    this.lcevcDec_ = lcevcDec || null;
    /**
     * Per-content-type operation queues; index 0 is the in-progress item.
     * @private {!Map<string, !Array<shaka.media.MediaSourceEngine.Operation>>}
     */
    this.queues_ = new Map();
    /** @private {shaka.util.EventManager} */
    this.eventManager_ = new shaka.util.EventManager();
    /**
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
     *                !shaka.extern.Transmuxer>}
     */
    this.transmuxers_ = new Map();
    /** @private {?shaka.media.IClosedCaptionParser} */
    this.captionParser_ = null;
    // Resolved by onSourceOpen_() when the MediaSource fires 'sourceopen'.
    /** @private {!shaka.util.PublicPromise} */
    this.mediaSourceOpen_ = new shaka.util.PublicPromise();
    // Object URL for the MediaSource; revoked once 'sourceopen' fires.
    /** @private {string} */
    this.url_ = '';
    /** @private {boolean} */
    this.playbackHasBegun_ = false;
    // Toggled by ManagedMediaSource 'startstreaming'/'endstreaming' events.
    /** @private {boolean} */
    this.streamingAllowed_ = true;
    /** @private {boolean} */
    this.usingRemotePlayback_ = false;
    /** @private {HTMLSourceElement} */
    this.source_ = null;
    /**
     * Fallback source element with direct media URI, used for casting
     * purposes.
     * @private {HTMLSourceElement}
     */
    this.secondarySource_ = null;
    // NOTE: createMediaSource() reads several fields initialized above
    // (eventManager_, config_, video_, secondarySource_), so this assignment
    // must stay after them.
    /** @private {MediaSource} */
    this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
    /** @private {boolean} */
    this.reloadingMediaSource_ = false;
    /** @private {boolean} */
    this.playAfterReset_ = false;
    /** @type {!shaka.util.Destroyer} */
    this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());
    /** @private {boolean} */
    this.sequenceMode_ = false;
    /** @private {string} */
    this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;
    /** @private {boolean} */
    this.ignoreManifestTimestampsInSegmentsMode_ = false;
    /** @private {boolean} */
    this.attemptTimestampOffsetCalculation_ = false;
    /** @private {!shaka.util.PublicPromise<number>} */
    this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
    /** @private {boolean} */
    this.needSplitMuxedContent_ = false;
    /** @private {?number} */
    this.lastDuration_ = null;
    /**
     * Per-content-type MPEG-TS parsers, reset on init().
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
     *                !shaka.util.TsParser>}
     */
    this.tsParsers_ = new Map();
    /** @private {?number} */
    this.firstVideoTimestamp_ = null;
    /** @private {?number} */
    this.firstVideoReferenceStartTime_ = null;
    /** @private {?number} */
    this.firstAudioTimestamp_ = null;
    /** @private {?number} */
    this.firstAudioReferenceStartTime_ = null;
    /** @private {!shaka.util.PublicPromise<number>} */
    this.audioCompensation_ = new shaka.util.PublicPromise();
    if (this.video_.remote) {
      // Keep usingRemotePlayback_ in sync with the Remote Playback API state;
      // any state other than 'disconnected' counts as remote.
      this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      this.eventManager_.listen(this.video_.remote, 'connect', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });
      this.eventManager_.listen(this.video_.remote, 'connecting', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });
      this.eventManager_.listen(this.video_.remote, 'disconnect', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });
    }
  }
  169. /**
  170. * Create a MediaSource object, attach it to the video element, and return it.
  171. * Resolves the given promise when the MediaSource is ready.
  172. *
  173. * Replaced by unit tests.
  174. *
  175. * @param {!shaka.util.PublicPromise} p
  176. * @return {!MediaSource}
  177. */
  178. createMediaSource(p) {
  179. this.streamingAllowed_ = true;
  180. /** @type {!MediaSource} */
  181. let mediaSource;
  182. if (window.ManagedMediaSource) {
  183. if (!this.secondarySource_) {
  184. this.video_.disableRemotePlayback = true;
  185. }
  186. mediaSource = new ManagedMediaSource();
  187. this.eventManager_.listen(
  188. mediaSource, 'startstreaming', () => {
  189. shaka.log.info('MMS startstreaming');
  190. this.streamingAllowed_ = true;
  191. });
  192. this.eventManager_.listen(
  193. mediaSource, 'endstreaming', () => {
  194. shaka.log.info('MMS endstreaming');
  195. this.streamingAllowed_ = false;
  196. });
  197. } else {
  198. mediaSource = new MediaSource();
  199. }
  200. // Set up MediaSource on the video element.
  201. this.eventManager_.listenOnce(
  202. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  203. // Correctly set when playback has begun.
  204. this.eventManager_.listenOnce(this.video_, 'playing', () => {
  205. this.playbackHasBegun_ = true;
  206. });
  207. // Store the object URL for releasing it later.
  208. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  209. if (this.config_.useSourceElements) {
  210. this.video_.removeAttribute('src');
  211. if (this.source_) {
  212. this.video_.removeChild(this.source_);
  213. }
  214. if (this.secondarySource_) {
  215. this.video_.removeChild(this.secondarySource_);
  216. }
  217. this.source_ = shaka.util.Dom.createSourceElement(this.url_);
  218. this.video_.appendChild(this.source_);
  219. if (this.secondarySource_) {
  220. this.video_.appendChild(this.secondarySource_);
  221. }
  222. this.video_.load();
  223. } else {
  224. this.video_.src = this.url_;
  225. }
  226. return mediaSource;
  227. }
  228. /**
  229. * @param {string} uri
  230. * @param {string} mimeType
  231. */
  232. addSecondarySource(uri, mimeType) {
  233. if (!this.video_ || !window.ManagedMediaSource || !this.mediaSource_) {
  234. shaka.log.warning(
  235. 'Secondary source is used only with ManagedMediaSource');
  236. return;
  237. }
  238. if (!this.config_.useSourceElements) {
  239. return;
  240. }
  241. if (this.secondarySource_) {
  242. this.video_.removeChild(this.secondarySource_);
  243. }
  244. this.secondarySource_ = shaka.util.Dom.createSourceElement(uri, mimeType);
  245. this.video_.appendChild(this.secondarySource_);
  246. this.video_.disableRemotePlayback = false;
  247. }
  248. /**
  249. * @param {shaka.util.PublicPromise} p
  250. * @private
  251. */
  252. onSourceOpen_(p) {
  253. goog.asserts.assert(this.url_, 'Must have object URL');
  254. // Release the object URL that was previously created, to prevent memory
  255. // leak.
  256. // createObjectURL creates a strong reference to the MediaSource object
  257. // inside the browser. Setting the src of the video then creates another
  258. // reference within the video element. revokeObjectURL will remove the
  259. // strong reference to the MediaSource object, and allow it to be
  260. // garbage-collected later.
  261. URL.revokeObjectURL(this.url_);
  262. p.resolve();
  263. }
  264. /**
  265. * Returns a map of MediaSource support for well-known types.
  266. *
  267. * @return {!Object<string, boolean>}
  268. */
  269. static probeSupport() {
  270. const testMimeTypes = [
  271. // MP4 types
  272. 'video/mp4; codecs="avc1.42E01E"',
  273. 'video/mp4; codecs="avc3.42E01E"',
  274. 'video/mp4; codecs="hev1.1.6.L93.90"',
  275. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  276. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  277. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  278. 'video/mp4; codecs="vp9"',
  279. 'video/mp4; codecs="vp09.00.10.08"',
  280. 'video/mp4; codecs="av01.0.01M.08"',
  281. 'video/mp4; codecs="dvh1.05.01"',
  282. 'video/mp4; codecs="dvh1.20.01"',
  283. 'audio/mp4; codecs="mp4a.40.2"',
  284. 'audio/mp4; codecs="ac-3"',
  285. 'audio/mp4; codecs="ec-3"',
  286. 'audio/mp4; codecs="ac-4.02.01.01"',
  287. 'audio/mp4; codecs="opus"',
  288. 'audio/mp4; codecs="flac"',
  289. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  290. 'audio/mp4; codecs="dtse"', // DTS Express
  291. 'audio/mp4; codecs="dtsx"', // DTS:X
  292. 'audio/mp4; codecs="apac.31.00"',
  293. // WebM types
  294. 'video/webm; codecs="vp8"',
  295. 'video/webm; codecs="vp9"',
  296. 'video/webm; codecs="vp09.00.10.08"',
  297. 'audio/webm; codecs="vorbis"',
  298. 'audio/webm; codecs="opus"',
  299. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  300. 'video/mp2t; codecs="avc1.42E01E"',
  301. 'video/mp2t; codecs="avc3.42E01E"',
  302. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  303. 'video/mp2t; codecs="mp4a.40.2"',
  304. 'video/mp2t; codecs="ac-3"',
  305. 'video/mp2t; codecs="ec-3"',
  306. // WebVTT types
  307. 'text/vtt',
  308. 'application/mp4; codecs="wvtt"',
  309. // TTML types
  310. 'application/ttml+xml',
  311. 'application/mp4; codecs="stpp"',
  312. // Containerless types
  313. ...shaka.util.MimeUtils.RAW_FORMATS,
  314. ];
  315. const support = {};
  316. const device = shaka.device.DeviceFactory.getDevice();
  317. for (const type of testMimeTypes) {
  318. if (shaka.text.TextEngine.isTypeSupported(type)) {
  319. support[type] = true;
  320. } else if (device.supportsMediaSource()) {
  321. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  322. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  323. } else {
  324. support[type] = device.supportsMediaType(type);
  325. }
  326. const basicType = type.split(';')[0];
  327. support[basicType] = support[basicType] || support[type];
  328. }
  329. return support;
  330. }
  331. /** @override */
  destroy() {
    // Delegate to the Destroyer created in the constructor, which wraps the
    // actual teardown in doDestroy_().
    return this.destroyer_.destroy();
  }
  335. /** @private */
  // Actual teardown, invoked (once) via the Destroyer. Waits for in-flight
  // source-buffer operations, rejects queued ones, then releases all
  // resources and nulls out references.
  async doDestroy_() {
    const Functional = shaka.util.Functional;
    const cleanup = [];
    for (const [key, q] of this.queues_) {
      // Make a local copy of the queue and the first item.
      const inProgress = q[0];
      const contentType = /** @type {string} */(key);
      // Drop everything else out of the original queue.
      this.queues_.set(contentType, q.slice(0, 1));
      // We will wait for this item to complete/fail.
      if (inProgress) {
        cleanup.push(inProgress.p.catch(Functional.noop));
      }
      // The rest will be rejected silently if possible.
      for (const item of q.slice(1)) {
        item.p.reject(shaka.util.Destroyer.destroyedError());
      }
    }
    if (this.textEngine_) {
      cleanup.push(this.textEngine_.destroy());
    }
    // Wait for the in-progress operations (and text engine teardown) before
    // tearing down the rest.
    await Promise.all(cleanup);
    for (const transmuxer of this.transmuxers_.values()) {
      transmuxer.destroy();
    }
    if (this.eventManager_) {
      this.eventManager_.release();
      this.eventManager_ = null;
    }
    if (this.video_ && this.secondarySource_) {
      this.video_.removeChild(this.secondarySource_);
    }
    if (this.video_ && this.source_) {
      // "unload" the video element: removing the <source> and calling load()
      // detaches the MediaSource.
      this.video_.removeChild(this.source_);
      this.video_.load();
      this.video_.disableRemotePlayback = false;
    }
    this.video_ = null;
    this.source_ = null;
    this.secondarySource_ = null;
    this.config_ = null;
    this.mediaSource_ = null;
    this.textEngine_ = null;
    this.textDisplayer_ = null;
    this.sourceBuffers_.clear();
    this.expectedEncryption_.clear();
    this.transmuxers_.clear();
    this.captionParser_ = null;
    if (goog.DEBUG) {
      // Sanity check: nothing should have been re-queued during teardown.
      for (const [contentType, q] of this.queues_) {
        goog.asserts.assert(
            q.length == 0,
            contentType + ' queue should be empty after destroy!');
      }
    }
    this.queues_.clear();
    // This object is owned by Player
    this.lcevcDec_ = null;
    this.tsParsers_.clear();
    this.playerInterface_ = null;
  }
  398. /**
  399. * @return {!Promise} Resolved when MediaSource is open and attached to the
  400. * media element. This process is actually initiated by the constructor.
  401. */
  open() {
    // This PublicPromise is resolved by onSourceOpen_() when the MediaSource
    // created in the constructor fires 'sourceopen'.
    return this.mediaSourceOpen_;
  }
  405. /**
  406. * Initialize MediaSourceEngine.
  407. *
  408. * Note that it is not valid to call this multiple times, except to add or
  409. * reinitialize text streams.
  410. *
  411. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  412. * shaka.extern.Stream>} streamsByType
  413. * A map of content types to streams.
  414. * @param {boolean=} sequenceMode
  415. * If true, the media segments are appended to the SourceBuffer in strict
  416. * sequence.
  417. * @param {string=} manifestType
  418. * Indicates the type of the manifest.
  419. * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
  420. * If true, don't adjust the timestamp offset to account for manifest
  421. * segment durations being out of sync with segment durations. In other
  422. * words, assume that there are no gaps in the segments when appending
  423. * to the SourceBuffer, even if the manifest and segment times disagree.
  424. * Indicates if the manifest has text streams.
  425. *
  426. * @return {!Promise}
  427. */
  428. async init(streamsByType, sequenceMode=false,
  429. manifestType=shaka.media.ManifestParser.UNKNOWN,
  430. ignoreManifestTimestampsInSegmentsMode=false) {
  431. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  432. await this.mediaSourceOpen_;
  433. if (this.ended() || this.closed()) {
  434. shaka.log.alwaysError('Expected MediaSource to be open during init(); ' +
  435. 'reopening the media source.');
  436. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  437. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  438. await this.mediaSourceOpen_;
  439. }
  440. this.sequenceMode_ = sequenceMode;
  441. this.manifestType_ = manifestType;
  442. this.ignoreManifestTimestampsInSegmentsMode_ =
  443. ignoreManifestTimestampsInSegmentsMode;
  444. this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
  445. this.manifestType_ == shaka.media.ManifestParser.HLS &&
  446. !this.ignoreManifestTimestampsInSegmentsMode_;
  447. this.tsParsers_.clear();
  448. this.firstVideoTimestamp_ = null;
  449. this.firstVideoReferenceStartTime_ = null;
  450. this.firstAudioTimestamp_ = null;
  451. this.firstAudioReferenceStartTime_ = null;
  452. this.audioCompensation_ = new shaka.util.PublicPromise();
  453. for (const contentType of streamsByType.keys()) {
  454. const stream = streamsByType.get(contentType);
  455. this.initSourceBuffer_(contentType, stream, stream.codecs);
  456. if (this.needSplitMuxedContent_) {
  457. this.queues_.set(ContentType.AUDIO, []);
  458. this.queues_.set(ContentType.VIDEO, []);
  459. } else {
  460. this.queues_.set(contentType, []);
  461. }
  462. }
  463. const audio = streamsByType.get(ContentType.AUDIO);
  464. if (audio && audio.isAudioMuxedInVideo) {
  465. this.needSplitMuxedContent_ = true;
  466. }
  467. }
  468. /**
  469. * Initialize a specific SourceBuffer.
  470. *
  471. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  472. * @param {shaka.extern.Stream} stream
  473. * @param {string} codecs
  474. * @private
  475. */
  initSourceBuffer_(contentType, stream, codecs) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    if (contentType == ContentType.AUDIO && codecs) {
      codecs = shaka.util.StreamUtils.getCorrectAudioCodecs(
          codecs, stream.mimeType);
    }
    let mimeType = shaka.util.MimeUtils.getFullType(
        stream.mimeType, codecs);
    if (contentType == ContentType.TEXT) {
      // Text never gets a SourceBuffer; it goes through the TextEngine.
      this.reinitText(mimeType, this.sequenceMode_, stream.external);
    } else {
      // Transmux when forced by config, when MSE can't play the type
      // natively, or for raw (containerless) formats outside sequence mode.
      let needTransmux = this.config_.forceTransmux;
      if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
          (!this.sequenceMode_ &&
          shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
        needTransmux = true;
      }
      const mimeTypeWithAllCodecs =
          shaka.util.MimeUtils.getFullTypeWithAllCodecs(
              stream.mimeType, codecs);
      if (needTransmux) {
        const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.AUDIO, (codecs || '').split(','));
        const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.VIDEO, (codecs || '').split(','));
        if (audioCodec && videoCodec) {
          // The codec list names both audio and video: this is muxed content.
          // Recurse once per type so each gets its own SourceBuffer.
          this.needSplitMuxedContent_ = true;
          this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
          this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
          return;
        }
        const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
            .findTransmuxer(mimeTypeWithAllCodecs);
        if (transmuxerPlugin) {
          const transmuxer = transmuxerPlugin();
          this.transmuxers_.set(contentType, transmuxer);
          // The SourceBuffer must be created with the transmuxer's OUTPUT
          // type, not the input type.
          mimeType =
              transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
        }
      }
      const type = this.addExtraFeaturesToMimeType_(mimeType);
      this.destroyer_.ensureNotDestroyed();
      let sourceBuffer;
      try {
        sourceBuffer = this.mediaSource_.addSourceBuffer(type);
      } catch (exception) {
        // addSourceBuffer throws if the MediaSource is not 'open' or the type
        // is rejected; surface it as a critical media error.
        throw new shaka.util.Error(
            shaka.util.Error.Severity.CRITICAL,
            shaka.util.Error.Category.MEDIA,
            shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
            exception,
            'The mediaSource_ status was ' + this.mediaSource_.readyState +
            ' expected \'open\'',
            null);
      }
      if (this.sequenceMode_) {
        sourceBuffer.mode =
            shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
      }
      this.eventManager_.listen(
          sourceBuffer, 'error',
          () => this.onError_(contentType));
      this.eventManager_.listen(
          sourceBuffer, 'updateend',
          () => this.onUpdateEnd_(contentType));
      this.sourceBuffers_.set(contentType, sourceBuffer);
      this.sourceBufferTypes_.set(contentType, mimeType);
      this.expectedEncryption_.set(contentType, !!stream.drmInfos.length);
    }
  }
  546. /**
  547. * Called by the Player to provide an updated configuration any time it
  548. * changes. Must be called at least once before init().
  549. *
  550. * @param {shaka.extern.MediaSourceConfiguration} config
  551. */
  552. configure(config) {
  553. this.config_ = config;
  554. if (this.textEngine_) {
  555. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  556. }
  557. }
  558. /**
  559. * Indicate if the streaming is allowed by MediaSourceEngine.
 * If we are using plain MediaSource, this always returns true.
  561. *
  562. * @return {boolean}
  563. */
  564. isStreamingAllowed() {
  565. return this.streamingAllowed_ && !this.usingRemotePlayback_ &&
  566. !this.reloadingMediaSource_;
  567. }
  568. /**
  569. * Reinitialize the TextEngine for a new text type.
  570. * @param {string} mimeType
  571. * @param {boolean} sequenceMode
  572. * @param {boolean} external
  573. */
  574. reinitText(mimeType, sequenceMode, external) {
  575. if (!this.textEngine_) {
  576. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  577. if (this.textEngine_) {
  578. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  579. }
  580. }
  581. this.textEngine_.initParser(mimeType, sequenceMode,
  582. external || this.segmentRelativeVttTiming_, this.manifestType_);
  583. }
  584. /**
  585. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  586. * object has been destroyed.
  587. */
  588. ended() {
  589. if (this.reloadingMediaSource_) {
  590. return false;
  591. }
  592. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  593. }
  594. /**
  595. * @return {boolean} True if the MediaSource is in an "closed" state, or if
  596. * the object has been destroyed.
  597. */
  598. closed() {
  599. if (this.reloadingMediaSource_) {
  600. return false;
  601. }
  602. return this.mediaSource_ ? this.mediaSource_.readyState == 'closed' : true;
  603. }
  604. /**
  605. * Gets the first timestamp in buffer for the given content type.
  606. *
  607. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  608. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  609. */
  610. bufferStart(contentType) {
  611. if (!this.sourceBuffers_.size) {
  612. return null;
  613. }
  614. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  615. if (contentType == ContentType.TEXT) {
  616. return this.textEngine_.bufferStart();
  617. }
  618. return shaka.media.TimeRangesUtils.bufferStart(
  619. this.getBuffered_(contentType));
  620. }
  621. /**
  622. * Gets the last timestamp in buffer for the given content type.
  623. *
  624. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  625. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  626. */
  627. bufferEnd(contentType) {
  628. if (!this.sourceBuffers_.size) {
  629. return null;
  630. }
  631. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  632. if (contentType == ContentType.TEXT) {
  633. return this.textEngine_.bufferEnd();
  634. }
  635. return shaka.media.TimeRangesUtils.bufferEnd(
  636. this.getBuffered_(contentType));
  637. }
  638. /**
  639. * Determines if the given time is inside the buffered range of the given
  640. * content type.
  641. *
  642. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  643. * @param {number} time Playhead time
  644. * @return {boolean}
  645. */
  646. isBuffered(contentType, time) {
  647. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  648. if (contentType == ContentType.TEXT) {
  649. return this.textEngine_.isBuffered(time);
  650. } else {
  651. const buffered = this.getBuffered_(contentType);
  652. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  653. }
  654. }
  655. /**
  656. * Computes how far ahead of the given timestamp is buffered for the given
  657. * content type.
  658. *
  659. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  660. * @param {number} time
  661. * @return {number} The amount of time buffered ahead in seconds.
  662. */
  663. bufferedAheadOf(contentType, time) {
  664. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  665. if (contentType == ContentType.TEXT) {
  666. return this.textEngine_.bufferedAheadOf(time);
  667. } else {
  668. const buffered = this.getBuffered_(contentType);
  669. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  670. }
  671. }
  672. /**
  673. * Returns info about what is currently buffered.
  674. * @return {shaka.extern.BufferedInfo}
  675. */
  676. getBufferedInfo() {
  677. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  678. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  679. const info = {
  680. total: this.reloadingMediaSource_ ? [] :
  681. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  682. audio:
  683. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  684. video:
  685. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  686. text: [],
  687. };
  688. if (this.textEngine_) {
  689. const start = this.textEngine_.bufferStart();
  690. const end = this.textEngine_.bufferEnd();
  691. if (start != null && end != null) {
  692. info.text.push({start: start, end: end});
  693. }
  694. }
  695. return info;
  696. }
  697. /**
  698. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  699. * @return {TimeRanges} The buffered ranges for the given content type, or
  700. * null if the buffered ranges could not be obtained.
  701. * @private
  702. */
  703. getBuffered_(contentType) {
  704. if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
  705. return null;
  706. }
  707. try {
  708. return this.sourceBuffers_.get(contentType).buffered;
  709. } catch (exception) {
  710. if (this.sourceBuffers_.has(contentType)) {
  711. // Note: previous MediaSource errors may cause access to |buffered| to
  712. // throw.
  713. shaka.log.error('failed to get buffered range for ' + contentType,
  714. exception);
  715. }
  716. return null;
  717. }
  718. }
  719. /**
  720. * Create a new closed caption parser. This will ONLY be replaced by tests as
  721. * a way to inject fake closed caption parser instances.
  722. *
  723. * @param {string} mimeType
  724. * @return {!shaka.media.IClosedCaptionParser}
  725. */
  getCaptionParser(mimeType) {
    // Simple factory; exists as a method only so tests can override it to
    // inject fake parsers.
    return new shaka.media.ClosedCaptionParser(mimeType);
  }
  729. /**
  730. * This method is only public for testing.
  731. *
  732. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  733. * @param {!BufferSource} data
  734. * @param {!shaka.media.SegmentReference} reference The segment reference
  735. * we are appending
  736. * @param {shaka.extern.Stream} stream
  737. * @param {!string} mimeType
  738. * @return {{timestamp: ?number, metadata: !Array<shaka.extern.ID3Metadata>}}
  739. */
  // Extracts a start timestamp from the segment payload (ID3 for raw formats,
  // tfdt for MP4, PES for TS) and dispatches any ID3/emsg metadata found
  // along the way. Returns null timestamp when none could be determined.
  getTimestampAndDispatchMetadata(contentType, data, reference, stream,
      mimeType) {
    let timestamp = null;
    let metadata = [];
    const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
    if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
      // Raw (containerless) formats: look for ID3 frames.
      const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
      if (frames.length && reference) {
        // The Apple HLS timestamp frame carries the PTS in milliseconds.
        const metadataTimestamp = frames.find((frame) => {
          return frame.description ===
              'com.apple.streaming.transportStreamTimestamp';
        });
        if (metadataTimestamp && typeof metadataTimestamp.data == 'number') {
          // Convert ms -> seconds.
          timestamp = Math.round(metadataTimestamp.data) / 1000;
        }
        /** @private {shaka.extern.ID3Metadata} */
        const id3Metadata = {
          cueTime: reference.startTime,
          data: uint8ArrayData,
          frames: frames,
          dts: reference.startTime,
          pts: reference.startTime,
        };
        this.playerInterface_.onMetadata(
            [id3Metadata], /* offset= */ 0, reference.endTime);
      }
    } else if (mimeType.includes('/mp4') &&
        reference &&
        reference.initSegmentReference &&
        reference.initSegmentReference.timescale) {
      // MP4: walk moof/traf for tfdt (media decode time), plus emsg/prft.
      const timescale = reference.initSegmentReference.timescale;
      if (!isNaN(timescale)) {
        const hasEmsg = ((stream.emsgSchemeIdUris != null &&
            stream.emsgSchemeIdUris.length > 0) ||
            this.config_.dispatchAllEmsgBoxes);
        const Mp4Parser = shaka.util.Mp4Parser;
        let startTime = 0;
        let parsedMedia = false;
        const parser = new Mp4Parser();
        if (hasEmsg) {
          parser.fullBox('emsg', (box) =>
            this.parseEMSG_(reference, stream.emsgSchemeIdUris, box));
        }
        parser.fullBox('prft', (box) => this.parsePrft_(timescale, box))
            .box('moof', Mp4Parser.children)
            .box('traf', Mp4Parser.children)
            .fullBox('tfdt', (box) => {
              // Only the first tfdt matters for the start time.
              if (!parsedMedia) {
                goog.asserts.assert(
                    box.version == 0 || box.version == 1,
                    'TFDT version can only be 0 or 1');
                const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
                    box.reader, box.version);
                startTime = parsed.baseMediaDecodeTime / timescale;
                parsedMedia = true;
                // If we don't also need emsg boxes, we can stop parsing now.
                if (!hasEmsg) {
                  box.parser.stop();
                }
              }
            }).parse(data, /* partialOkay= */ true);
        if (parsedMedia && reference.timestampOffset == 0) {
          timestamp = startTime;
        }
      }
    } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
        shaka.util.TsParser.probe(uint8ArrayData)) {
      // MPEG-TS: reuse one parser per content type (created lazily).
      if (!this.tsParsers_.has(contentType)) {
        this.tsParsers_.set(contentType, new shaka.util.TsParser());
      }
      const tsParser = this.tsParsers_.get(contentType);
      tsParser.clearData();
      tsParser.setDiscontinuitySequence(reference.discontinuitySequence);
      tsParser.parse(uint8ArrayData);
      const startTime = tsParser.getStartTime(contentType);
      if (startTime != null) {
        timestamp = startTime;
      }
      metadata = tsParser.getMetadata();
    }
    return {timestamp, metadata};
  }
  821. /**
  822. * Parse the EMSG box from a MP4 container.
  823. *
  824. * @param {!shaka.media.SegmentReference} reference
  825. * @param {?Array<string>} emsgSchemeIdUris Array of emsg
  826. * scheme_id_uri for which emsg boxes should be parsed.
  827. * @param {!shaka.extern.ParsedBox} box
  828. * @private
  829. * https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format
  830. * aligned(8) class DASHEventMessageBox
  831. * extends FullBox(‘emsg’, version, flags = 0){
  832. * if (version==0) {
  833. * string scheme_id_uri;
  834. * string value;
  835. * unsigned int(32) timescale;
  836. * unsigned int(32) presentation_time_delta;
  837. * unsigned int(32) event_duration;
  838. * unsigned int(32) id;
  839. * } else if (version==1) {
  840. * unsigned int(32) timescale;
  841. * unsigned int(64) presentation_time;
  842. * unsigned int(32) event_duration;
  843. * unsigned int(32) id;
  844. * string scheme_id_uri;
  845. * string value;
  846. * }
  847. * unsigned int(8) message_data[];
  848. */
  849. parseEMSG_(reference, emsgSchemeIdUris, box) {
  850. let timescale;
  851. let id;
  852. let eventDuration;
  853. let schemeId;
  854. let startTime;
  855. let presentationTimeDelta;
  856. let value;
  857. if (box.version === 0) {
  858. schemeId = box.reader.readTerminatedString();
  859. value = box.reader.readTerminatedString();
  860. timescale = box.reader.readUint32();
  861. presentationTimeDelta = box.reader.readUint32();
  862. eventDuration = box.reader.readUint32();
  863. id = box.reader.readUint32();
  864. startTime = reference.startTime + (presentationTimeDelta / timescale);
  865. } else {
  866. timescale = box.reader.readUint32();
  867. const pts = box.reader.readUint64();
  868. startTime = (pts / timescale) + reference.timestampOffset;
  869. presentationTimeDelta = startTime - reference.startTime;
  870. eventDuration = box.reader.readUint32();
  871. id = box.reader.readUint32();
  872. schemeId = box.reader.readTerminatedString();
  873. value = box.reader.readTerminatedString();
  874. }
  875. const messageData = box.reader.readBytes(
  876. box.reader.getLength() - box.reader.getPosition());
  877. // See DASH sec. 5.10.3.3.1
  878. // If a DASH client detects an event message box with a scheme that is not
  879. // defined in MPD, the client is expected to ignore it.
  880. if ((emsgSchemeIdUris && emsgSchemeIdUris.includes(schemeId)) ||
  881. this.config_.dispatchAllEmsgBoxes) {
  882. // See DASH sec. 5.10.4.1
  883. // A special scheme in DASH used to signal manifest updates.
  884. if (schemeId == 'urn:mpeg:dash:event:2012') {
  885. this.playerInterface_.onManifestUpdate();
  886. } else {
  887. // All other schemes are dispatched as a general 'emsg' event.
  888. const endTime = startTime + (eventDuration / timescale);
  889. /** @type {shaka.extern.EmsgInfo} */
  890. const emsg = {
  891. startTime: startTime,
  892. endTime: endTime,
  893. schemeIdUri: schemeId,
  894. value: value,
  895. timescale: timescale,
  896. presentationTimeDelta: presentationTimeDelta,
  897. eventDuration: eventDuration,
  898. id: id,
  899. messageData: messageData,
  900. };
  901. this.playerInterface_.onEmsg(emsg);
  902. // Additionally, ID3 events generate a 'metadata' event. This is a
  903. // pre-parsed version of the metadata blob already dispatched in the
  904. // 'emsg' event.
  905. if (schemeId == 'https://aomedia.org/emsg/ID3' ||
  906. schemeId == 'https://developer.apple.com/streaming/emsg-id3') {
  907. // See https://aomediacodec.github.io/id3-emsg/
  908. const frames = shaka.util.Id3Utils.getID3Frames(messageData);
  909. if (frames.length) {
  910. /** @private {shaka.extern.ID3Metadata} */
  911. const metadata = {
  912. cueTime: startTime,
  913. data: messageData,
  914. frames: frames,
  915. dts: startTime,
  916. pts: startTime,
  917. };
  918. this.playerInterface_.onMetadata(
  919. [metadata], /* offset= */ 0, endTime);
  920. }
  921. }
  922. }
  923. }
  924. }
  925. /**
  926. * Parse PRFT box.
  927. * @param {number} timescale
  928. * @param {!shaka.extern.ParsedBox} box
  929. * @private
  930. */
  931. parsePrft_(timescale, box) {
  932. goog.asserts.assert(
  933. box.version == 0 || box.version == 1,
  934. 'PRFT version can only be 0 or 1');
  935. const parsed = shaka.util.Mp4BoxParsers.parsePRFTInaccurate(
  936. box.reader, box.version);
  937. const wallClockTime = shaka.util.TimeUtils.convertNtp(parsed.ntpTimestamp);
  938. const programStartDate = new Date(wallClockTime -
  939. (parsed.mediaTime / timescale) * 1000);
  940. /** @type {shaka.extern.ProducerReferenceTime} */
  941. const prftInfo = {
  942. wallClockTime,
  943. programStartDate,
  944. };
  945. const eventName = shaka.util.FakeEvent.EventName.Prft;
  946. const data = (new Map()).set('detail', prftInfo);
  947. const event = new shaka.util.FakeEvent(
  948. eventName, data);
  949. this.playerInterface_.onEvent(event);
  950. }
  951. /**
  952. * Enqueue an operation to append data to the SourceBuffer.
  953. * Start and end times are needed for TextEngine, but not for MediaSource.
  954. * Start and end times may be null for initialization segments; if present
  955. * they are relative to the presentation timeline.
  956. *
  957. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  958. * @param {!BufferSource} data
  959. * @param {?shaka.media.SegmentReference} reference The segment reference
  960. * we are appending, or null for init segments
  961. * @param {shaka.extern.Stream} stream
  962. * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
  963. * captions
  964. * @param {boolean=} seeked True if we just seeked
  965. * @param {boolean=} adaptation True if we just automatically switched active
  966. * variant(s).
  967. * @param {boolean=} isChunkedData True if we add to the buffer from the
  968. * @param {boolean=} fromSplit
  969. * @param {number=} continuityTimeline an optional continuity timeline
  970. * @return {!Promise}
  971. */
  972. async appendBuffer(
  973. contentType, data, reference, stream, hasClosedCaptions, seeked = false,
  974. adaptation = false, isChunkedData = false, fromSplit = false,
  975. continuityTimeline) {
  976. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  977. if (contentType == ContentType.TEXT) {
  978. if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
  979. // This won't be known until the first video segment is appended.
  980. const offset = await this.textSequenceModeOffset_;
  981. this.textEngine_.setTimestampOffset(offset);
  982. }
  983. await this.textEngine_.appendBuffer(
  984. data,
  985. reference ? reference.startTime : null,
  986. reference ? reference.endTime : null,
  987. reference ? reference.getUris()[0] : null);
  988. return;
  989. }
  990. if (!fromSplit && this.needSplitMuxedContent_) {
  991. await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
  992. hasClosedCaptions, seeked, adaptation, isChunkedData,
  993. /* fromSplit= */ true);
  994. await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
  995. hasClosedCaptions, seeked, adaptation, isChunkedData,
  996. /* fromSplit= */ true);
  997. return;
  998. }
  999. if (!this.sourceBuffers_.has(contentType)) {
  1000. shaka.log.warning('Attempted to restore a non-existent source buffer');
  1001. return;
  1002. }
  1003. let timestampOffset = this.sourceBuffers_.get(contentType).timestampOffset;
  1004. let mimeType = this.sourceBufferTypes_.get(contentType);
  1005. if (this.transmuxers_.has(contentType)) {
  1006. mimeType = this.transmuxers_.get(contentType).getOriginalMimeType();
  1007. }
  1008. if (reference) {
  1009. const {timestamp, metadata} = this.getTimestampAndDispatchMetadata(
  1010. contentType, data, reference, stream, mimeType);
  1011. if (timestamp != null) {
  1012. if (this.firstVideoTimestamp_ == null &&
  1013. contentType == ContentType.VIDEO) {
  1014. this.firstVideoTimestamp_ = timestamp;
  1015. this.firstVideoReferenceStartTime_ = reference.startTime;
  1016. if (this.firstAudioTimestamp_ != null) {
  1017. let compensation = 0;
  1018. // Only apply compensation if video and audio segment startTime
  1019. // match, to avoid introducing sync issues.
  1020. if (this.firstVideoReferenceStartTime_ ==
  1021. this.firstAudioReferenceStartTime_) {
  1022. compensation =
  1023. this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
  1024. }
  1025. this.audioCompensation_.resolve(compensation);
  1026. }
  1027. }
  1028. if (this.firstAudioTimestamp_ == null &&
  1029. contentType == ContentType.AUDIO) {
  1030. this.firstAudioTimestamp_ = timestamp;
  1031. this.firstAudioReferenceStartTime_ = reference.startTime;
  1032. if (this.firstVideoTimestamp_ != null) {
  1033. let compensation = 0;
  1034. // Only apply compensation if video and audio segment startTime
  1035. // match, to avoid introducing sync issues.
  1036. if (this.firstVideoReferenceStartTime_ ==
  1037. this.firstAudioReferenceStartTime_) {
  1038. compensation =
  1039. this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
  1040. }
  1041. this.audioCompensation_.resolve(compensation);
  1042. }
  1043. }
  1044. let realTimestamp = timestamp;
  1045. const RAW_FORMATS = shaka.util.MimeUtils.RAW_FORMATS;
  1046. // For formats without containers and using segments mode, we need to
  1047. // adjust TimestampOffset relative to 0 because segments do not have
  1048. // any timestamp information.
  1049. if (!this.sequenceMode_ &&
  1050. RAW_FORMATS.includes(this.sourceBufferTypes_.get(contentType))) {
  1051. realTimestamp = 0;
  1052. }
  1053. const calculatedTimestampOffset = reference.startTime - realTimestamp;
  1054. const timestampOffsetDifference =
  1055. Math.abs(timestampOffset - calculatedTimestampOffset);
  1056. if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
  1057. (!isChunkedData || calculatedTimestampOffset > 0 ||
  1058. !timestampOffset)) {
  1059. timestampOffset = calculatedTimestampOffset;
  1060. if (this.attemptTimestampOffsetCalculation_) {
  1061. this.enqueueOperation_(
  1062. contentType,
  1063. () => this.abort_(contentType),
  1064. null);
  1065. this.enqueueOperation_(
  1066. contentType,
  1067. () => this.setTimestampOffset_(contentType, timestampOffset),
  1068. null);
  1069. }
  1070. }
  1071. // Timestamps can only be reliably extracted from video, not audio.
  1072. // Packed audio formats do not have internal timestamps at all.
  1073. // Prefer video for this when available.
  1074. const isBestSourceBufferForTimestamps =
  1075. contentType == ContentType.VIDEO ||
  1076. !(this.sourceBuffers_.has(ContentType.VIDEO));
  1077. if (isBestSourceBufferForTimestamps) {
  1078. this.textSequenceModeOffset_.resolve(timestampOffset);
  1079. }
  1080. }
  1081. if (metadata.length) {
  1082. this.playerInterface_.onMetadata(metadata, timestampOffset,
  1083. reference ? reference.endTime : null);
  1084. }
  1085. }
  1086. if (hasClosedCaptions && contentType == ContentType.VIDEO) {
  1087. if (!this.textEngine_) {
  1088. this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
  1089. this.sequenceMode_, /* external= */ false);
  1090. }
  1091. if (!this.captionParser_) {
  1092. const basicType = mimeType.split(';', 1)[0];
  1093. this.captionParser_ = this.getCaptionParser(basicType);
  1094. }
  1095. // If it is the init segment for closed captions, initialize the closed
  1096. // caption parser.
  1097. if (!reference) {
  1098. this.captionParser_.init(data, adaptation, continuityTimeline);
  1099. } else {
  1100. const closedCaptions = this.captionParser_.parseFrom(data);
  1101. if (closedCaptions.length) {
  1102. this.textEngine_.storeAndAppendClosedCaptions(
  1103. closedCaptions,
  1104. reference.startTime,
  1105. reference.endTime,
  1106. timestampOffset);
  1107. }
  1108. }
  1109. }
  1110. if (this.transmuxers_.has(contentType)) {
  1111. data = await this.transmuxers_.get(contentType).transmux(
  1112. data, stream, reference, this.mediaSource_.duration, contentType);
  1113. }
  1114. data = this.workAroundBrokenPlatforms_(
  1115. stream, data, reference, contentType);
  1116. if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
  1117. // In sequence mode, for non-text streams, if we just cleared the buffer
  1118. // and are either performing an unbuffered seek or handling an automatic
  1119. // adaptation, we need to set a new timestampOffset on the sourceBuffer.
  1120. if (seeked || adaptation) {
  1121. let timestampOffset = reference.startTime;
  1122. // Audio and video may not be aligned, so we will compensate for audio
  1123. // if necessary.
  1124. if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
  1125. !this.needSplitMuxedContent_ &&
  1126. contentType == ContentType.AUDIO &&
  1127. this.sourceBuffers_.has(ContentType.VIDEO)) {
  1128. const compensation = await this.audioCompensation_;
  1129. // Only apply compensation if the difference is greater than 150ms
  1130. if (Math.abs(compensation) > 0.15) {
  1131. timestampOffset -= compensation;
  1132. }
  1133. }
  1134. // The logic to call abort() before setting the timestampOffset is
  1135. // extended during unbuffered seeks or automatic adaptations; it is
  1136. // possible for the append state to be PARSING_MEDIA_SEGMENT from the
  1137. // previous SourceBuffer#appendBuffer() call.
  1138. this.enqueueOperation_(
  1139. contentType,
  1140. () => this.abort_(contentType),
  1141. null);
  1142. this.enqueueOperation_(
  1143. contentType,
  1144. () => this.setTimestampOffset_(contentType, timestampOffset),
  1145. null);
  1146. }
  1147. }
  1148. let bufferedBefore = null;
  1149. await this.enqueueOperation_(contentType, () => {
  1150. if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
  1151. bufferedBefore = this.getBuffered_(contentType);
  1152. }
  1153. this.append_(contentType, data, timestampOffset, stream);
  1154. }, reference ? reference.getUris()[0] : null);
  1155. if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
  1156. const bufferedAfter = this.getBuffered_(contentType);
  1157. const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
  1158. bufferedBefore, bufferedAfter);
  1159. if (newBuffered) {
  1160. const segmentDuration = reference.endTime - reference.startTime;
  1161. const timeAdded = newBuffered.end - newBuffered.start;
  1162. // Check end times instead of start times. We may be overwriting a
  1163. // buffer and only the end changes, and that would be fine.
  1164. // Also, exclude tiny segments. Sometimes alignment segments as small
  1165. // as 33ms are seen in Google DAI content. For such tiny segments,
  1166. // half a segment duration would be no issue.
  1167. const offset = Math.abs(newBuffered.end - reference.endTime);
  1168. if (segmentDuration > 0.100 && (offset > segmentDuration / 2 ||
  1169. Math.abs(segmentDuration - timeAdded) > 0.030)) {
  1170. shaka.log.error('Possible encoding problem detected!',
  1171. 'Unexpected buffered range for reference', reference,
  1172. 'from URIs', reference.getUris(),
  1173. 'should be', {start: reference.startTime, end: reference.endTime},
  1174. 'but got', newBuffered);
  1175. }
  1176. }
  1177. }
  1178. }
  1179. /**
  1180. * Set the selected closed captions Id and language.
  1181. *
  1182. * @param {string} id
  1183. */
  1184. setSelectedClosedCaptionId(id) {
  1185. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  1186. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  1187. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  1188. }
  1189. /** Disable embedded closed captions. */
  1190. clearSelectedClosedCaptionId() {
  1191. if (this.textEngine_) {
  1192. this.textEngine_.setSelectedClosedCaptionId('', 0);
  1193. }
  1194. }
  1195. /**
  1196. * Enqueue an operation to remove data from the SourceBuffer.
  1197. *
  1198. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1199. * @param {number} startTime relative to the start of the presentation
  1200. * @param {number} endTime relative to the start of the presentation
  1201. * @param {Array<number>=} continuityTimelines a list of continuity timelines
  1202. * that are still available on the stream.
  1203. * @return {!Promise}
  1204. */
  1205. async remove(contentType, startTime, endTime, continuityTimelines) {
  1206. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1207. if (contentType == ContentType.VIDEO && this.captionParser_) {
  1208. this.captionParser_.remove(continuityTimelines);
  1209. }
  1210. if (contentType == ContentType.TEXT) {
  1211. await this.textEngine_.remove(startTime, endTime);
  1212. } else if (endTime > startTime) {
  1213. await this.enqueueOperation_(
  1214. contentType,
  1215. () => this.remove_(contentType, startTime, endTime),
  1216. null);
  1217. if (this.needSplitMuxedContent_) {
  1218. await this.enqueueOperation_(
  1219. ContentType.AUDIO,
  1220. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  1221. null);
  1222. }
  1223. }
  1224. }
  1225. /**
  1226. * Enqueue an operation to clear the SourceBuffer.
  1227. *
  1228. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1229. * @return {!Promise}
  1230. */
  1231. async clear(contentType) {
  1232. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1233. if (contentType == ContentType.TEXT) {
  1234. if (!this.textEngine_) {
  1235. return;
  1236. }
  1237. await this.textEngine_.remove(0, Infinity);
  1238. } else {
  1239. // Note that not all platforms allow clearing to Infinity.
  1240. await this.enqueueOperation_(
  1241. contentType,
  1242. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  1243. null);
  1244. if (this.needSplitMuxedContent_) {
  1245. await this.enqueueOperation_(
  1246. ContentType.AUDIO,
  1247. () => this.remove_(
  1248. ContentType.AUDIO, 0, this.mediaSource_.duration),
  1249. null);
  1250. }
  1251. }
  1252. }
  1253. /**
  1254. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1255. */
  1256. resetCaptionParser() {
  1257. if (this.captionParser_) {
  1258. this.captionParser_.reset();
  1259. }
  1260. }
  1261. /**
  1262. * Enqueue an operation to flush the SourceBuffer.
  1263. * This is a workaround for what we believe is a Chromecast bug.
  1264. *
  1265. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1266. * @return {!Promise}
  1267. */
  1268. async flush(contentType) {
  1269. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1270. // everything.
  1271. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1272. if (contentType == ContentType.TEXT) {
  1273. // Nothing to flush for text.
  1274. return;
  1275. }
  1276. await this.enqueueOperation_(
  1277. contentType,
  1278. () => this.flush_(contentType),
  1279. null);
  1280. if (this.needSplitMuxedContent_) {
  1281. await this.enqueueOperation_(
  1282. ContentType.AUDIO,
  1283. () => this.flush_(ContentType.AUDIO),
  1284. null);
  1285. }
  1286. }
  1287. /**
  1288. * Sets the timestamp offset and append window end for the given content type.
  1289. *
  1290. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1291. * @param {number} timestampOffset The timestamp offset. Segments which start
  1292. * at time t will be inserted at time t + timestampOffset instead. This
  1293. * value does not affect segments which have already been inserted.
  1294. * @param {number} appendWindowStart The timestamp to set the append window
  1295. * start to. For future appends, frames/samples with timestamps less than
  1296. * this value will be dropped.
  1297. * @param {number} appendWindowEnd The timestamp to set the append window end
  1298. * to. For future appends, frames/samples with timestamps greater than this
  1299. * value will be dropped.
  1300. * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
  1301. * not be applied in this step.
  1302. * @param {string} mimeType
  1303. * @param {string} codecs
  1304. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  1305. * shaka.extern.Stream>} streamsByType
  1306. * A map of content types to streams.
  1307. *
  1308. * @return {!Promise}
  1309. */
  1310. async setStreamProperties(
  1311. contentType, timestampOffset, appendWindowStart, appendWindowEnd,
  1312. ignoreTimestampOffset, mimeType, codecs, streamsByType) {
  1313. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1314. if (contentType == ContentType.TEXT) {
  1315. if (!ignoreTimestampOffset) {
  1316. this.textEngine_.setTimestampOffset(timestampOffset);
  1317. }
  1318. this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
  1319. return;
  1320. }
  1321. const operations = [];
  1322. const hasChangedCodecs = await this.codecSwitchIfNecessary_(
  1323. contentType, mimeType, codecs, streamsByType);
  1324. if (!hasChangedCodecs) {
  1325. // Queue an abort() to help MSE splice together overlapping segments.
  1326. // We set appendWindowEnd when we change periods in DASH content, and the
  1327. // period transition may result in overlap.
  1328. //
  1329. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1330. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1331. // timestamp offset. By calling abort(), we reset the state so we can
  1332. // set it.
  1333. operations.push(this.enqueueOperation_(
  1334. contentType,
  1335. () => this.abort_(contentType),
  1336. null));
  1337. if (this.needSplitMuxedContent_) {
  1338. operations.push(this.enqueueOperation_(
  1339. ContentType.AUDIO,
  1340. () => this.abort_(ContentType.AUDIO),
  1341. null));
  1342. }
  1343. }
  1344. if (!ignoreTimestampOffset) {
  1345. operations.push(this.enqueueOperation_(
  1346. contentType,
  1347. () => this.setTimestampOffset_(contentType, timestampOffset),
  1348. null));
  1349. if (this.needSplitMuxedContent_) {
  1350. operations.push(this.enqueueOperation_(
  1351. ContentType.AUDIO,
  1352. () => this.setTimestampOffset_(
  1353. ContentType.AUDIO, timestampOffset),
  1354. null));
  1355. }
  1356. }
  1357. if (appendWindowStart != 0 || appendWindowEnd != Infinity) {
  1358. operations.push(this.enqueueOperation_(
  1359. contentType,
  1360. () => this.setAppendWindow_(
  1361. contentType, appendWindowStart, appendWindowEnd),
  1362. null));
  1363. if (this.needSplitMuxedContent_) {
  1364. operations.push(this.enqueueOperation_(
  1365. ContentType.AUDIO,
  1366. () => this.setAppendWindow_(
  1367. ContentType.AUDIO, appendWindowStart, appendWindowEnd),
  1368. null));
  1369. }
  1370. }
  1371. if (operations.length) {
  1372. await Promise.all(operations);
  1373. }
  1374. }
  1375. /**
  1376. * Adjust timestamp offset to maintain AV sync across discontinuities.
  1377. *
  1378. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1379. * @param {number} timestampOffset
  1380. * @return {!Promise}
  1381. */
  1382. async resync(contentType, timestampOffset) {
  1383. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1384. if (contentType == ContentType.TEXT) {
  1385. // This operation is for audio and video only.
  1386. return;
  1387. }
  1388. // Reset the promise in case the timestamp offset changed during
  1389. // a period/discontinuity transition.
  1390. if (contentType == ContentType.VIDEO) {
  1391. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  1392. }
  1393. if (!this.sequenceMode_) {
  1394. return;
  1395. }
  1396. // Avoid changing timestampOffset when the difference is less than 100 ms
  1397. // from the end of the current buffer.
  1398. const bufferEnd = this.bufferEnd(contentType);
  1399. if (bufferEnd && Math.abs(bufferEnd - timestampOffset) < 0.15) {
  1400. return;
  1401. }
  1402. // Queue an abort() to help MSE splice together overlapping segments.
  1403. // We set appendWindowEnd when we change periods in DASH content, and the
  1404. // period transition may result in overlap.
  1405. //
  1406. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1407. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1408. // timestamp offset. By calling abort(), we reset the state so we can
  1409. // set it.
  1410. this.enqueueOperation_(
  1411. contentType,
  1412. () => this.abort_(contentType),
  1413. null);
  1414. if (this.needSplitMuxedContent_) {
  1415. this.enqueueOperation_(
  1416. ContentType.AUDIO,
  1417. () => this.abort_(ContentType.AUDIO),
  1418. null);
  1419. }
  1420. await this.enqueueOperation_(
  1421. contentType,
  1422. () => this.setTimestampOffset_(contentType, timestampOffset),
  1423. null);
  1424. if (this.needSplitMuxedContent_) {
  1425. await this.enqueueOperation_(
  1426. ContentType.AUDIO,
  1427. () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
  1428. null);
  1429. }
  1430. }
  1431. /**
  1432. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1433. * @return {!Promise}
  1434. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1435. */
  1436. async endOfStream(reason) {
  1437. await this.enqueueBlockingOperation_(() => {
  1438. // If endOfStream() has already been called on the media source,
  1439. // don't call it again. Also do not call if readyState is
  1440. // 'closed' (not attached to video element) since it is not a
  1441. // valid operation.
  1442. if (this.ended() || this.closed()) {
  1443. return;
  1444. }
  1445. // Tizen won't let us pass undefined, but it will let us omit the
  1446. // argument.
  1447. if (reason) {
  1448. this.mediaSource_.endOfStream(reason);
  1449. } else {
  1450. this.mediaSource_.endOfStream();
  1451. }
  1452. });
  1453. }
  1454. /**
  1455. * @param {number} duration
  1456. * @return {!Promise}
  1457. */
  1458. async setDuration(duration) {
  1459. await this.enqueueBlockingOperation_(() => {
  1460. // https://www.w3.org/TR/media-source-2/#duration-change-algorithm
  1461. // "Duration reductions that would truncate currently buffered media
  1462. // are disallowed.
  1463. // When truncation is necessary, use remove() to reduce the buffered
  1464. // range before updating duration."
  1465. // But in some platforms, truncating the duration causes the
  1466. // buffer range removal algorithm to run which triggers an
  1467. // 'updateend' event to fire.
  1468. // To handle this scenario, we have to insert a dummy operation into
  1469. // the beginning of each queue, which the 'updateend' handler will remove.
  1470. // Using config to disable it by default and enable only
  1471. // on relevant platforms.
  1472. if (this.config_.durationReductionEmitsUpdateEnd &&
  1473. duration < this.mediaSource_.duration) {
  1474. for (const contentType of this.sourceBuffers_.keys()) {
  1475. const dummyOperation = {
  1476. start: () => {},
  1477. p: new shaka.util.PublicPromise(),
  1478. uri: null,
  1479. };
  1480. this.queues_.get(contentType).unshift(dummyOperation);
  1481. }
  1482. }
  1483. this.mediaSource_.duration = duration;
  1484. this.lastDuration_ = duration;
  1485. });
  1486. }
  1487. /**
  1488. * Get the current MediaSource duration.
  1489. *
  1490. * @return {number}
  1491. */
  1492. getDuration() {
  1493. return this.mediaSource_.duration;
  1494. }
  1495. /**
  1496. * Updates the live seekable range.
  1497. *
  1498. * @param {number} startTime
  1499. * @param {number} endTime
  1500. */
  1501. async setLiveSeekableRange(startTime, endTime) {
  1502. if (this.destroyer_.destroyed() || this.video_.error ||
  1503. this.usingRemotePlayback_ || this.reloadingMediaSource_) {
  1504. return;
  1505. }
  1506. goog.asserts.assert('setLiveSeekableRange' in this.mediaSource_,
  1507. 'Using setLiveSeekableRange on not supported platform');
  1508. if (this.ended() || this.closed()) {
  1509. return;
  1510. }
  1511. await this.enqueueBlockingOperation_(() => {
  1512. if (this.ended() || this.closed()) {
  1513. return;
  1514. }
  1515. this.mediaSource_.setLiveSeekableRange(startTime, endTime);
  1516. });
  1517. }
  1518. /**
  1519. * Clear the current live seekable range.
  1520. */
  1521. async clearLiveSeekableRange() {
  1522. if (this.destroyer_.destroyed() || this.video_.error ||
  1523. this.usingRemotePlayback_ || this.reloadingMediaSource_) {
  1524. return;
  1525. }
  1526. goog.asserts.assert('clearLiveSeekableRange' in this.mediaSource_,
  1527. 'Using clearLiveSeekableRange on not supported platform');
  1528. if (this.ended() || this.closed()) {
  1529. return;
  1530. }
  1531. await this.enqueueBlockingOperation_(() => {
  1532. if (this.ended() || this.closed()) {
  1533. return;
  1534. }
  1535. this.mediaSource_.clearLiveSeekableRange();
  1536. });
  1537. }
  1538. /**
  1539. * Append dependency data.
  1540. * @param {BufferSource} data
  1541. * @param {number} timestampOffset
  1542. * @param {shaka.extern.Stream} stream
  1543. */
  1544. appendDependency(data, timestampOffset, stream) {
  1545. if (this.lcevcDec_) {
  1546. // Append buffers to the LCEVC Dec for parsing and storing
  1547. // of LCEVC data.
  1548. this.lcevcDec_.appendBuffer(data, timestampOffset, stream);
  1549. }
  1550. }
  1551. /**
  1552. * Append data to the SourceBuffer.
  1553. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1554. * @param {BufferSource} data
  1555. * @param {number} timestampOffset
  1556. * @param {shaka.extern.Stream} stream
  1557. * @private
  1558. */
  1559. append_(contentType, data, timestampOffset, stream) {
  1560. this.appendDependency(data, timestampOffset, stream);
  1561. // This will trigger an 'updateend' event.
  1562. this.sourceBuffers_.get(contentType).appendBuffer(data);
  1563. }
  1564. /**
  1565. * Remove data from the SourceBuffer.
  1566. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1567. * @param {number} startTime relative to the start of the presentation
  1568. * @param {number} endTime relative to the start of the presentation
  1569. * @private
  1570. */
  1571. remove_(contentType, startTime, endTime) {
  1572. if (endTime <= startTime) {
  1573. // Ignore removal of inverted or empty ranges.
  1574. // Fake 'updateend' event to resolve the operation.
  1575. this.onUpdateEnd_(contentType);
  1576. return;
  1577. }
  1578. // This will trigger an 'updateend' event.
  1579. this.sourceBuffers_.get(contentType).remove(startTime, endTime);
  1580. }
  1581. /**
  1582. * Call abort() on the SourceBuffer.
  1583. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1584. * trigger the splicing logic for overlapping segments.
  1585. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1586. * @private
  1587. */
  1588. abort_(contentType) {
  1589. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1590. // Save the append window, which is reset on abort().
  1591. const appendWindowStart = sourceBuffer.appendWindowStart;
  1592. const appendWindowEnd = sourceBuffer.appendWindowEnd;
  1593. // This will not trigger an 'updateend' event, since nothing is happening.
  1594. // This is only to reset MSE internals, not to abort an actual operation.
  1595. sourceBuffer.abort();
  1596. // Restore the append window.
  1597. sourceBuffer.appendWindowStart = appendWindowStart;
  1598. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1599. // Fake an 'updateend' event to resolve the operation.
  1600. this.onUpdateEnd_(contentType);
  1601. }
  1602. /**
  1603. * Nudge the playhead to force the media pipeline to be flushed.
  1604. * This seems to be necessary on Chromecast to get new content to replace old
  1605. * content.
  1606. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1607. * @private
  1608. */
  1609. flush_(contentType) {
  1610. // Never use flush_ if there's data. It causes a hiccup in playback.
  1611. goog.asserts.assert(
  1612. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1613. 'only be used after clearing all data!');
  1614. // Seeking forces the pipeline to be flushed.
  1615. this.video_.currentTime -= 0.001;
  1616. // Fake an 'updateend' event to resolve the operation.
  1617. this.onUpdateEnd_(contentType);
  1618. }
  1619. /**
  1620. * Set the SourceBuffer's timestamp offset.
  1621. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1622. * @param {number} timestampOffset
  1623. * @private
  1624. */
  1625. setTimestampOffset_(contentType, timestampOffset) {
  1626. // Work around for
  1627. // https://github.com/shaka-project/shaka-player/issues/1281:
  1628. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1629. if (timestampOffset < 0) {
  1630. // Try to prevent rounding errors in Edge from removing the first
  1631. // keyframe.
  1632. timestampOffset += 0.001;
  1633. }
  1634. this.sourceBuffers_.get(contentType).timestampOffset = timestampOffset;
  1635. // Fake an 'updateend' event to resolve the operation.
  1636. this.onUpdateEnd_(contentType);
  1637. }
  1638. /**
  1639. * Set the SourceBuffer's append window end.
  1640. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1641. * @param {number} appendWindowStart
  1642. * @param {number} appendWindowEnd
  1643. * @private
  1644. */
  1645. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  1646. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1647. if (sourceBuffer.appendWindowEnd !== appendWindowEnd ||
  1648. sourceBuffer.appendWindowStart !== appendWindowStart) {
  1649. // You can't set start > end, so first set start to 0, then set the new
  1650. // end, then set the new start. That way, there are no intermediate
  1651. // states which are invalid.
  1652. sourceBuffer.appendWindowStart = 0;
  1653. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1654. sourceBuffer.appendWindowStart = appendWindowStart;
  1655. }
  1656. // Fake an 'updateend' event to resolve the operation.
  1657. this.onUpdateEnd_(contentType);
  1658. }
  1659. /**
  1660. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1661. * @private
  1662. */
  1663. onError_(contentType) {
  1664. const operation = this.queues_.get(contentType)[0];
  1665. goog.asserts.assert(operation, 'Spurious error event!');
  1666. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1667. 'SourceBuffer should not be updating on error!');
  1668. const code = this.video_.error ? this.video_.error.code : 0;
  1669. operation.p.reject(new shaka.util.Error(
  1670. shaka.util.Error.Severity.CRITICAL,
  1671. shaka.util.Error.Category.MEDIA,
  1672. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1673. code, operation.uri));
  1674. // Do not pop from queue. An 'updateend' event will fire next, and to
  1675. // avoid synchronizing these two event handlers, we will allow that one to
  1676. // pop from the queue as normal. Note that because the operation has
  1677. // already been rejected, the call to resolve() in the 'updateend' handler
  1678. // will have no effect.
  1679. }
  1680. /**
  1681. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1682. * @private
  1683. */
  1684. onUpdateEnd_(contentType) {
  1685. // If we're reloading or have been destroyed, clear the queue for this
  1686. // content type.
  1687. if (this.reloadingMediaSource_ || this.destroyer_.destroyed()) {
  1688. // Resolve any pending operations in this content type's queue
  1689. const queue = this.queues_.get(contentType);
  1690. if (queue && queue.length) {
  1691. // Resolve the first operation that triggered this updateEnd
  1692. const firstOperation = queue[0];
  1693. if (firstOperation && firstOperation.p) {
  1694. firstOperation.p.resolve();
  1695. }
  1696. // Clear the rest of the queue
  1697. this.queues_.set(contentType, []);
  1698. }
  1699. return;
  1700. }
  1701. const operation = this.queues_.get(contentType)[0];
  1702. goog.asserts.assert(operation, 'Spurious updateend event!');
  1703. if (!operation) {
  1704. return;
  1705. }
  1706. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1707. 'SourceBuffer should not be updating on updateend!');
  1708. operation.p.resolve();
  1709. this.popFromQueue_(contentType);
  1710. }
  1711. /**
  1712. * Enqueue an operation and start it if appropriate.
  1713. *
  1714. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1715. * @param {function()} start
  1716. * @param {?string} uri
  1717. * @return {!Promise}
  1718. * @private
  1719. */
  1720. enqueueOperation_(contentType, start, uri) {
  1721. this.destroyer_.ensureNotDestroyed();
  1722. const operation = {
  1723. start: start,
  1724. p: new shaka.util.PublicPromise(),
  1725. uri,
  1726. };
  1727. this.queues_.get(contentType).push(operation);
  1728. if (this.queues_.get(contentType).length == 1) {
  1729. this.startOperation_(contentType);
  1730. }
  1731. return operation.p;
  1732. }
  1733. /**
  1734. * Enqueue an operation which must block all other operations on all
  1735. * SourceBuffers.
  1736. *
  1737. * @param {function():(Promise|undefined)} run
  1738. * @return {!Promise}
  1739. * @private
  1740. */
  1741. async enqueueBlockingOperation_(run) {
  1742. this.destroyer_.ensureNotDestroyed();
  1743. /** @type {!Array<!shaka.util.PublicPromise>} */
  1744. const allWaiters = [];
  1745. /** @type {!Array<!shaka.util.ManifestParserUtils.ContentType>} */
  1746. const contentTypes = Array.from(this.sourceBuffers_.keys());
  1747. // Enqueue a 'wait' operation onto each queue.
  1748. // This operation signals its readiness when it starts.
  1749. // When all wait operations are ready, the real operation takes place.
  1750. for (const contentType of contentTypes) {
  1751. const ready = new shaka.util.PublicPromise();
  1752. const operation = {
  1753. start: () => ready.resolve(),
  1754. p: ready,
  1755. uri: null,
  1756. };
  1757. const queue = this.queues_.get(contentType);
  1758. queue.push(operation);
  1759. allWaiters.push(ready);
  1760. if (queue.length == 1) {
  1761. operation.start();
  1762. }
  1763. }
  1764. // Return a Promise to the real operation, which waits to begin until
  1765. // there are no other in-progress operations on any SourceBuffers.
  1766. try {
  1767. await Promise.all(allWaiters);
  1768. } catch (error) {
  1769. // One of the waiters failed, which means we've been destroyed.
  1770. goog.asserts.assert(
  1771. this.destroyer_.destroyed(), 'Should be destroyed by now');
  1772. // We haven't popped from the queue. Canceled waiters have been removed
  1773. // by destroy. What's left now should just be resolved waiters. In
  1774. // uncompiled mode, we will maintain good hygiene and make sure the
  1775. // assert at the end of destroy passes. In compiled mode, the queues
  1776. // are wiped in destroy.
  1777. if (goog.DEBUG) {
  1778. for (const contentType of contentTypes) {
  1779. const queue = this.queues_.get(contentType);
  1780. if (queue.length) {
  1781. goog.asserts.assert(queue.length == 1,
  1782. 'Should be at most one item in queue!');
  1783. goog.asserts.assert(allWaiters.includes(queue[0].p),
  1784. 'The item in queue should be one of our waiters!');
  1785. queue.shift();
  1786. }
  1787. }
  1788. }
  1789. throw error;
  1790. }
  1791. if (goog.DEBUG) {
  1792. // If we did it correctly, nothing is updating.
  1793. for (const contentType of contentTypes) {
  1794. goog.asserts.assert(
  1795. this.sourceBuffers_.get(contentType).updating == false,
  1796. 'SourceBuffers should not be updating after a blocking op!');
  1797. }
  1798. }
  1799. // Run the real operation, which can be asynchronous.
  1800. try {
  1801. await run();
  1802. } catch (exception) {
  1803. throw new shaka.util.Error(
  1804. shaka.util.Error.Severity.CRITICAL,
  1805. shaka.util.Error.Category.MEDIA,
  1806. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1807. exception,
  1808. this.video_.error || 'No error in the media element',
  1809. null);
  1810. } finally {
  1811. // Unblock the queues.
  1812. for (const contentType of contentTypes) {
  1813. this.popFromQueue_(contentType);
  1814. }
  1815. }
  1816. }
  1817. /**
  1818. * Pop from the front of the queue and start a new operation.
  1819. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1820. * @private
  1821. */
  1822. popFromQueue_(contentType) {
  1823. goog.asserts.assert(this.queues_.has(contentType), 'Queue should exist');
  1824. // Remove the in-progress operation, which is now complete.
  1825. this.queues_.get(contentType).shift();
  1826. this.startOperation_(contentType);
  1827. }
  1828. /**
  1829. * Starts the next operation in the queue.
  1830. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1831. * @private
  1832. */
  1833. startOperation_(contentType) {
  1834. // Retrieve the next operation, if any, from the queue and start it.
  1835. const next = this.queues_.get(contentType)[0];
  1836. if (next) {
  1837. try {
  1838. next.start();
  1839. } catch (exception) {
  1840. if (exception.name == 'QuotaExceededError') {
  1841. next.p.reject(new shaka.util.Error(
  1842. shaka.util.Error.Severity.CRITICAL,
  1843. shaka.util.Error.Category.MEDIA,
  1844. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1845. contentType));
  1846. } else if (!this.isStreamingAllowed()) {
  1847. next.p.reject(new shaka.util.Error(
  1848. shaka.util.Error.Severity.CRITICAL,
  1849. shaka.util.Error.Category.MEDIA,
  1850. shaka.util.Error.Code.STREAMING_NOT_ALLOWED,
  1851. contentType));
  1852. } else {
  1853. next.p.reject(new shaka.util.Error(
  1854. shaka.util.Error.Severity.CRITICAL,
  1855. shaka.util.Error.Category.MEDIA,
  1856. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1857. exception,
  1858. this.video_.error || 'No error in the media element',
  1859. next.uri));
  1860. }
  1861. this.popFromQueue_(contentType);
  1862. }
  1863. }
  1864. }
  1865. /**
  1866. * @return {!shaka.extern.TextDisplayer}
  1867. */
  1868. getTextDisplayer() {
  1869. goog.asserts.assert(
  1870. this.textDisplayer_,
  1871. 'TextDisplayer should only be null when this is destroyed');
  1872. return this.textDisplayer_;
  1873. }
  1874. /**
  1875. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1876. */
  1877. setTextDisplayer(textDisplayer) {
  1878. this.textDisplayer_ = textDisplayer;
  1879. if (this.textEngine_) {
  1880. this.textEngine_.setDisplayer(textDisplayer);
  1881. }
  1882. }
  1883. /**
  1884. * @param {boolean} segmentRelativeVttTiming
  1885. */
  1886. setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
  1887. this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  1888. }
  1889. /**
  1890. * Apply platform-specific transformations to this segment to work around
  1891. * issues in the platform.
  1892. *
  1893. * @param {shaka.extern.Stream} stream
  1894. * @param {!BufferSource} segment
  1895. * @param {?shaka.media.SegmentReference} reference
  1896. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1897. * @return {!BufferSource}
  1898. * @private
  1899. */
  1900. workAroundBrokenPlatforms_(stream, segment, reference, contentType) {
  1901. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1902. const isMp4 = shaka.util.MimeUtils.getContainerType(
  1903. this.sourceBufferTypes_.get(contentType)) == 'mp4';
  1904. if (!isMp4) {
  1905. return segment;
  1906. }
  1907. const isInitSegment = reference === null;
  1908. const encryptionExpected = this.expectedEncryption_.get(contentType);
  1909. const keySystem = this.playerInterface_.getKeySystem();
  1910. let isEncrypted = false;
  1911. if (reference && reference.initSegmentReference) {
  1912. isEncrypted = reference.initSegmentReference.encrypted;
  1913. }
  1914. const uri = reference ? reference.getUris()[0] : null;
  1915. const device = shaka.device.DeviceFactory.getDevice();
  1916. if (this.config_.correctEc3Enca &&
  1917. isInitSegment &&
  1918. contentType === ContentType.AUDIO) {
  1919. segment = shaka.media.ContentWorkarounds.correctEnca(segment);
  1920. }
  1921. // If:
  1922. // 1. the configuration tells to insert fake encryption,
  1923. // 2. and this is an init segment or media segment,
  1924. // 3. and encryption is expected,
  1925. // 4. and the platform requires encryption in all init or media segments
  1926. // of current content type,
  1927. // then insert fake encryption metadata for init segments that lack it.
  1928. // The MP4 requirement is because we can currently only do this
  1929. // transformation on MP4 containers.
  1930. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1931. if (this.config_.insertFakeEncryptionInInit && encryptionExpected &&
  1932. device.requiresEncryptionInfoInAllInitSegments(keySystem,
  1933. contentType)) {
  1934. if (isInitSegment) {
  1935. shaka.log.debug('Forcing fake encryption information in init segment.');
  1936. segment =
  1937. shaka.media.ContentWorkarounds.fakeEncryption(stream, segment, uri);
  1938. } else if (!isEncrypted && device.requiresTfhdFix(contentType)) {
  1939. shaka.log.debug(
  1940. 'Forcing fake encryption information in media segment.');
  1941. segment = shaka.media.ContentWorkarounds.fakeMediaEncryption(segment);
  1942. }
  1943. }
  1944. if (isInitSegment && device.requiresEC3InitSegments()) {
  1945. shaka.log.debug('Forcing fake EC-3 information in init segment.');
  1946. segment = shaka.media.ContentWorkarounds.fakeEC3(segment);
  1947. }
  1948. return segment;
  1949. }
  1950. /**
  1951. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1952. *
  1953. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1954. * @param {string} mimeType
  1955. * @param {?shaka.extern.Transmuxer} transmuxer
  1956. * @private
  1957. */
  1958. change_(contentType, mimeType, transmuxer) {
  1959. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1960. if (contentType === ContentType.TEXT) {
  1961. shaka.log.debug(`Change not supported for ${contentType}`);
  1962. return;
  1963. }
  1964. const sourceBuffer = this.sourceBufferTypes_.get(contentType);
  1965. shaka.log.debug(
  1966. `Change Type: ${sourceBuffer} -> ${mimeType}`);
  1967. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  1968. if (this.transmuxers_.has(contentType)) {
  1969. this.transmuxers_.get(contentType).destroy();
  1970. this.transmuxers_.delete(contentType);
  1971. }
  1972. if (transmuxer) {
  1973. this.transmuxers_.set(contentType, transmuxer);
  1974. }
  1975. const type = this.addExtraFeaturesToMimeType_(mimeType);
  1976. this.sourceBuffers_.get(contentType).changeType(type);
  1977. this.sourceBufferTypes_.set(contentType, mimeType);
  1978. } else {
  1979. shaka.log.debug('Change Type not supported');
  1980. }
  1981. // Fake an 'updateend' event to resolve the operation.
  1982. this.onUpdateEnd_(contentType);
  1983. }
  1984. /**
  1985. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  1986. * type or codec.
  1987. *
  1988. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1989. * @param {string} mimeType
  1990. * @param {?shaka.extern.Transmuxer} transmuxer
  1991. * @return {!Promise}
  1992. */
  1993. changeType(contentType, mimeType, transmuxer) {
  1994. return this.enqueueOperation_(
  1995. contentType,
  1996. () => this.change_(contentType, mimeType, transmuxer),
  1997. null);
  1998. }
  1999. /**
  2000. * Resets the MediaSource and re-adds source buffers due to codec mismatch
  2001. *
  2002. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2003. * shaka.extern.Stream>} streamsByType
  2004. * @private
  2005. */
  2006. async reset_(streamsByType) {
  2007. if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
  2008. return;
  2009. }
  2010. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2011. this.reloadingMediaSource_ = true;
  2012. this.needSplitMuxedContent_ = false;
  2013. const currentTime = this.video_.currentTime;
  2014. // When codec switching if the user is currently paused we don't want
  2015. // to trigger a play when switching codec.
  2016. // Playing can also end up in a paused state after a codec switch
  2017. // so we need to remember the current states.
  2018. const previousAutoPlayState = this.video_.autoplay;
  2019. if (!this.video_.paused) {
  2020. this.playAfterReset_ = true;
  2021. }
  2022. if (this.playbackHasBegun_) {
  2023. // Only set autoplay to false if the video playback has already begun.
  2024. // When a codec switch happens before playback has begun this can cause
  2025. // autoplay not to work as expected.
  2026. this.video_.autoplay = false;
  2027. }
  2028. try {
  2029. this.eventManager_.removeAll();
  2030. for (const transmuxer of this.transmuxers_.values()) {
  2031. transmuxer.destroy();
  2032. }
  2033. for (const sourceBuffer of this.sourceBuffers_.values()) {
  2034. try {
  2035. this.mediaSource_.removeSourceBuffer(sourceBuffer);
  2036. } catch (e) {
  2037. shaka.log.debug('Exception on removeSourceBuffer', e);
  2038. }
  2039. }
  2040. this.transmuxers_.clear();
  2041. this.sourceBuffers_.clear();
  2042. const previousDuration = this.mediaSource_.duration;
  2043. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  2044. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  2045. await this.mediaSourceOpen_;
  2046. if (!isNaN(previousDuration) && previousDuration) {
  2047. this.mediaSource_.duration = previousDuration;
  2048. } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
  2049. this.mediaSource_.duration = this.lastDuration_;
  2050. }
  2051. const sourceBufferAdded = new shaka.util.PublicPromise();
  2052. const sourceBuffers =
  2053. /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
  2054. const totalOfBuffers = streamsByType.size;
  2055. let numberOfSourceBufferAdded = 0;
  2056. const onSourceBufferAdded = () => {
  2057. numberOfSourceBufferAdded++;
  2058. if (numberOfSourceBufferAdded === totalOfBuffers) {
  2059. sourceBufferAdded.resolve();
  2060. this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
  2061. onSourceBufferAdded);
  2062. }
  2063. };
  2064. this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
  2065. onSourceBufferAdded);
  2066. for (const contentType of streamsByType.keys()) {
  2067. const stream = streamsByType.get(contentType);
  2068. this.initSourceBuffer_(contentType, stream, stream.codecs);
  2069. }
  2070. const audio = streamsByType.get(ContentType.AUDIO);
  2071. if (audio && audio.isAudioMuxedInVideo) {
  2072. this.needSplitMuxedContent_ = true;
  2073. }
  2074. if (this.needSplitMuxedContent_ && !this.queues_.has(ContentType.AUDIO)) {
  2075. this.queues_.set(ContentType.AUDIO, []);
  2076. }
  2077. // Fake a seek to catchup the playhead.
  2078. this.video_.currentTime = currentTime;
  2079. await sourceBufferAdded;
  2080. } finally {
  2081. this.reloadingMediaSource_ = false;
  2082. this.destroyer_.ensureNotDestroyed();
  2083. this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
  2084. // Don't use ensureNotDestroyed() from this event listener, because
  2085. // that results in an uncaught exception. Instead, just check the
  2086. // flag.
  2087. if (this.destroyer_.destroyed()) {
  2088. return;
  2089. }
  2090. this.video_.autoplay = previousAutoPlayState;
  2091. if (this.playAfterReset_) {
  2092. this.playAfterReset_ = false;
  2093. this.video_.play();
  2094. }
  2095. });
  2096. }
  2097. }
  2098. /**
  2099. * Resets the Media Source
  2100. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2101. * shaka.extern.Stream>} streamsByType
  2102. * @return {!Promise}
  2103. */
  2104. reset(streamsByType) {
  2105. return this.enqueueBlockingOperation_(
  2106. () => this.reset_(streamsByType));
  2107. }
  2108. /**
  2109. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2110. * @param {string} mimeType
  2111. * @param {string} codecs
  2112. * @return {{
  2113. * transmuxer: ?shaka.extern.Transmuxer,
  2114. * transmuxerMuxed: boolean,
  2115. * basicType: string,
  2116. * codec: string,
  2117. * mimeType: string,
  2118. * }}
  2119. * @private
  2120. */
  2121. getRealInfo_(contentType, mimeType, codecs) {
  2122. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2123. const MimeUtils = shaka.util.MimeUtils;
  2124. /** @type {?shaka.extern.Transmuxer} */
  2125. let transmuxer;
  2126. let transmuxerMuxed = false;
  2127. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  2128. ContentType.AUDIO, (codecs || '').split(','));
  2129. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  2130. ContentType.VIDEO, (codecs || '').split(','));
  2131. let codec = videoCodec;
  2132. if (contentType == ContentType.AUDIO) {
  2133. codec = audioCodec;
  2134. }
  2135. if (!codec) {
  2136. codec = codecs;
  2137. }
  2138. let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codec);
  2139. const currentBasicType = MimeUtils.getBasicType(
  2140. this.sourceBufferTypes_.get(contentType));
  2141. let needTransmux = this.config_.forceTransmux;
  2142. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  2143. (!this.sequenceMode_ &&
  2144. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  2145. needTransmux = true;
  2146. } else if (!needTransmux && mimeType != currentBasicType) {
  2147. const device = shaka.device.DeviceFactory.getDevice();
  2148. needTransmux = device.getBrowserEngine() ===
  2149. shaka.device.IDevice.BrowserEngine.WEBKIT &&
  2150. shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType);
  2151. }
  2152. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  2153. if (needTransmux) {
  2154. const newMimeTypeWithAllCodecs =
  2155. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec);
  2156. const transmuxerPlugin =
  2157. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  2158. if (transmuxerPlugin) {
  2159. transmuxer = transmuxerPlugin();
  2160. if (audioCodec && videoCodec) {
  2161. transmuxerMuxed = true;
  2162. }
  2163. newMimeType =
  2164. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  2165. }
  2166. }
  2167. const newCodec = MimeUtils.getNormalizedCodec(
  2168. MimeUtils.getCodecs(newMimeType));
  2169. const newBasicType = MimeUtils.getBasicType(newMimeType);
  2170. return {
  2171. transmuxer,
  2172. transmuxerMuxed,
  2173. basicType: newBasicType,
  2174. codec: newCodec,
  2175. mimeType: newMimeType,
  2176. };
  2177. }
  2178. /**
  2179. * Codec switch if necessary, this will not resolve until the codec
  2180. * switch is over.
  2181. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2182. * @param {string} mimeType
  2183. * @param {string} codecs
  2184. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2185. * shaka.extern.Stream>} streamsByType
  2186. * @return {{
  2187. * type: string,
  2188. * newMimeType: string,
  2189. * transmuxer: ?shaka.extern.Transmuxer,
  2190. * }}
  2191. * @private
  2192. */
  2193. getInfoAboutResetOrChangeType_(contentType, mimeType, codecs, streamsByType) {
  2194. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2195. if (contentType == ContentType.TEXT) {
  2196. return {
  2197. type: shaka.media.MediaSourceEngine.ResetMode_.NONE,
  2198. newMimeType: mimeType,
  2199. transmuxer: null,
  2200. };
  2201. }
  2202. const MimeUtils = shaka.util.MimeUtils;
  2203. const currentCodec = MimeUtils.getNormalizedCodec(
  2204. MimeUtils.getCodecs(this.sourceBufferTypes_.get(contentType)));
  2205. const currentBasicType = MimeUtils.getBasicType(
  2206. this.sourceBufferTypes_.get(contentType));
  2207. const realInfo = this.getRealInfo_(contentType, mimeType, codecs);
  2208. const transmuxer = realInfo.transmuxer;
  2209. const transmuxerMuxed = realInfo.transmuxerMuxed;
  2210. const newBasicType = realInfo.basicType;
  2211. const newCodec = realInfo.codec;
  2212. const newMimeType = realInfo.mimeType;
  2213. let muxedContentCheck = true;
  2214. if (transmuxerMuxed &&
  2215. this.sourceBufferTypes_.has(ContentType.AUDIO)) {
  2216. const muxedRealInfo =
  2217. this.getRealInfo_(ContentType.AUDIO, mimeType, codecs);
  2218. const muxedCurrentCodec = MimeUtils.getNormalizedCodec(
  2219. MimeUtils.getCodecs(this.sourceBufferTypes_.get(ContentType.AUDIO)));
  2220. const muxedCurrentBasicType = MimeUtils.getBasicType(
  2221. this.sourceBufferTypes_.get(ContentType.AUDIO));
  2222. muxedContentCheck = muxedCurrentCodec == muxedRealInfo.codec &&
  2223. muxedCurrentBasicType == muxedRealInfo.basicType;
  2224. if (muxedRealInfo.transmuxer) {
  2225. muxedRealInfo.transmuxer.destroy();
  2226. }
  2227. }
  2228. // Current/new codecs base and basic type match then no need to switch
  2229. if (currentCodec === newCodec && currentBasicType === newBasicType &&
  2230. muxedContentCheck) {
  2231. return {
  2232. type: shaka.media.MediaSourceEngine.ResetMode_.NONE,
  2233. newMimeType,
  2234. transmuxer,
  2235. };
  2236. }
  2237. let allowChangeType = true;
  2238. if ((this.needSplitMuxedContent_ &&
  2239. !streamsByType.has(ContentType.AUDIO)) || (transmuxerMuxed &&
  2240. transmuxer && !this.transmuxers_.has(contentType))) {
  2241. allowChangeType = false;
  2242. }
  2243. if (allowChangeType && this.config_.codecSwitchingStrategy ===
  2244. shaka.config.CodecSwitchingStrategy.SMOOTH &&
  2245. shaka.media.Capabilities.isChangeTypeSupported()) {
  2246. return {
  2247. type: shaka.media.MediaSourceEngine.ResetMode_.CHANGE_TYPE,
  2248. newMimeType,
  2249. transmuxer,
  2250. };
  2251. } else {
  2252. if (transmuxer) {
  2253. transmuxer.destroy();
  2254. }
  2255. return {
  2256. type: shaka.media.MediaSourceEngine.ResetMode_.RESET,
  2257. newMimeType,
  2258. transmuxer: null,
  2259. };
  2260. }
  2261. }
  2262. /**
  2263. * Codec switch if necessary, this will not resolve until the codec
  2264. * switch is over.
  2265. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2266. * @param {string} mimeType
  2267. * @param {string} codecs
  2268. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2269. * shaka.extern.Stream>} streamsByType
  2270. * @return {!Promise<boolean>} true if there was a codec switch,
  2271. * false otherwise.
  2272. * @private
  2273. */
  2274. async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
  2275. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2276. const {type, transmuxer, newMimeType} = this.getInfoAboutResetOrChangeType_(
  2277. contentType, mimeType, codecs, streamsByType);
  2278. const newAudioStream = streamsByType.get(ContentType.AUDIO);
  2279. if (newAudioStream) {
  2280. this.needSplitMuxedContent_ = newAudioStream.isAudioMuxedInVideo;
  2281. }
  2282. if (type == shaka.media.MediaSourceEngine.ResetMode_.NONE) {
  2283. if (this.transmuxers_.has(contentType) && !transmuxer) {
  2284. this.transmuxers_.get(contentType).destroy();
  2285. this.transmuxers_.delete(contentType);
  2286. } else if (!this.transmuxers_.has(contentType) && transmuxer) {
  2287. this.transmuxers_.set(contentType, transmuxer);
  2288. } else if (transmuxer) {
  2289. // Compare if the transmuxer is different
  2290. if (this.transmuxers_.has(contentType) &&
  2291. this.transmuxers_.get(contentType).transmux !==
  2292. transmuxer.transmux) {
  2293. this.transmuxers_.get(contentType).destroy();
  2294. this.transmuxers_.set(contentType, transmuxer);
  2295. } else {
  2296. transmuxer.destroy();
  2297. }
  2298. }
  2299. return false;
  2300. }
  2301. if (type == shaka.media.MediaSourceEngine.ResetMode_.CHANGE_TYPE) {
  2302. await this.changeType(contentType, newMimeType, transmuxer);
  2303. } else if (type == shaka.media.MediaSourceEngine.ResetMode_.RESET) {
  2304. if (transmuxer) {
  2305. transmuxer.destroy();
  2306. }
  2307. await this.reset(streamsByType);
  2308. }
  2309. return true;
  2310. }
  2311. /**
  2312. * Returns true if it's necessary reset the media source to load the
  2313. * new stream.
  2314. *
  2315. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2316. * @param {string} mimeType
  2317. * @param {string} codecs
  2318. * @return {boolean}
  2319. */
  2320. isResetMediaSourceNecessary(contentType, mimeType, codecs, streamsByType) {
  2321. const info = this.getInfoAboutResetOrChangeType_(
  2322. contentType, mimeType, codecs, streamsByType);
  2323. if (info.transmuxer) {
  2324. info.transmuxer.destroy();
  2325. }
  2326. return info.type == shaka.media.MediaSourceEngine.ResetMode_.RESET;
  2327. }
  2328. /**
  2329. * Update LCEVC Decoder object when ready for LCEVC Decode.
  2330. * @param {?shaka.lcevc.Dec} lcevcDec
  2331. */
  2332. updateLcevcDec(lcevcDec) {
  2333. this.lcevcDec_ = lcevcDec;
  2334. }
  2335. /**
  2336. * @param {string} mimeType
  2337. * @return {string}
  2338. * @private
  2339. */
  2340. addExtraFeaturesToMimeType_(mimeType) {
  2341. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  2342. const extendedType = mimeType + extraFeatures;
  2343. shaka.log.debug('Using full mime type', extendedType);
  2344. return extendedType;
  2345. }
  2346. };
  2347. /**
  2348. * Internal reference to window.URL.createObjectURL function to avoid
  2349. * compatibility issues with other libraries and frameworks such as React
  2350. * Native. For use in unit tests only, not meant for external use.
  2351. *
  2352. * @type {function(?):string}
  2353. */
  2354. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  2355. /**
  2356. * @typedef {{
  2357. * start: function(),
  2358. * p: !shaka.util.PublicPromise,
  2359. * uri: ?string,
  2360. * }}
  2361. *
  2362. * @summary An operation in queue.
  2363. * @property {function()} start
  2364. * The function which starts the operation.
  2365. * @property {!shaka.util.PublicPromise} p
  2366. * The PublicPromise which is associated with this operation.
  2367. * @property {?string} uri
  2368. * A segment URI (if any) associated with this operation.
  2369. */
  2370. shaka.media.MediaSourceEngine.Operation;
  2371. /**
  2372. * @enum {string}
  2373. * @private
  2374. */
  2375. shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  2376. SEQUENCE: 'sequence',
  2377. SEGMENTS: 'segments',
  2378. };
  2379. /**
  2380. * @enum {string}
  2381. * @private
  2382. */
  2383. shaka.media.MediaSourceEngine.ResetMode_ = {
  2384. NONE: 'none',
  2385. RESET: 'reset',
  2386. CHANGE_TYPE: 'changeType',
  2387. };
  2388. /**
  2389. * @typedef {{
  2390. * getKeySystem: function():?string,
  2391. * onMetadata: function(!Array<shaka.extern.ID3Metadata>, number, ?number),
  2392. * onEmsg: function(!shaka.extern.EmsgInfo),
  2393. * onEvent: function(!Event),
  2394. * onManifestUpdate: function(),
  2395. * }}
  2396. *
  2397. * @summary Player interface
  2398. * @property {function():?string} getKeySystem
  2399. * Gets currently used key system or null if not used.
  2400. * @property {function(
  2401. * !Array<shaka.extern.ID3Metadata>, number, ?number)} onMetadata
  2402. * Callback to use when metadata arrives.
  2403. * @property {function(!shaka.extern.EmsgInfo)} onEmsg
  2404. * Callback to use when EMSG arrives.
  2405. * @property {function(!Event)} onEvent
  2406. * Called when an event occurs that should be sent to the app.
  2407. * @property {function()} onManifestUpdate
  2408. * Called when an embedded 'emsg' box should trigger a manifest update.
  2409. */
  2410. shaka.media.MediaSourceEngine.PlayerInterface;