The version of vichan running on lainchan.org

18869 lines
604KB

  1. /**
  2. * videojs-contrib-hls
  3. * @version 5.5.0
  4. * @copyright 2017 Brightcove, Inc
  5. * @license Apache-2.0
  6. */
  7. (function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.videojsContribHls = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
  8. /**
  9. * @file ad-cue-tags.js
  10. */
  11. 'use strict';
  12. Object.defineProperty(exports, '__esModule', {
  13. value: true
  14. });
  15. var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })();
  16. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  17. var _globalWindow = require('global/window');
  18. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  19. /**
  20. * Searches for an ad cue that overlaps with the given mediaTime
  21. */
  22. var findAdCue = function findAdCue(track, mediaTime) {
  23. var cues = track.cues;
  24. for (var i = 0; i < cues.length; i++) {
  25. var cue = cues[i];
  26. if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
  27. return cue;
  28. }
  29. }
  30. return null;
  31. };
  32. var updateAdCues = function updateAdCues(media, track) {
  33. var offset = arguments.length <= 2 || arguments[2] === undefined ? 0 : arguments[2];
  34. if (!media.segments) {
  35. return;
  36. }
  37. var mediaTime = offset;
  38. var cue = undefined;
  39. for (var i = 0; i < media.segments.length; i++) {
  40. var segment = media.segments[i];
  41. if (!cue) {
  42. // Since the cues will span for at least the segment duration, adding a fudge
  43. // factor of half segment duration will prevent duplicate cues from being
  44. // created when timing info is not exact (e.g. cue start time initialized
  45. // at 10.006677, but next call mediaTime is 10.003332 )
  46. cue = findAdCue(track, mediaTime + segment.duration / 2);
  47. }
  48. if (cue) {
  49. if ('cueIn' in segment) {
  50. // Found a CUE-IN so end the cue
  51. cue.endTime = mediaTime;
  52. cue.adEndTime = mediaTime;
  53. mediaTime += segment.duration;
  54. cue = null;
  55. continue;
  56. }
  57. if (mediaTime < cue.endTime) {
  58. // Already processed this mediaTime for this cue
  59. mediaTime += segment.duration;
  60. continue;
  61. }
  62. // otherwise extend cue until a CUE-IN is found
  63. cue.endTime += segment.duration;
  64. } else {
  65. if ('cueOut' in segment) {
  66. cue = new _globalWindow2['default'].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
  67. cue.adStartTime = mediaTime;
  68. // Assumes tag format to be
  69. // #EXT-X-CUE-OUT:30
  70. cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
  71. track.addCue(cue);
  72. }
  73. if ('cueOutCont' in segment) {
  74. // Entered into the middle of an ad cue
  75. var adOffset = undefined;
  76. var adTotal = undefined;
  77. // Assumes tag format to be
  78. // #EXT-X-CUE-OUT-CONT:10/30
  79. var _segment$cueOutCont$split$map = segment.cueOutCont.split('/').map(parseFloat);
  80. var _segment$cueOutCont$split$map2 = _slicedToArray(_segment$cueOutCont$split$map, 2);
  81. adOffset = _segment$cueOutCont$split$map2[0];
  82. adTotal = _segment$cueOutCont$split$map2[1];
  83. cue = new _globalWindow2['default'].VTTCue(mediaTime, mediaTime + segment.duration, '');
  84. cue.adStartTime = mediaTime - adOffset;
  85. cue.adEndTime = cue.adStartTime + adTotal;
  86. track.addCue(cue);
  87. }
  88. }
  89. mediaTime += segment.duration;
  90. }
  91. };
  92. exports['default'] = {
  93. updateAdCues: updateAdCues,
  94. findAdCue: findAdCue
  95. };
  96. module.exports = exports['default'];
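// A minimal usage sketch (the playlist object below is a hypothetical shape,
// not part of this file): given a parsed media playlist whose segments carry
// cueOut/cueOutCont/cueIn data, updateAdCues stamps VTTCues onto a metadata
// text track.
//
//   var media = {
//     segments: [
//       { duration: 10, cueOut: '30' },        // #EXT-X-CUE-OUT:30
//       { duration: 10, cueOutCont: '10/30' }, // #EXT-X-CUE-OUT-CONT:10/30
//       { duration: 10, cueIn: '' }            // #EXT-X-CUE-IN
//     ]
//   };
//   updateAdCues(media, track, 0); // track: a 'metadata' TextTrack
//   // track now holds one cue spanning the two ad segments (startTime 0,
//   // endTime 20); adStartTime is 0 and adEndTime is clamped to 20 when the
//   // CUE-IN arrives earlier than the advertised 30 seconds.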
  97. },{"global/window":30}],2:[function(require,module,exports){
  98. /**
  99. * @file bin-utils.js
  100. */
  101. /**
  102. * convert a TimeRange to text
  103. *
  104. * @param {TimeRange} range the timerange to use for conversion
  105. * @param {Number} i the iterator on the range to convert
  106. */
  107. 'use strict';
  108. Object.defineProperty(exports, '__esModule', {
  109. value: true
  110. });
  111. var textRange = function textRange(range, i) {
  112. return range.start(i) + '-' + range.end(i);
  113. };
  114. /**
  115. * format a number as hex string
  116. *
  117. * @param {Number} e The number
  118. * @param {Number} i the iterator
  119. */
  120. var formatHexString = function formatHexString(e, i) {
  121. var value = e.toString(16);
  122. return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
  123. };
  124. var formatAsciiString = function formatAsciiString(e) {
  125. if (e >= 0x20 && e < 0x7e) {
  126. return String.fromCharCode(e);
  127. }
  128. return '.';
  129. };
  130. /**
  131. * Creates an object for sending to a web worker modifying properties that are TypedArrays
  132. * into a new object with separated properties for the buffer, byteOffset, and byteLength.
  133. *
  134. * @param {Object} message
  135. * Object of properties and values to send to the web worker
  136. * @return {Object}
  137. * Modified message with TypedArray values expanded
  138. * @function createTransferableMessage
  139. */
  140. var createTransferableMessage = function createTransferableMessage(message) {
  141. var transferable = {};
  142. Object.keys(message).forEach(function (key) {
  143. var value = message[key];
  144. if (ArrayBuffer.isView(value)) {
  145. transferable[key] = {
  146. bytes: value.buffer,
  147. byteOffset: value.byteOffset,
  148. byteLength: value.byteLength
  149. };
  150. } else {
  151. transferable[key] = value;
  152. }
  153. });
  154. return transferable;
  155. };
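// Sketch (the worker handle is assumed, not defined in this file): expanding
// typed arrays into { bytes, byteOffset, byteLength } lets the caller list the
// raw ArrayBuffers in postMessage's transfer list, moving them instead of
// copying:
//
//   var message = createTransferableMessage({ encrypted: new Uint8Array(16) });
//   worker.postMessage(message, [message.encrypted.bytes]);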
  156. /**
  157. * Returns a unique string identifier for a media initialization
  158. * segment.
  159. */
  160. var initSegmentId = function initSegmentId(initSegment) {
  161. var byterange = initSegment.byterange || {
  162. length: Infinity,
  163. offset: 0
  164. };
  165. return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
  166. };
  167. /**
  168. * utils to help dump binary data to the console
  169. */
  170. var utils = {
  171. hexDump: function hexDump(data) {
  172. var bytes = Array.prototype.slice.call(data);
  173. var step = 16;
  174. var result = '';
  175. var hex = undefined;
  176. var ascii = undefined;
  177. for (var j = 0; j < bytes.length / step; j++) {
  178. hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
  179. ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
  180. result += hex + ' ' + ascii + '\n';
  181. }
  182. return result;
  183. },
  184. tagDump: function tagDump(tag) {
  185. return utils.hexDump(tag.bytes);
  186. },
  187. textRanges: function textRanges(ranges) {
  188. var result = '';
  189. var i = undefined;
  190. for (i = 0; i < ranges.length; i++) {
  191. result += textRange(ranges, i) + ' ';
  192. }
  193. return result;
  194. },
  195. createTransferableMessage: createTransferableMessage,
  196. initSegmentId: initSegmentId
  197. };
  198. exports['default'] = utils;
  199. module.exports = exports['default'];
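// Sketch of the dump format these helpers produce (values worked by hand from
// formatHexString/formatAsciiString above):
//
//   utils.hexDump(new Uint8Array([0x48, 0x4c, 0x53, 0x21]));
//   // => '484c 5321  HLS!\n'  (hex bytes grouped in pairs, then the ASCII view)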
  200. },{}],3:[function(require,module,exports){
  201. "use strict";
  202. Object.defineProperty(exports, "__esModule", {
  203. value: true
  204. });
  205. exports["default"] = {
  206. GOAL_BUFFER_LENGTH: 30
  207. };
  208. module.exports = exports["default"];
  209. },{}],4:[function(require,module,exports){
  210. 'use strict';
  211. Object.defineProperty(exports, '__esModule', {
  212. value: true
  213. });
  214. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  215. var _globalWindow = require('global/window');
  216. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  217. var _aesDecrypter = require('aes-decrypter');
  218. var _binUtils = require('./bin-utils');
  219. /**
  220. * Our web worker interface so that things can talk to aes-decrypter
  221. * that will be running in a web worker. The scope is passed to this by
  222. * webworkify.
  223. *
  224. * @param {Object} self
  225. * the scope for the web worker
  226. */
  227. var DecrypterWorker = function DecrypterWorker(self) {
  228. self.onmessage = function (event) {
  229. var data = event.data;
  230. var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
  231. var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
  232. var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
  233. /* eslint-disable no-new, handle-callback-err */
  234. new _aesDecrypter.Decrypter(encrypted, key, iv, function (err, bytes) {
  235. _globalWindow2['default'].postMessage((0, _binUtils.createTransferableMessage)({
  236. source: data.source,
  237. decrypted: bytes
  238. }), [bytes.buffer]);
  239. });
  240. /* eslint-enable */
  241. };
  242. };
  243. exports['default'] = function (self) {
  244. return new DecrypterWorker(self);
  245. };
  246. module.exports = exports['default'];
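// Sketch of the expected wiring (mirrors how master-playlist-controller.js
// below uses webworkify; the message shape is the one read by onmessage above,
// and the variable names are assumptions for illustration):
//
//   var decrypter = webworkify(require('./decrypter-worker'));
//   decrypter.postMessage(createTransferableMessage({
//     source: 1,
//     encrypted: encryptedBytes, // Uint8Array
//     key: keyBytes,             // Uint8Array, 16 bytes (read as Uint32Array)
//     iv: ivBytes                // Uint8Array, 16 bytes (read as Uint32Array)
//   }), [encryptedBytes.buffer, keyBytes.buffer, ivBytes.buffer]);
//   decrypter.onmessage = function (event) { /* event.data.decrypted */ };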
  247. },{"./bin-utils":2,"aes-decrypter":23,"global/window":30}],5:[function(require,module,exports){
  248. (function (global){
  249. /**
  250. * @file master-playlist-controller.js
  251. */
  252. 'use strict';
  253. Object.defineProperty(exports, '__esModule', {
  254. value: true
  255. });
  256. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  257. var _get = function get(_x2, _x3, _x4) { var _again = true; _function: while (_again) { var object = _x2, property = _x3, receiver = _x4; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x2 = parent; _x3 = property; _x4 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  258. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  259. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  260. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  261. var _playlistLoader = require('./playlist-loader');
  262. var _playlistLoader2 = _interopRequireDefault(_playlistLoader);
  263. var _segmentLoader = require('./segment-loader');
  264. var _segmentLoader2 = _interopRequireDefault(_segmentLoader);
  265. var _vttSegmentLoader = require('./vtt-segment-loader');
  266. var _vttSegmentLoader2 = _interopRequireDefault(_vttSegmentLoader);
  267. var _ranges = require('./ranges');
  268. var _ranges2 = _interopRequireDefault(_ranges);
  269. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  270. var _videoJs2 = _interopRequireDefault(_videoJs);
  271. var _adCueTags = require('./ad-cue-tags');
  272. var _adCueTags2 = _interopRequireDefault(_adCueTags);
  273. var _syncController = require('./sync-controller');
  274. var _syncController2 = _interopRequireDefault(_syncController);
  275. var _videojsContribMediaSourcesEs5CodecUtils = require('videojs-contrib-media-sources/es5/codec-utils');
  276. var _webworkify = require('webworkify');
  277. var _webworkify2 = _interopRequireDefault(_webworkify);
  278. var _decrypterWorker = require('./decrypter-worker');
  279. var _decrypterWorker2 = _interopRequireDefault(_decrypterWorker);
  280. var Hls = undefined;
  281. // SegmentLoader stats that need to have each loader's
  282. // values summed to calculate the final value
  283. var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];
  284. var sumLoaderStat = function sumLoaderStat(stat) {
  285. return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
  286. };
  287. /**
  288. * determine if an object a is different from
  289. * an object b, both only having one-dimensional
  290. * properties
  291. *
  292. * @param {Object} a object one
  293. * @param {Object} b object two
  294. * @return {Boolean} if the object has changed or not
  295. */
  296. var objectChanged = function objectChanged(a, b) {
  297. if (typeof a !== typeof b) {
  298. return true;
  299. }
  300. // if we have a different number of elements
  301. // something has changed
  302. if (Object.keys(a).length !== Object.keys(b).length) {
  303. return true;
  304. }
  305. for (var prop in a) {
  306. if (a[prop] !== b[prop]) {
  307. return true;
  308. }
  309. }
  310. return false;
  311. };
  312. /**
  313. * Parses a codec string to retrieve the number of codecs specified,
  314. * the video codec and object type indicator, and the audio profile.
  315. *
  316. * @private
  317. */
  318. var parseCodecs = function parseCodecs(codecs) {
  319. var result = {
  320. codecCount: 0,
  321. videoCodec: null,
  322. videoObjectTypeIndicator: null,
  323. audioProfile: null
  324. };
  325. var parsed = undefined;
  326. result.codecCount = codecs.split(',').length;
  327. result.codecCount = result.codecCount || 2;
  328. // parse the video codec
  329. parsed = /(^|\s|,)+(avc1)([^ ,]*)/i.exec(codecs);
  330. if (parsed) {
  331. result.videoCodec = parsed[2];
  332. result.videoObjectTypeIndicator = parsed[3];
  333. }
  334. // parse the last field of the audio codec
  335. result.audioProfile = /(^|\s|,)+mp4a.[0-9A-Fa-f]+\.([0-9A-Fa-f]+)/i.exec(codecs);
  336. result.audioProfile = result.audioProfile && result.audioProfile[2];
  337. return result;
  338. };
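// Worked example for the CODECS attribute most HLS streams advertise (traced
// directly from the regexes above):
//
//   parseCodecs('avc1.4d400d, mp4a.40.2');
//   // => { codecCount: 2,
//   //      videoCodec: 'avc1',
//   //      videoObjectTypeIndicator: '.4d400d',
//   //      audioProfile: '2' }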
  339. /**
  340. * Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
  341. * standard `avc1.<hhhhhh>`.
  342. *
  343. * @param codecString {String} the codec string
  344. * @return {String} the codec string with old apple-style codecs replaced
  345. *
  346. * @private
  347. */
  348. var mapLegacyAvcCodecs_ = function mapLegacyAvcCodecs_(codecString) {
  349. return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
  350. return (0, _videojsContribMediaSourcesEs5CodecUtils.translateLegacyCodecs)([match])[0];
  351. });
  352. };
  353. exports.mapLegacyAvcCodecs_ = mapLegacyAvcCodecs_;
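// Example of the mapping (old Apple-style decimal fields become the RFC 6381
// hex form; values worked by hand from translateLegacyCodecs' behavior):
//
//   mapLegacyAvcCodecs_('avc1.100.31, mp4a.40.5');
//   // => 'avc1.64001f, mp4a.40.5'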
  354. /**
  355. * Calculates the MIME type strings for a working configuration of
  356. * SourceBuffers to play variant streams in a master playlist. If
  357. * there is no possible working configuration, an empty array will be
  358. * returned.
  359. *
  360. * @param master {Object} the m3u8 object for the master playlist
  361. * @param media {Object} the m3u8 object for the variant playlist
  362. * @return {Array} the MIME type strings. If the array has more than
  363. * one entry, the first element should be applied to the video
  364. * SourceBuffer and the second to the audio SourceBuffer.
  365. *
  366. * @private
  367. */
  368. var mimeTypesForPlaylist_ = function mimeTypesForPlaylist_(master, media) {
  369. var container = 'mp2t';
  370. var codecs = {
  371. videoCodec: 'avc1',
  372. videoObjectTypeIndicator: '.4d400d',
  373. audioProfile: '2'
  374. };
  375. var audioGroup = [];
  376. var mediaAttributes = undefined;
  377. var previousGroup = null;
  378. if (!media) {
  379. // not enough information, return an error
  380. return [];
  381. }
  382. // An initialization segment means the media playlist is an iframe
  383. // playlist or is using the mp4 container. We don't currently
  384. // support iframe playlists, so assume this is signalling mp4
  385. // fragments.
  386. // the existence check for segments can be removed once
  387. // https://github.com/videojs/m3u8-parser/issues/8 is closed
  388. if (media.segments && media.segments.length && media.segments[0].map) {
  389. container = 'mp4';
  390. }
  391. // if the codecs were explicitly specified, use them instead of the
  392. // defaults
  393. mediaAttributes = media.attributes || {};
  394. if (mediaAttributes.CODECS) {
  395. (function () {
  396. var parsedCodecs = parseCodecs(mediaAttributes.CODECS);
  397. Object.keys(parsedCodecs).forEach(function (key) {
  398. codecs[key] = parsedCodecs[key] || codecs[key];
  399. });
  400. })();
  401. }
  402. if (master.mediaGroups.AUDIO) {
  403. audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
  404. }
  405. // if audio could be muxed or unmuxed, use mime types appropriate
  406. // for both scenarios
  407. for (var groupId in audioGroup) {
  408. if (previousGroup && !!audioGroup[groupId].uri !== !!previousGroup.uri) {
  409. // one source buffer with muxed video and audio and another for
  410. // the alternate audio
  411. return ['video/' + container + '; codecs="' + codecs.videoCodec + codecs.videoObjectTypeIndicator + ', mp4a.40.' + codecs.audioProfile + '"', 'audio/' + container + '; codecs="mp4a.40.' + codecs.audioProfile + '"'];
  412. }
  413. previousGroup = audioGroup[groupId];
  414. }
  415. // if all video and audio is unmuxed, use two single-codec mime
  416. // types
  417. if (previousGroup && previousGroup.uri) {
  418. return ['video/' + container + '; codecs="' + codecs.videoCodec + codecs.videoObjectTypeIndicator + '"', 'audio/' + container + '; codecs="mp4a.40.' + codecs.audioProfile + '"'];
  419. }
  420. // all video and audio are muxed, use a dual-codec mime type
  421. return ['video/' + container + '; codecs="' + codecs.videoCodec + codecs.videoObjectTypeIndicator + ', mp4a.40.' + codecs.audioProfile + '"'];
  422. };
  423. exports.mimeTypesForPlaylist_ = mimeTypesForPlaylist_;
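// Sketch of the possible return shapes (the master/media objects here are
// hypothetical minimal inputs):
//
//   // muxed audio/video, no CODECS attribute and no AUDIO groups:
//   mimeTypesForPlaylist_({ mediaGroups: {} }, { attributes: {} });
//   // => ['video/mp2t; codecs="avc1.4d400d, mp4a.40.2"']
//
//   // with a demuxed alternate-audio group, the array gains a second,
//   // audio-only entry for the alternate audio SourceBuffer.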
  424. /**
  425. * the master playlist controller controls all interactions
  426. * between playlists and segment loaders. At this time this mainly
  427. * involves a master playlist and a series of audio playlists
  428. * if they are available
  429. *
  430. * @class MasterPlaylistController
  431. * @extends videojs.EventTarget
  432. */
  433. var MasterPlaylistController = (function (_videojs$EventTarget) {
  434. _inherits(MasterPlaylistController, _videojs$EventTarget);
  435. function MasterPlaylistController(options) {
  436. var _this = this;
  437. _classCallCheck(this, MasterPlaylistController);
  438. _get(Object.getPrototypeOf(MasterPlaylistController.prototype), 'constructor', this).call(this);
  439. var url = options.url;
  440. var withCredentials = options.withCredentials;
  441. var mode = options.mode;
  442. var tech = options.tech;
  443. var bandwidth = options.bandwidth;
  444. var externHls = options.externHls;
  445. var useCueTags = options.useCueTags;
  446. var blacklistDuration = options.blacklistDuration;
  447. if (!url) {
  448. throw new Error('A non-empty playlist URL is required');
  449. }
  450. Hls = externHls;
  451. this.withCredentials = withCredentials;
  452. this.tech_ = tech;
  453. this.hls_ = tech.hls;
  454. this.mode_ = mode;
  455. this.useCueTags_ = useCueTags;
  456. this.blacklistDuration = blacklistDuration;
  457. if (this.useCueTags_) {
  458. this.cueTagsTrack_ = this.tech_.addTextTrack('metadata', 'ad-cues');
  459. this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
  460. }
  461. this.requestOptions_ = {
  462. withCredentials: this.withCredentials,
  463. timeout: null
  464. };
  465. this.audioGroups_ = {};
  466. this.subtitleGroups_ = { groups: {}, tracks: {} };
  467. this.mediaSource = new _videoJs2['default'].MediaSource({ mode: mode });
  468. this.audioinfo_ = null;
  469. this.mediaSource.on('audioinfo', this.handleAudioinfoUpdate_.bind(this));
  470. // load the media source into the player
  471. this.mediaSource.addEventListener('sourceopen', this.handleSourceOpen_.bind(this));
  472. this.seekable_ = _videoJs2['default'].createTimeRanges();
  473. this.hasPlayed_ = function () {
  474. return false;
  475. };
  476. this.syncController_ = new _syncController2['default']();
  477. this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
  478. kind: 'metadata',
  479. label: 'segment-metadata'
  480. }, true).track;
  481. this.decrypter_ = (0, _webworkify2['default'])(_decrypterWorker2['default']);
  482. var segmentLoaderOptions = {
  483. hls: this.hls_,
  484. mediaSource: this.mediaSource,
  485. currentTime: this.tech_.currentTime.bind(this.tech_),
  486. seekable: function seekable() {
  487. return _this.seekable();
  488. },
  489. seeking: function seeking() {
  490. return _this.tech_.seeking();
  491. },
  492. duration: function duration() {
  493. return _this.mediaSource.duration;
  494. },
  495. hasPlayed: function hasPlayed() {
  496. return _this.hasPlayed_();
  497. },
  498. bandwidth: bandwidth,
  499. syncController: this.syncController_,
  500. decrypter: this.decrypter_
  501. };
  502. // setup playlist loaders
  503. this.masterPlaylistLoader_ = new _playlistLoader2['default'](url, this.hls_, this.withCredentials);
  504. this.setupMasterPlaylistLoaderListeners_();
  505. this.audioPlaylistLoader_ = null;
  506. this.subtitlePlaylistLoader_ = null;
  507. // setup segment loaders
  508. // combined audio/video or just video when alternate audio track is selected
  509. this.mainSegmentLoader_ = new _segmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderOptions, {
  510. segmentMetadataTrack: this.segmentMetadataTrack_,
  511. loaderType: 'main'
  512. }));
  513. // alternate audio track
  514. this.audioSegmentLoader_ = new _segmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderOptions, {
  515. loaderType: 'audio'
  516. }));
  517. this.subtitleSegmentLoader_ = new _vttSegmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderOptions, {
  518. loaderType: 'vtt'
  519. }));
  520. this.setupSegmentLoaderListeners_();
  521. // Create SegmentLoader stat-getters
  522. loaderStats.forEach(function (stat) {
  523. _this[stat + '_'] = sumLoaderStat.bind(_this, stat);
  524. });
  525. this.masterPlaylistLoader_.load();
  526. }
  527. /**
  528. * Register event handlers on the master playlist loader. A helper
  529. * function for construction time.
  530. *
  531. * @private
  532. */
  533. _createClass(MasterPlaylistController, [{
  534. key: 'setupMasterPlaylistLoaderListeners_',
  535. value: function setupMasterPlaylistLoaderListeners_() {
  536. var _this2 = this;
  537. this.masterPlaylistLoader_.on('loadedmetadata', function () {
  538. var media = _this2.masterPlaylistLoader_.media();
  539. var requestTimeout = _this2.masterPlaylistLoader_.targetDuration * 1.5 * 1000;
  540. // If we don't have any more available playlists, we don't want to
  541. // timeout the request.
  542. if (_this2.masterPlaylistLoader_.isLowestEnabledRendition_()) {
  543. _this2.requestOptions_.timeout = 0;
  544. } else {
  545. _this2.requestOptions_.timeout = requestTimeout;
  546. }
  547. // if this isn't a live video and preload permits, start
  548. // downloading segments
  549. if (media.endList && _this2.tech_.preload() !== 'none') {
  550. _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
  551. _this2.mainSegmentLoader_.load();
  552. }
  553. _this2.fillAudioTracks_();
  554. _this2.setupAudio();
  555. _this2.fillSubtitleTracks_();
  556. _this2.setupSubtitles();
  557. try {
  558. _this2.setupSourceBuffers_();
  559. } catch (e) {
  560. _videoJs2['default'].log.warn('Failed to create SourceBuffers', e);
  561. return _this2.mediaSource.endOfStream('decode');
  562. }
  563. _this2.setupFirstPlay();
  564. _this2.trigger('audioupdate');
  565. _this2.trigger('selectedinitialmedia');
  566. });
  567. this.masterPlaylistLoader_.on('loadedplaylist', function () {
  568. var updatedPlaylist = _this2.masterPlaylistLoader_.media();
  569. if (!updatedPlaylist) {
  570. // select the initial variant
  571. _this2.initialMedia_ = _this2.selectPlaylist();
  572. _this2.masterPlaylistLoader_.media(_this2.initialMedia_);
  573. return;
  574. }
  575. if (_this2.useCueTags_) {
  576. _this2.updateAdCues_(updatedPlaylist);
  577. }
  578. // TODO: Create a new event on the PlaylistLoader that signals
  579. // that the segments have changed in some way and use that to
  580. // update the SegmentLoader instead of doing it twice here and
  581. // on `mediachange`
  582. _this2.mainSegmentLoader_.playlist(updatedPlaylist, _this2.requestOptions_);
  583. _this2.updateDuration();
  584. // If the player isn't paused, ensure that the segment loader is running,
  585. // as it is possible that it was temporarily stopped while waiting for
  586. // a playlist (e.g., in case the playlist errored and we re-requested it).
  587. if (!_this2.tech_.paused()) {
  588. _this2.mainSegmentLoader_.load();
  589. }
  590. if (!updatedPlaylist.endList) {
  591. (function () {
  592. var addSeekableRange = function addSeekableRange() {
  593. var seekable = _this2.seekable();
  594. if (seekable.length !== 0) {
  595. _this2.mediaSource.addSeekableRange_(seekable.start(0), seekable.end(0));
  596. }
  597. };
  598. if (_this2.duration() !== Infinity) {
  599. (function () {
  600. var onDurationchange = function onDurationchange() {
  601. if (_this2.duration() === Infinity) {
  602. addSeekableRange();
  603. } else {
  604. _this2.tech_.one('durationchange', onDurationchange);
  605. }
  606. };
  607. _this2.tech_.one('durationchange', onDurationchange);
  608. })();
  609. } else {
  610. addSeekableRange();
  611. }
  612. })();
  613. }
  614. });
  615. this.masterPlaylistLoader_.on('error', function () {
  616. _this2.blacklistCurrentPlaylist(_this2.masterPlaylistLoader_.error);
  617. });
  618. this.masterPlaylistLoader_.on('mediachanging', function () {
  619. _this2.mainSegmentLoader_.abort();
  620. _this2.mainSegmentLoader_.pause();
  621. });
  622. this.masterPlaylistLoader_.on('mediachange', function () {
  623. var media = _this2.masterPlaylistLoader_.media();
  624. var requestTimeout = _this2.masterPlaylistLoader_.targetDuration * 1.5 * 1000;
  625. var activeAudioGroup = undefined;
  626. var activeTrack = undefined;
  627. // If we don't have any more available playlists, we don't want to
  628. // timeout the request.
  629. if (_this2.masterPlaylistLoader_.isLowestEnabledRendition_()) {
  630. _this2.requestOptions_.timeout = 0;
  631. } else {
  632. _this2.requestOptions_.timeout = requestTimeout;
  633. }
  634. // TODO: Create a new event on the PlaylistLoader that signals
  635. // that the segments have changed in some way and use that to
  636. // update the SegmentLoader instead of doing it twice here and
  637. // on `loadedplaylist`
  638. _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
  639. _this2.mainSegmentLoader_.load();
  640. // if the audio group has changed, a new audio track has to be
  641. // enabled
  642. activeAudioGroup = _this2.activeAudioGroup();
  643. activeTrack = activeAudioGroup.filter(function (track) {
  644. return track.enabled;
  645. })[0];
  646. if (!activeTrack) {
  647. _this2.setupAudio();
  648. _this2.trigger('audioupdate');
  649. }
  650. _this2.setupSubtitles();
  651. _this2.tech_.trigger({
  652. type: 'mediachange',
  653. bubbles: true
  654. });
  655. });
  656. this.masterPlaylistLoader_.on('playlistunchanged', function () {
  657. var updatedPlaylist = _this2.masterPlaylistLoader_.media();
  658. var playlistOutdated = _this2.stuckAtPlaylistEnd_(updatedPlaylist);
  659. if (playlistOutdated) {
  660. // Playlist has stopped updating and we're stuck at its end. Try to
  661. // blacklist it and switch to another playlist in the hope that that
  662. // one is updating (and give the player a chance to re-adjust to the
  663. // safe live point).
  664. _this2.blacklistCurrentPlaylist({
  665. message: 'Playlist no longer updating.'
  666. });
  667. // useful for monitoring QoS
  668. _this2.tech_.trigger('playliststuck');
  669. }
  670. });
  671. }
  672. /**
  673. * Register event handlers on the segment loaders. A helper function
  674. * for construction time.
  675. *
  676. * @private
  677. */
  678. }, {
  679. key: 'setupSegmentLoaderListeners_',
  680. value: function setupSegmentLoaderListeners_() {
  681. var _this3 = this;
  682. this.mainSegmentLoader_.on('bandwidthupdate', function () {
  683. // figure out what stream the next segment should be downloaded from
  684. // with the updated bandwidth information
  685. _this3.masterPlaylistLoader_.media(_this3.selectPlaylist());
  686. });
  687. this.mainSegmentLoader_.on('progress', function () {
  688. _this3.trigger('progress');
  689. });
  690. this.mainSegmentLoader_.on('error', function () {
  691. _this3.blacklistCurrentPlaylist(_this3.mainSegmentLoader_.error());
  692. });
  693. this.mainSegmentLoader_.on('syncinfoupdate', function () {
  694. _this3.onSyncInfoUpdate_();
  695. });
  696. this.audioSegmentLoader_.on('syncinfoupdate', function () {
  697. _this3.onSyncInfoUpdate_();
  698. });
  699. this.audioSegmentLoader_.on('error', function () {
  700. _videoJs2['default'].log.warn('Problem encountered with the current alternate audio track' + '. Switching back to default.');
  701. _this3.audioSegmentLoader_.abort();
  702. _this3.audioPlaylistLoader_ = null;
  703. _this3.setupAudio();
  704. });
  705. this.subtitleSegmentLoader_.on('error', this.handleSubtitleError_.bind(this));
  706. }
  707. }, {
  708. key: 'handleAudioinfoUpdate_',
  709. value: function handleAudioinfoUpdate_(event) {
  710. if (Hls.supportsAudioInfoChange_() || !this.audioInfo_ || !objectChanged(this.audioInfo_, event.info)) {
  711. this.audioInfo_ = event.info;
  712. return;
  713. }
  714. var error = 'had different audio properties (channels, sample rate, etc.) ' + 'or changed in some other way. This behavior is currently ' + 'unsupported in Firefox 48 and below due to an issue: \n\n' + 'https://bugzilla.mozilla.org/show_bug.cgi?id=1247138\n\n';
  715. var enabledIndex = this.activeAudioGroup().map(function (track) {
  716. return track.enabled;
  717. }).indexOf(true);
  718. var enabledTrack = this.activeAudioGroup()[enabledIndex];
  719. var defaultTrack = this.activeAudioGroup().filter(function (track) {
  720. return track.properties_ && track.properties_['default'];
  721. })[0];
  722. // they did not switch audiotracks
  723. // blacklist the current playlist
  724. if (!this.audioPlaylistLoader_) {
  725. error = 'The rendition that we tried to switch to ' + error + 'Unfortunately that means we will have to blacklist ' + 'the current playlist and switch to another. Sorry!';
  726. this.blacklistCurrentPlaylist();
  727. } else {
  728. error = 'The audio track \'' + enabledTrack.label + '\' that we tried to ' + ('switch to ' + error + ' Unfortunately this means we will have to ') + ('return you to the main track \'' + defaultTrack.label + '\'. Sorry!');
  729. defaultTrack.enabled = true;
  730. this.activeAudioGroup().splice(enabledIndex, 1);
  731. this.trigger('audioupdate');
  732. }
  733. _videoJs2['default'].log.warn(error);
  734. this.setupAudio();
  735. }
  736. }, {
  737. key: 'mediaSecondsLoaded_',
  738. value: function mediaSecondsLoaded_() {
  739. return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded;
  740. }
  741. /**
  742. * fill our internal list of HlsAudioTracks with data from
  743. * the master playlist or use a default
  744. *
  745. * @private
  746. */
  747. }, {
  748. key: 'fillAudioTracks_',
  749. value: function fillAudioTracks_() {
  750. var master = this.master();
  751. var mediaGroups = master.mediaGroups || {};
  752. // force a default if we have none or we are not
  753. // in html5 mode (the only mode to support more than one
  754. // audio track)
  755. if (!mediaGroups || !mediaGroups.AUDIO || Object.keys(mediaGroups.AUDIO).length === 0 || this.mode_ !== 'html5') {
  756. // "main" audio group, track name "default"
  757. mediaGroups.AUDIO = { main: { 'default': { 'default': true } } };
  758. }
  759. for (var mediaGroup in mediaGroups.AUDIO) {
  760. if (!this.audioGroups_[mediaGroup]) {
  761. this.audioGroups_[mediaGroup] = [];
  762. }
  763. for (var label in mediaGroups.AUDIO[mediaGroup]) {
  764. var properties = mediaGroups.AUDIO[mediaGroup][label];
  765. var track = new _videoJs2['default'].AudioTrack({
  766. id: label,
  767. kind: this.audioTrackKind_(properties),
  768. enabled: false,
  769. language: properties.language,
  770. label: label
  771. });
  772. track.properties_ = properties;
  773. this.audioGroups_[mediaGroup].push(track);
  774. }
  775. }
  776. // enable the default active track
  777. (this.activeAudioGroup().filter(function (audioTrack) {
  778. return audioTrack.properties_['default'];
  779. })[0] || this.activeAudioGroup()[0]).enabled = true;
  780. }
  781. /**
  782. * Convert the properties of an HLS track into an audioTrackKind.
  783. *
  784. * @private
  785. */
  786. }, {
  787. key: 'audioTrackKind_',
  788. value: function audioTrackKind_(properties) {
  789. var kind = properties['default'] ? 'main' : 'alternative';
  790. if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
  791. kind = 'main-desc';
  792. }
  793. return kind;
  794. }
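// Examples of the mapping (shown unbound for illustration; the property
// objects are as parsed from #EXT-X-MEDIA attributes):
//
//   audioTrackKind_({ 'default': true });  // => 'main'
//   audioTrackKind_({ 'default': false }); // => 'alternative'
//   audioTrackKind_({ 'default': true,
//     characteristics: 'public.accessibility.describes-video' }); // => 'main-desc'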
  795. /**
  796. * fill our internal list of Subtitle Tracks with data from
  797. * the master playlist or use a default
  798. *
  799. * @private
  800. */
  801. }, {
  802. key: 'fillSubtitleTracks_',
  803. value: function fillSubtitleTracks_() {
  804. var master = this.master();
  805. var mediaGroups = master.mediaGroups || {};
  806. for (var mediaGroup in mediaGroups.SUBTITLES) {
  807. if (!this.subtitleGroups_.groups[mediaGroup]) {
  808. this.subtitleGroups_.groups[mediaGroup] = [];
  809. }
  810. for (var label in mediaGroups.SUBTITLES[mediaGroup]) {
  811. var properties = mediaGroups.SUBTITLES[mediaGroup][label];
  812. if (!properties.forced) {
  813. this.subtitleGroups_.groups[mediaGroup].push(_videoJs2['default'].mergeOptions({ id: label }, properties));
  814. if (typeof this.subtitleGroups_.tracks[label] === 'undefined') {
  815. var track = this.tech_.addRemoteTextTrack({
  816. id: label,
  817. kind: 'subtitles',
  818. enabled: false,
  819. language: properties.language,
  820. label: label
  821. }, true).track;
  822. this.subtitleGroups_.tracks[label] = track;
  823. }
  824. }
  825. }
  826. }
  827. // Do not enable a default subtitle track. Wait for user interaction instead.
  828. }
  829. /**
  830. * Call load on our SegmentLoaders
  831. */
  832. }, {
  833. key: 'load',
  834. value: function load() {
  835. this.mainSegmentLoader_.load();
  836. if (this.audioPlaylistLoader_) {
  837. this.audioSegmentLoader_.load();
  838. }
  839. if (this.subtitlePlaylistLoader_) {
  840. this.subtitleSegmentLoader_.load();
  841. }
  842. }
  843. /**
  844. * Returns the audio group for the currently active primary
  845. * media playlist.
  846. */
  847. }, {
  848. key: 'activeAudioGroup',
  849. value: function activeAudioGroup() {
  850. var videoPlaylist = this.masterPlaylistLoader_.media();
  851. var result = undefined;
  852. if (videoPlaylist.attributes && videoPlaylist.attributes.AUDIO) {
  853. result = this.audioGroups_[videoPlaylist.attributes.AUDIO];
  854. }
  855. return result || this.audioGroups_.main;
  856. }
  857. /**
  858. * Returns the subtitle group for the currently active primary
  859. * media playlist.
  860. */
  861. }, {
  862. key: 'activeSubtitleGroup_',
  863. value: function activeSubtitleGroup_() {
  864. var videoPlaylist = this.masterPlaylistLoader_.media();
  865. var result = undefined;
  866. if (!videoPlaylist) {
  867. return null;
  868. }
  869. if (videoPlaylist.attributes && videoPlaylist.attributes.SUBTITLES) {
  870. result = this.subtitleGroups_.groups[videoPlaylist.attributes.SUBTITLES];
  871. }
  872. return result || this.subtitleGroups_.groups.main;
  873. }
  874. }, {
  875. key: 'activeSubtitleTrack_',
  876. value: function activeSubtitleTrack_() {
  877. for (var trackName in this.subtitleGroups_.tracks) {
  878. if (this.subtitleGroups_.tracks[trackName].mode === 'showing') {
  879. return this.subtitleGroups_.tracks[trackName];
  880. }
  881. }
  882. return null;
  883. }
  884. }, {
  885. key: 'handleSubtitleError_',
  886. value: function handleSubtitleError_() {
  887. _videoJs2['default'].log.warn('Problem encountered loading the subtitle track' + '. Switching back to default.');
  888. this.subtitleSegmentLoader_.abort();
  889. var track = this.activeSubtitleTrack_();
  890. if (track) {
  891. track.mode = 'disabled';
  892. }
  893. this.setupSubtitles();
  894. }
  895. /**
  896. * Determine the correct audio rendition based on the active
  897. * AudioTrack and initialize a PlaylistLoader and SegmentLoader if
  898. * necessary. This method is called once automatically before
  899. * playback begins to enable the default audio track and should be
  900. * invoked again if the track is changed.
  901. */
  902. }, {
  903. key: 'setupAudio',
  904. value: function setupAudio() {
  905. var _this4 = this;
  906. // determine whether separate loaders are required for the audio
  907. // rendition
  908. var audioGroup = this.activeAudioGroup();
  909. var track = audioGroup.filter(function (audioTrack) {
  910. return audioTrack.enabled;
  911. })[0];
  912. if (!track) {
  913. track = audioGroup.filter(function (audioTrack) {
  914. return audioTrack.properties_['default'];
  915. })[0] || audioGroup[0];
  916. track.enabled = true;
  917. }
  918. // stop playlist and segment loading for audio
  919. if (this.audioPlaylistLoader_) {
  920. this.audioPlaylistLoader_.dispose();
  921. this.audioPlaylistLoader_ = null;
  922. }
  923. this.audioSegmentLoader_.pause();
  924. if (!track.properties_.resolvedUri) {
  925. this.mainSegmentLoader_.resetEverything();
  926. return;
  927. }
  928. this.audioSegmentLoader_.resetEverything();
  929. // startup playlist and segment loaders for the enabled audio
  930. // track
  931. this.audioPlaylistLoader_ = new _playlistLoader2['default'](track.properties_.resolvedUri, this.hls_, this.withCredentials);
  932. this.audioPlaylistLoader_.load();
  933. this.audioPlaylistLoader_.on('loadedmetadata', function () {
  934. var audioPlaylist = _this4.audioPlaylistLoader_.media();
  935. _this4.audioSegmentLoader_.playlist(audioPlaylist, _this4.requestOptions_);
  936. // if the video is already playing, or if this isn't a live video and preload
  937. // permits, start downloading segments
  938. if (!_this4.tech_.paused() || audioPlaylist.endList && _this4.tech_.preload() !== 'none') {
  939. _this4.audioSegmentLoader_.load();
  940. }
  941. if (!audioPlaylist.endList) {
  942. _this4.audioPlaylistLoader_.trigger('firstplay');
  943. }
  944. });
  945. this.audioPlaylistLoader_.on('loadedplaylist', function () {
  946. var updatedPlaylist = undefined;
  947. if (_this4.audioPlaylistLoader_) {
  948. updatedPlaylist = _this4.audioPlaylistLoader_.media();
  949. }
  950. if (!updatedPlaylist) {
  951. // only one playlist to select
  952. _this4.audioPlaylistLoader_.media(_this4.audioPlaylistLoader_.playlists.master.playlists[0]);
  953. return;
  954. }
  955. _this4.audioSegmentLoader_.playlist(updatedPlaylist, _this4.requestOptions_);
  956. });
  957. this.audioPlaylistLoader_.on('error', function () {
  958. _videoJs2['default'].log.warn('Problem encountered loading the alternate audio track' + '. Switching back to default.');
  959. _this4.audioSegmentLoader_.abort();
  960. _this4.setupAudio();
  961. });
  962. }
  963. /**
  964. * Determine the correct subtitle playlist based on the active
  965. * SubtitleTrack and initialize a PlaylistLoader and SegmentLoader if
  966. * necessary. This method is called once automatically before
  967. * playback begins to enable the default subtitle track and should be
  968. * invoked again if the track is changed.
  969. */
  970. }, {
  971. key: 'setupSubtitles',
  972. value: function setupSubtitles() {
  973. var _this5 = this;
  974. var subtitleGroup = this.activeSubtitleGroup_();
  975. var track = this.activeSubtitleTrack_();
  976. this.subtitleSegmentLoader_.pause();
  977. if (!track) {
  978. // stop playlist and segment loading for subtitles
  979. if (this.subtitlePlaylistLoader_) {
  980. this.subtitlePlaylistLoader_.dispose();
  981. this.subtitlePlaylistLoader_ = null;
  982. }
  983. return;
  984. }
  985. var properties = subtitleGroup.filter(function (subtitleProperties) {
  986. return subtitleProperties.id === track.id;
  987. })[0];
  988. // startup playlist and segment loaders for the enabled subtitle track
  989. if (!this.subtitlePlaylistLoader_ ||
  990. // if the media hasn't loaded yet, we don't have the URI to check, so it is
  991. // easiest to simply recreate the playlist loader
  992. !this.subtitlePlaylistLoader_.media() || this.subtitlePlaylistLoader_.media().resolvedUri !== properties.resolvedUri) {
  993. if (this.subtitlePlaylistLoader_) {
  994. this.subtitlePlaylistLoader_.dispose();
  995. }
  996. // reset the segment loader only when the subtitle playlist is changed instead of
  997. // every time setupSubtitles is called since switching subtitle tracks fires
  998. // multiple `change` events on the TextTrackList
  999. this.subtitleSegmentLoader_.resetEverything();
  1000. // can't reuse playlistloader because we're only using single renditions and not a
  1001. // proper master
  1002. this.subtitlePlaylistLoader_ = new _playlistLoader2['default'](properties.resolvedUri, this.hls_, this.withCredentials);
  1003. this.subtitlePlaylistLoader_.on('loadedmetadata', function () {
  1004. var subtitlePlaylist = _this5.subtitlePlaylistLoader_.media();
  1005. _this5.subtitleSegmentLoader_.playlist(subtitlePlaylist, _this5.requestOptions_);
  1006. _this5.subtitleSegmentLoader_.track(_this5.activeSubtitleTrack_());
  1007. // if the video is already playing, or if this isn't a live video and preload
  1008. // permits, start downloading segments
  1009. if (!_this5.tech_.paused() || subtitlePlaylist.endList && _this5.tech_.preload() !== 'none') {
  1010. _this5.subtitleSegmentLoader_.load();
  1011. }
  1012. });
  1013. this.subtitlePlaylistLoader_.on('loadedplaylist', function () {
  1014. var updatedPlaylist = undefined;
  1015. if (_this5.subtitlePlaylistLoader_) {
  1016. updatedPlaylist = _this5.subtitlePlaylistLoader_.media();
  1017. }
  1018. if (!updatedPlaylist) {
  1019. return;
  1020. }
  1021. _this5.subtitleSegmentLoader_.playlist(updatedPlaylist, _this5.requestOptions_);
  1022. });
  1023. this.subtitlePlaylistLoader_.on('error', this.handleSubtitleError_.bind(this));
  1024. }
  1025. if (this.subtitlePlaylistLoader_.media() && this.subtitlePlaylistLoader_.media().resolvedUri === properties.resolvedUri) {
  1026. this.subtitleSegmentLoader_.load();
  1027. } else {
  1028. this.subtitlePlaylistLoader_.load();
  1029. }
  1030. }
  1031. /**
  1032. * Re-tune playback quality level for the current player
  1033. * conditions. This method may perform destructive actions, like
  1034. * removing already buffered content, to readjust the currently
  1035. * active playlist quickly.
  1036. *
  1037. * @private
  1038. */
  1039. }, {
  1040. key: 'fastQualityChange_',
  1041. value: function fastQualityChange_() {
  1042. var media = this.selectPlaylist();
  1043. if (media !== this.masterPlaylistLoader_.media()) {
  1044. this.masterPlaylistLoader_.media(media);
  1045. this.mainSegmentLoader_.resetLoader();
  1046. // don't need to reset audio as it is reset when media changes
  1047. }
  1048. }
  1049. /**
  1050. * Begin playback.
  1051. */
  1052. }, {
  1053. key: 'play',
  1054. value: function play() {
  1055. if (this.setupFirstPlay()) {
  1056. return;
  1057. }
  1058. if (this.tech_.ended()) {
  1059. this.tech_.setCurrentTime(0);
  1060. }
  1061. if (this.hasPlayed_()) {
  1062. this.load();
  1063. }
  1064. var seekable = this.tech_.seekable();
  1065. // if the viewer has paused and we fell out of the live window,
  1066. // seek forward to the live point
  1067. if (this.tech_.duration() === Infinity) {
  1068. if (this.tech_.currentTime() < seekable.start(0)) {
  1069. return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
  1070. }
  1071. }
  1072. }
  1073. /**
  1074. * Seek to the latest media position if this is a live video and the
  1075. * player and video are loaded and initialized.
  1076. */
  1077. }, {
  1078. key: 'setupFirstPlay',
  1079. value: function setupFirstPlay() {
  1080. var seekable = undefined;
  1081. var media = this.masterPlaylistLoader_.media();
  1082. // check that everything is ready to begin buffering in the live
  1083. // scenario
  1084. // 1) the active media playlist is available
  1085. if (media &&
  1086. // 2) the player is not paused
  1087. !this.tech_.paused() &&
  1088. // 3) the player has not started playing
  1089. !this.hasPlayed_()) {
  1090. // when the video is a live stream
  1091. if (!media.endList) {
  1092. this.trigger('firstplay');
  1093. // seek to the latest media position for live videos
  1094. seekable = this.seekable();
  1095. if (seekable.length) {
  1096. this.tech_.setCurrentTime(seekable.end(0));
  1097. }
  1098. }
  1099. this.hasPlayed_ = function () {
  1100. return true;
  1101. };
  1102. // now that we are ready, load the segment
  1103. this.load();
  1104. return true;
  1105. }
  1106. return false;
  1107. }
  1108. /**
  1109. * handle the sourceopen event on the MediaSource
  1110. *
  1111. * @private
  1112. */
  1113. }, {
  1114. key: 'handleSourceOpen_',
  1115. value: function handleSourceOpen_() {
  1116. // Only attempt to create the source buffer if none already exist.
  1117. // handleSourceOpen is also called when we are "re-opening" a source buffer
  1118. // after `endOfStream` has been called (in response to a seek for instance)
  1119. try {
  1120. this.setupSourceBuffers_();
  1121. } catch (e) {
  1122. _videoJs2['default'].log.warn('Failed to create Source Buffers', e);
  1123. return this.mediaSource.endOfStream('decode');
  1124. }
  1125. // if autoplay is enabled, begin playback. This is duplicative of
  1126. // code in video.js but is required because play() must be invoked
  1127. // *after* the media source has opened.
  1128. if (this.tech_.autoplay()) {
  1129. this.tech_.play();
  1130. }
  1131. this.trigger('sourceopen');
  1132. }
  1133. /**
  1134. * Check if a playlist has stopped being updated
  1135. * @param {Object} playlist the media playlist object
  1136. * @return {boolean} whether the playlist has stopped being updated or not
  1137. */
  1138. }, {
  1139. key: 'stuckAtPlaylistEnd_',
  1140. value: function stuckAtPlaylistEnd_(playlist) {
  1141. var seekable = this.seekable();
  1142. if (!seekable.length) {
  1143. // playlist doesn't have enough information to determine whether we are stuck
  1144. return false;
  1145. }
  1146. // does not use the safe live end to calculate playlist end, since we
  1147. // don't want to say we are stuck while there is still content
  1148. var absolutePlaylistEnd = Hls.Playlist.playlistEnd(playlist);
  1149. var currentTime = this.tech_.currentTime();
  1150. var buffered = this.tech_.buffered();
  1151. if (!buffered.length) {
  1152. // return true if the playhead reached the absolute end of the playlist
  1153. return absolutePlaylistEnd - currentTime <= _ranges2['default'].TIME_FUDGE_FACTOR;
  1154. }
  1155. var bufferedEnd = buffered.end(buffered.length - 1);
  1156. // return true if there is too little buffer left and
  1157. // buffer has reached absolute end of playlist
  1158. return bufferedEnd - currentTime <= _ranges2['default'].TIME_FUDGE_FACTOR && absolutePlaylistEnd - bufferedEnd <= _ranges2['default'].TIME_FUDGE_FACTOR;
  1159. }
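// Worked example (numbers chosen for illustration, assuming a
// ranges.TIME_FUDGE_FACTOR of 1/30): with a playlist end of 100s, a
// currentTime of 99.99 and a buffered end of 99.995, both differences
// (99.995 - 99.99 and 100 - 99.995) fall within the fudge factor, so the
// playhead is considered stuck at the playlist's end.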
  1160. /**
  1161. * Blacklists a playlist when an error occurs for a set amount of time
  1162. * making it unavailable for selection by the rendition selection algorithm
  1163. * and then forces a new playlist (rendition) selection.
  1164. *
  1165. * @param {Object=} error an optional error that may include the playlist
  1166. * to blacklist
  1167. */
  1168. }, {
  1169. key: 'blacklistCurrentPlaylist',
  1170. value: function blacklistCurrentPlaylist() {
  1171. var error = arguments.length <= 0 || arguments[0] === undefined ? {} : arguments[0];
  1172. var currentPlaylist = undefined;
  1173. var nextPlaylist = undefined;
  1174. // If the `error` was generated by the playlist loader, it will contain
  1175. // the playlist we were trying to load (but failed) and that should be
  1176. // blacklisted instead of the currently selected playlist which is likely
  1177. // out-of-date in this scenario
  1178. currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
  1179. // If there is no current playlist, then an error occurred while we were
  1180. // trying to load the master OR while we were disposing of the tech
  1181. if (!currentPlaylist) {
  1182. this.error = error;
  1183. try {
  1184. return this.mediaSource.endOfStream('network');
  1185. } catch (e) {
  1186. return this.trigger('error');
  1187. }
  1188. }
  1189. var isFinalRendition = this.masterPlaylistLoader_.isFinalRendition_();
  1190. if (isFinalRendition) {
  1191. // Never blacklist this playlist because it's the final rendition
  1192. _videoJs2['default'].log.warn('Problem encountered with the current ' + 'HLS playlist. Trying again since it is the final playlist.');
  1193. this.tech_.trigger('retryplaylist');
  1194. return this.masterPlaylistLoader_.load(isFinalRendition);
  1195. }
  1196. // Blacklist this playlist
  1197. currentPlaylist.excludeUntil = Date.now() + this.blacklistDuration * 1000;
  1198. this.tech_.trigger('blacklistplaylist');
  1199. // Select a new playlist
  1200. nextPlaylist = this.selectPlaylist();
  1201. _videoJs2['default'].log.warn('Problem encountered with the current HLS playlist.' + (error.message ? ' ' + error.message : '') + ' Switching to another playlist.');
  1202. return this.masterPlaylistLoader_.media(nextPlaylist);
  1203. }
  1204. /**
  1205. * Pause all segment loaders
  1206. */
  1207. }, {
  1208. key: 'pauseLoading',
  1209. value: function pauseLoading() {
  1210. this.mainSegmentLoader_.pause();
  1211. if (this.audioPlaylistLoader_) {
  1212. this.audioSegmentLoader_.pause();
  1213. }
  1214. if (this.subtitlePlaylistLoader_) {
  1215. this.subtitleSegmentLoader_.pause();
  1216. }
  1217. }
  1218. /**
  1219. * set the current time on all segment loaders
  1220. *
  1221. * @param {TimeRange} currentTime the current time to set
  1222. * @return {TimeRange} the current time
  1223. */
  1224. }, {
  1225. key: 'setCurrentTime',
  1226. value: function setCurrentTime(currentTime) {
  1227. var buffered = _ranges2['default'].findRange(this.tech_.buffered(), currentTime);
  1228. if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
  1229. // return immediately if the metadata is not ready yet
  1230. return 0;
  1231. }
  1232. // it's clearly an edge-case but don't throw an error if asked to
  1233. // seek within an empty playlist
  1234. if (!this.masterPlaylistLoader_.media().segments) {
  1235. return 0;
  1236. }
  1237. // In flash playback, the segment loaders should be reset on every seek, even
  1238. // in buffer seeks
  1239. var isFlash = this.mode_ === 'flash' || this.mode_ === 'auto' && !_videoJs2['default'].MediaSource.supportsNativeMediaSources();
  1240. // if the seek location is already buffered, continue buffering as
  1241. // usual
  1242. if (buffered && buffered.length && !isFlash) {
  1243. return currentTime;
  1244. }
  1245. // cancel outstanding requests so we begin buffering at the new
  1246. // location
  1247. this.mainSegmentLoader_.resetEverything();
  1248. this.mainSegmentLoader_.abort();
  1249. if (this.audioPlaylistLoader_) {
  1250. this.audioSegmentLoader_.resetEverything();
  1251. this.audioSegmentLoader_.abort();
  1252. }
  1253. if (this.subtitlePlaylistLoader_) {
  1254. this.subtitleSegmentLoader_.resetEverything();
  1255. this.subtitleSegmentLoader_.abort();
  1256. }
  1257. if (!this.tech_.paused()) {
  1258. this.mainSegmentLoader_.load();
  1259. if (this.audioPlaylistLoader_) {
  1260. this.audioSegmentLoader_.load();
  1261. }
  1262. if (this.subtitlePlaylistLoader_) {
  1263. this.subtitleSegmentLoader_.load();
  1264. }
  1265. }
  1266. }
  1267. /**
  1268. * get the current duration
  1269. *
  1270. * @return {TimeRange} the duration
  1271. */
  1272. }, {
  1273. key: 'duration',
  1274. value: function duration() {
  1275. if (!this.masterPlaylistLoader_) {
  1276. return 0;
  1277. }
  1278. if (this.mediaSource) {
  1279. return this.mediaSource.duration;
  1280. }
  1281. return Hls.Playlist.duration(this.masterPlaylistLoader_.media());
  1282. }
  1283. /**
  1284. * check the seekable range
  1285. *
  1286. * @return {TimeRange} the seekable range
  1287. */
  1288. }, {
  1289. key: 'seekable',
  1290. value: function seekable() {
  1291. return this.seekable_;
  1292. }
  1293. }, {
  1294. key: 'onSyncInfoUpdate_',
  1295. value: function onSyncInfoUpdate_() {
  1296. var media = undefined;
  1297. var mainSeekable = undefined;
  1298. var audioSeekable = undefined;
  1299. if (!this.masterPlaylistLoader_) {
  1300. return;
  1301. }
  1302. media = this.masterPlaylistLoader_.media();
  1303. if (!media) {
  1304. return;
  1305. }
  1306. mainSeekable = Hls.Playlist.seekable(media);
  1307. if (mainSeekable.length === 0) {
  1308. return;
  1309. }
  1310. if (this.audioPlaylistLoader_) {
  1311. audioSeekable = Hls.Playlist.seekable(this.audioPlaylistLoader_.media());
  1312. if (audioSeekable.length === 0) {
  1313. return;
  1314. }
  1315. }
  1316. if (!audioSeekable) {
  1317. // seekable has been calculated based on buffering video data so it
  1318. // can be returned directly
  1319. this.seekable_ = mainSeekable;
  1320. } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
  1321. // seekables are pretty far off, rely on main
  1322. this.seekable_ = mainSeekable;
  1323. } else {
  1324. this.seekable_ = _videoJs2['default'].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
  1325. }
  1326. this.tech_.trigger('seekablechanged');
  1327. }
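// For illustration (hypothetical ranges): if the main seekable is [10, 40]
// and the audio seekable is [12, 38], the combined range is
// [max(10, 12), min(40, 38)] = [12, 38]; if the two ranges do not overlap at
// all, the main range is used on its own.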
  1328. /**
  1329. * Update the player duration
  1330. */
  1331. }, {
  1332. key: 'updateDuration',
  1333. value: function updateDuration() {
  1334. var _this6 = this;
  1335. var oldDuration = this.mediaSource.duration;
  1336. var newDuration = Hls.Playlist.duration(this.masterPlaylistLoader_.media());
  1337. var buffered = this.tech_.buffered();
  1338. var setDuration = function setDuration() {
  1339. _this6.mediaSource.duration = newDuration;
  1340. _this6.tech_.trigger('durationchange');
  1341. _this6.mediaSource.removeEventListener('sourceopen', setDuration);
  1342. };
  1343. if (buffered.length > 0) {
  1344. newDuration = Math.max(newDuration, buffered.end(buffered.length - 1));
  1345. }
  1346. // if the duration has changed, invalidate the cached value
  1347. if (oldDuration !== newDuration) {
  1348. // update the duration
  1349. if (this.mediaSource.readyState !== 'open') {
  1350. this.mediaSource.addEventListener('sourceopen', setDuration);
  1351. } else {
  1352. setDuration();
  1353. }
  1354. }
  1355. }
  1356. /**
  1357. * dispose of the MasterPlaylistController and everything
  1358. * that it controls
  1359. */
  1360. }, {
  1361. key: 'dispose',
  1362. value: function dispose() {
  1363. this.decrypter_.terminate();
  1364. this.masterPlaylistLoader_.dispose();
  1365. this.mainSegmentLoader_.dispose();
  1366. if (this.audioPlaylistLoader_) {
  1367. this.audioPlaylistLoader_.dispose();
  1368. }
  1369. if (this.subtitlePlaylistLoader_) {
  1370. this.subtitlePlaylistLoader_.dispose();
  1371. }
  1372. this.audioSegmentLoader_.dispose();
  1373. this.subtitleSegmentLoader_.dispose();
  1374. }
  1375. /**
  1376. * return the master playlist object if we have one
  1377. *
  1378. * @return {Object} the master playlist object that we parsed
  1379. */
  1380. }, {
  1381. key: 'master',
  1382. value: function master() {
  1383. return this.masterPlaylistLoader_.master;
  1384. }
  1385. /**
  1386. * return the currently selected playlist
  1387. *
  1388. * @return {Object} the currently selected playlist object that we parsed
  1389. */
  1390. }, {
  1391. key: 'media',
  1392. value: function media() {
  1393. // playlist loader will not return media if it has not been fully loaded
  1394. return this.masterPlaylistLoader_.media() || this.initialMedia_;
  1395. }
  1396. /**
  1397. * setup our internal source buffers on our segment Loaders
  1398. *
  1399. * @private
  1400. */
  1401. }, {
  1402. key: 'setupSourceBuffers_',
  1403. value: function setupSourceBuffers_() {
  1404. var media = this.masterPlaylistLoader_.media();
  1405. var mimeTypes = undefined;
  1406. // wait until a media playlist is available and the Media Source is
  1407. // attached
  1408. if (!media || this.mediaSource.readyState !== 'open') {
  1409. return;
  1410. }
  1411. mimeTypes = mimeTypesForPlaylist_(this.masterPlaylistLoader_.master, media);
  1412. if (mimeTypes.length < 1) {
1413. this.error = 'No compatible SourceBuffer configuration for the variant stream: ' + media.resolvedUri;
  1414. return this.mediaSource.endOfStream('decode');
  1415. }
  1416. this.mainSegmentLoader_.mimeType(mimeTypes[0]);
  1417. if (mimeTypes[1]) {
  1418. this.audioSegmentLoader_.mimeType(mimeTypes[1]);
  1419. }
  1420. // exclude any incompatible variant streams from future playlist
  1421. // selection
  1422. this.excludeIncompatibleVariants_(media);
  1423. }
  1424. /**
  1425. * Blacklist playlists that are known to be codec or
  1426. * stream-incompatible with the SourceBuffer configuration. For
  1427. * instance, Media Source Extensions would cause the video element to
  1428. * stall waiting for video data if you switched from a variant with
  1429. * video and audio to an audio-only one.
  1430. *
  1431. * @param {Object} media a media playlist compatible with the current
  1432. * set of SourceBuffers. Variants in the current master playlist that
  1433. * do not appear to have compatible codec or stream configurations
  1434. * will be excluded from the default playlist selection algorithm
  1435. * indefinitely.
  1436. * @private
  1437. */
  1438. }, {
  1439. key: 'excludeIncompatibleVariants_',
  1440. value: function excludeIncompatibleVariants_(media) {
  1441. var master = this.masterPlaylistLoader_.master;
  1442. var codecCount = 2;
  1443. var videoCodec = null;
  1444. var codecs = undefined;
  1445. if (media.attributes && media.attributes.CODECS) {
  1446. codecs = parseCodecs(media.attributes.CODECS);
  1447. videoCodec = codecs.videoCodec;
  1448. codecCount = codecs.codecCount;
  1449. }
  1450. master.playlists.forEach(function (variant) {
  1451. var variantCodecs = {
  1452. codecCount: 2,
  1453. videoCodec: null
  1454. };
  1455. if (variant.attributes && variant.attributes.CODECS) {
  1456. var codecString = variant.attributes.CODECS;
  1457. variantCodecs = parseCodecs(codecString);
  1458. if (window.MediaSource && window.MediaSource.isTypeSupported && !window.MediaSource.isTypeSupported('video/mp4; codecs="' + mapLegacyAvcCodecs_(codecString) + '"')) {
  1459. variant.excludeUntil = Infinity;
  1460. }
  1461. }
  1462. // if the streams differ in the presence or absence of audio or
  1463. // video, they are incompatible
  1464. if (variantCodecs.codecCount !== codecCount) {
  1465. variant.excludeUntil = Infinity;
  1466. }
  1467. // if h.264 is specified on the current playlist, some flavor of
  1468. // it must be specified on all compatible variants
  1469. if (variantCodecs.videoCodec !== videoCodec) {
  1470. variant.excludeUntil = Infinity;
  1471. }
  1472. });
  1473. }
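// For illustration (made-up variants): if the current media declares
// CODECS="avc1.4d400d,mp4a.40.2" (two codecs, one of them H.264), an
// audio-only variant with CODECS="mp4a.40.2" differs in both codec count and
// video codec and gets excludeUntil = Infinity; a variant with no CODECS
// attribute is excluded too, since its assumed video codec (null) differs
// from the current one.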
  1474. }, {
  1475. key: 'updateAdCues_',
  1476. value: function updateAdCues_(media) {
  1477. var offset = 0;
  1478. var seekable = this.seekable();
  1479. if (seekable.length) {
  1480. offset = seekable.start(0);
  1481. }
  1482. _adCueTags2['default'].updateAdCues(media, this.cueTagsTrack_, offset);
  1483. }
  1484. }]);
  1485. return MasterPlaylistController;
  1486. })(_videoJs2['default'].EventTarget);
  1487. exports.MasterPlaylistController = MasterPlaylistController;
  1488. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  1489. },{"./ad-cue-tags":1,"./decrypter-worker":4,"./playlist-loader":8,"./ranges":10,"./segment-loader":14,"./sync-controller":17,"./vtt-segment-loader":18,"videojs-contrib-media-sources/es5/codec-utils":64,"webworkify":75}],6:[function(require,module,exports){
  1490. (function (global){
  1491. 'use strict';
  1492. Object.defineProperty(exports, '__esModule', {
  1493. value: true
  1494. });
  1495. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  1496. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  1497. var _videoJs2 = _interopRequireDefault(_videoJs);
  1498. var _binUtils = require('./bin-utils');
  1499. var REQUEST_ERRORS = {
  1500. FAILURE: 2,
  1501. TIMEOUT: -101,
  1502. ABORTED: -102
  1503. };
  1504. exports.REQUEST_ERRORS = REQUEST_ERRORS;
  1505. /**
  1506. * Turns segment byterange into a string suitable for use in
  1507. * HTTP Range requests
  1508. *
  1509. * @param {Object} byterange - an object with two values defining the start and end
  1510. * of a byte-range
  1511. */
  1512. var byterangeStr = function byterangeStr(byterange) {
  1513. var byterangeStart = undefined;
  1514. var byterangeEnd = undefined;
  1515. // `byterangeEnd` is one less than `offset + length` because the HTTP range
  1516. // header uses inclusive ranges
  1517. byterangeEnd = byterange.offset + byterange.length - 1;
  1518. byterangeStart = byterange.offset;
  1519. return 'bytes=' + byterangeStart + '-' + byterangeEnd;
  1520. };
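// For illustration (hypothetical values): HTTP Range headers are inclusive,
// so { offset: 600, length: 300 } covers bytes 600 through 899:
//
//   byterangeStr({ offset: 600, length: 300 }); // => 'bytes=600-899'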
  1521. /**
  1522. * Defines headers for use in the xhr request for a particular segment.
  1523. *
  1524. * @param {Object} segment - a simplified copy of the segmentInfo object from SegmentLoader
  1525. */
  1526. var segmentXhrHeaders = function segmentXhrHeaders(segment) {
  1527. var headers = {};
  1528. if (segment.byterange) {
  1529. headers.Range = byterangeStr(segment.byterange);
  1530. }
  1531. return headers;
  1532. };
  1533. /**
  1534. * Abort all requests
  1535. *
  1536. * @param {Object} activeXhrs - an object that tracks all XHR requests
  1537. */
  1538. var abortAll = function abortAll(activeXhrs) {
  1539. activeXhrs.forEach(function (xhr) {
  1540. xhr.abort();
  1541. });
  1542. };
  1543. /**
  1544. * Gather important bandwidth stats once a request has completed
  1545. *
  1546. * @param {Object} request - the XHR request from which to gather stats
  1547. */
  1548. var getRequestStats = function getRequestStats(request) {
  1549. return {
  1550. bandwidth: request.bandwidth,
  1551. bytesReceived: request.bytesReceived || 0,
  1552. roundTripTime: request.roundTripTime || 0
  1553. };
  1554. };
  1555. /**
  1556. * If possible gather bandwidth stats as a request is in
  1557. * progress
  1558. *
  1559. * @param {Event} progressEvent - an event object from an XHR's progress event
  1560. */
  1561. var getProgressStats = function getProgressStats(progressEvent) {
  1562. var request = progressEvent.target;
  1563. var roundTripTime = Date.now() - request.requestTime;
  1564. var stats = {
  1565. bandwidth: Infinity,
  1566. bytesReceived: 0,
  1567. roundTripTime: roundTripTime || 0
  1568. };
  1569. if (progressEvent.lengthComputable) {
  1570. stats.bytesReceived = progressEvent.loaded;
  1571. // This can result in Infinity if stats.roundTripTime is 0 but that is ok
1572. // because we should only use bandwidth stats on progress to determine when to
1573. // abort a request early due to insufficient bandwidth
  1574. stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
  1575. }
  1576. return stats;
  1577. };
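// For illustration, the bandwidth figure above is in bits per second: with
// hypothetical numbers, 250000 bytes received over a 2000ms round trip gives
// Math.floor(250000 / 2000 * 8 * 1000) === 1000000, i.e. about 1 Mbps.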
  1578. /**
  1579. * Handle all error conditions in one place and return an object
  1580. * with all the information
  1581. *
1582. * @param {Error|null} error - if non-null signals an error occurred with the XHR
  1583. * @param {Object} request - the XHR request that possibly generated the error
  1584. */
  1585. var handleErrors = function handleErrors(error, request) {
  1586. if (request.timedout) {
  1587. return {
  1588. status: request.status,
  1589. message: 'HLS request timed-out at URL: ' + request.uri,
  1590. code: REQUEST_ERRORS.TIMEOUT,
  1591. xhr: request
  1592. };
  1593. }
  1594. if (request.aborted) {
  1595. return {
  1596. status: request.status,
  1597. message: 'HLS request aborted at URL: ' + request.uri,
  1598. code: REQUEST_ERRORS.ABORTED,
  1599. xhr: request
  1600. };
  1601. }
  1602. if (error) {
  1603. return {
  1604. status: request.status,
  1605. message: 'HLS request errored at URL: ' + request.uri,
  1606. code: REQUEST_ERRORS.FAILURE,
  1607. xhr: request
  1608. };
  1609. }
  1610. return null;
  1611. };
  1612. /**
  1613. * Handle responses for key data and convert the key data to the correct format
  1614. * for the decryption step later
  1615. *
  1616. * @param {Object} segment - a simplified copy of the segmentInfo object from SegmentLoader
  1617. * @param {Function} finishProcessingFn - a callback to execute to continue processing
  1618. * this request
  1619. */
  1620. var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
  1621. return function (error, request) {
  1622. var response = request.response;
  1623. var errorObj = handleErrors(error, request);
  1624. if (errorObj) {
  1625. return finishProcessingFn(errorObj, segment);
  1626. }
  1627. if (response.byteLength !== 16) {
  1628. return finishProcessingFn({
  1629. status: request.status,
  1630. message: 'Invalid HLS key at URL: ' + request.uri,
  1631. code: REQUEST_ERRORS.FAILURE,
  1632. xhr: request
  1633. }, segment);
  1634. }
  1635. var view = new DataView(response);
  1636. segment.key.bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
  1637. return finishProcessingFn(null, segment);
  1638. };
  1639. };
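// Note: the 16-byte check above reflects AES-128; keys must be exactly
// 128 bits. DataView.getUint32 reads big-endian by default, so the key is
// packed into four 32-bit words in network byte order, presumably the layout
// the decryption worker consumes.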
  1640. /**
  1641. * Handle init-segment responses
  1642. *
  1643. * @param {Object} segment - a simplified copy of the segmentInfo object from SegmentLoader
  1644. * @param {Function} finishProcessingFn - a callback to execute to continue processing
  1645. * this request
  1646. */
  1647. var handleInitSegmentResponse = function handleInitSegmentResponse(segment, finishProcessingFn) {
  1648. return function (error, request) {
  1649. var errorObj = handleErrors(error, request);
  1650. if (errorObj) {
  1651. return finishProcessingFn(errorObj, segment);
  1652. }
  1653. segment.map.bytes = new Uint8Array(request.response);
  1654. return finishProcessingFn(null, segment);
  1655. };
  1656. };
  1657. /**
1658. * Response handler for segment requests, being sure to set the correct
1659. * property depending on whether the segment is encrypted or not.
1660. * Also records and keeps track of stats that are used for ABR purposes
  1661. *
  1662. * @param {Object} segment - a simplified copy of the segmentInfo object from SegmentLoader
  1663. * @param {Function} finishProcessingFn - a callback to execute to continue processing
  1664. * this request
  1665. */
  1666. var handleSegmentResponse = function handleSegmentResponse(segment, finishProcessingFn) {
  1667. return function (error, request) {
  1668. var errorObj = handleErrors(error, request);
  1669. if (errorObj) {
  1670. return finishProcessingFn(errorObj, segment);
  1671. }
  1672. segment.stats = getRequestStats(request);
  1673. if (segment.key) {
  1674. segment.encryptedBytes = new Uint8Array(request.response);
  1675. } else {
  1676. segment.bytes = new Uint8Array(request.response);
  1677. }
  1678. return finishProcessingFn(null, segment);
  1679. };
  1680. };
  1681. /**
  1682. * Decrypt the segment via the decryption web worker
  1683. *
  1684. * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
  1685. * @param {Object} segment - a simplified copy of the segmentInfo object from SegmentLoader
  1686. * @param {Function} doneFn - a callback that is executed after decryption has completed
  1687. */
  1688. var decryptSegment = function decryptSegment(decrypter, segment, doneFn) {
  1689. var decryptionHandler = function decryptionHandler(event) {
  1690. if (event.data.source === segment.requestId) {
  1691. decrypter.removeEventListener('message', decryptionHandler);
  1692. var decrypted = event.data.decrypted;
  1693. segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
  1694. return doneFn(null, segment);
  1695. }
  1696. };
  1697. decrypter.addEventListener('message', decryptionHandler);
  1698. // this is an encrypted segment
  1699. // incrementally decrypt the segment
  1700. decrypter.postMessage((0, _binUtils.createTransferableMessage)({
  1701. source: segment.requestId,
  1702. encrypted: segment.encryptedBytes,
  1703. key: segment.key.bytes,
  1704. iv: segment.key.iv
  1705. }), [segment.encryptedBytes.buffer, segment.key.bytes.buffer]);
  1706. };
  1707. /**
  1708. * The purpose of this function is to get the most pertinent error from the
  1709. * array of errors.
  1710. * For instance if a timeout and two aborts occur, then the aborts were
  1711. * likely triggered by the timeout so return that error object.
  1712. */
  1713. var getMostImportantError = function getMostImportantError(errors) {
  1714. return errors.reduce(function (prev, err) {
  1715. return err.code > prev.code ? err : prev;
  1716. });
  1717. };
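// For illustration, with the REQUEST_ERRORS codes above: given one TIMEOUT
// (-101) and two ABORTED (-102) errors, reduce keeps the larger code, so the
// timeout is reported (-101 > -102); a FAILURE (2) would win over both.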
  1718. /**
  1719. * This function waits for all XHRs to finish (with either success or failure)
1720. * before continuing processing via its callback. The function gathers errors
  1721. * from each request into a single errors array so that the error status for
  1722. * each request can be examined later.
  1723. *
  1724. * @param {Object} activeXhrs - an object that tracks all XHR requests
  1725. * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
  1726. * @param {Function} doneFn - a callback that is executed after all resources have been
  1727. * downloaded and any decryption completed
  1728. */
  1729. var waitForCompletion = function waitForCompletion(activeXhrs, decrypter, doneFn) {
  1730. var errors = [];
  1731. var count = 0;
  1732. return function (error, segment) {
  1733. if (error) {
  1734. // If there are errors, we have to abort any outstanding requests
  1735. abortAll(activeXhrs);
  1736. errors.push(error);
  1737. }
  1738. count += 1;
  1739. if (count === activeXhrs.length) {
  1740. // Keep track of when *all* of the requests have completed
  1741. segment.endOfAllRequests = Date.now();
  1742. if (errors.length > 0) {
  1743. var worstError = getMostImportantError(errors);
  1744. return doneFn(worstError, segment);
  1745. }
  1746. if (segment.encryptedBytes) {
  1747. return decryptSegment(decrypter, segment, doneFn);
  1748. }
1749. // Otherwise, everything is ready, so just continue
  1750. return doneFn(null, segment);
  1751. }
  1752. };
  1753. };
  1754. /**
  1755. * Simple progress event callback handler that gathers some stats before
  1756. * executing a provided callback with the `segment` object
  1757. *
  1758. * @param {Object} segment - a simplified copy of the segmentInfo object from SegmentLoader
  1759. * @param {Function} progressFn - a callback that is executed each time a progress event is received
  1760. * @param {Event} event - the progress event object from XMLHttpRequest
  1761. */
  1762. var handleProgress = function handleProgress(segment, progressFn) {
  1763. return function (event) {
  1764. segment.stats = getProgressStats(event);
  1765. return progressFn(event, segment);
  1766. };
  1767. };
  1768. /**
1769. * Loads all resources and does any processing necessary for a media-segment
  1770. *
  1771. * Features:
  1772. * decrypts the media-segment if it has a key uri and an iv
  1773. * aborts *all* requests if *any* one request fails
  1774. *
  1775. * The segment object, at minimum, has the following format:
  1776. * {
  1777. * resolvedUri: String,
  1778. * [byterange]: {
  1779. * offset: Number,
  1780. * length: Number
  1781. * },
  1782. * [key]: {
  1783. * resolvedUri: String
  1784. * [byterange]: {
  1785. * offset: Number,
  1786. * length: Number
  1787. * },
  1788. * iv: {
  1789. * bytes: Uint32Array
  1790. * }
  1791. * },
  1792. * [map]: {
  1793. * resolvedUri: String,
  1794. * [byterange]: {
  1795. * offset: Number,
  1796. * length: Number
  1797. * },
  1798. * [bytes]: Uint8Array
  1799. * }
  1800. * }
  1801. * ...where [name] denotes optional properties
  1802. *
  1803. * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
  1804. * @param {Object} xhrOptions - the base options to provide to all xhr requests
  1805. * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption routines
  1806. * @param {Object} segment - a simplified copy of the segmentInfo object from SegmentLoader
  1807. * @param {Function} progressFn - a callback that receives progress events from the main segment's xhr request
  1808. * @param {Function} doneFn - a callback that is executed only once all requests have succeeded or failed
  1809. * @returns {Function} a function that, when invoked, immediately aborts all outstanding requests
  1810. */
  1811. var mediaSegmentRequest = function mediaSegmentRequest(xhr, xhrOptions, decryptionWorker, segment, progressFn, doneFn) {
  1812. var activeXhrs = [];
  1813. var finishProcessingFn = waitForCompletion(activeXhrs, decryptionWorker, doneFn);
  1814. // optionally, request the decryption key
  1815. if (segment.key) {
  1816. var keyRequestOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
  1817. uri: segment.key.resolvedUri,
  1818. responseType: 'arraybuffer'
  1819. });
  1820. var keyRequestCallback = handleKeyResponse(segment, finishProcessingFn);
  1821. var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
  1822. activeXhrs.push(keyXhr);
  1823. }
  1824. // optionally, request the associated media init segment
  1825. if (segment.map && !segment.map.bytes) {
  1826. var initSegmentOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
  1827. uri: segment.map.resolvedUri,
  1828. responseType: 'arraybuffer',
  1829. headers: segmentXhrHeaders(segment.map)
  1830. });
  1831. var initSegmentRequestCallback = handleInitSegmentResponse(segment, finishProcessingFn);
  1832. var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
  1833. activeXhrs.push(initSegmentXhr);
  1834. }
  1835. var segmentRequestOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
  1836. uri: segment.resolvedUri,
  1837. responseType: 'arraybuffer',
  1838. headers: segmentXhrHeaders(segment)
  1839. });
  1840. var segmentRequestCallback = handleSegmentResponse(segment, finishProcessingFn);
  1841. var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
  1842. segmentXhr.addEventListener('progress', handleProgress(segment, progressFn));
  1843. activeXhrs.push(segmentXhr);
  1844. return function () {
  1845. return abortAll(activeXhrs);
  1846. };
  1847. };
  1848. exports.mediaSegmentRequest = mediaSegmentRequest;
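// A minimal, hypothetical usage sketch (the names below are assumptions, not
// part of this file):
//
//   var abort = mediaSegmentRequest(
//     hlsXhr,                                      // the xhr wrapper from xhr.js
//     { timeout: 45000 },                          // base xhr options
//     decryptionWorker,                            // AES-128 web worker
//     { resolvedUri: 'https://example.com/0.ts' }, // simplified segment object
//     function(event, segment) { /* progress */ },
//     function(error, segment) { /* done: segment.bytes or error.code */ }
//   );
//   // invoking abort() immediately cancels all outstanding requests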
  1849. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  1850. },{"./bin-utils":2}],7:[function(require,module,exports){
  1851. (function (global){
  1852. /**
  1853. * @file playback-watcher.js
  1854. *
  1855. * Playback starts, and now my watch begins. It shall not end until my death. I shall
  1856. * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
  1857. * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
  1858. * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
  1859. * my life and honor to the Playback Watch, for this Player and all the Players to come.
  1860. */
  1861. 'use strict';
  1862. Object.defineProperty(exports, '__esModule', {
  1863. value: true
  1864. });
  1865. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  1866. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  1867. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  1868. var _ranges = require('./ranges');
  1869. var _ranges2 = _interopRequireDefault(_ranges);
  1870. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  1871. var _videoJs2 = _interopRequireDefault(_videoJs);
  1872. // Set of events that reset the playback-watcher time check logic and clear the timeout
  1873. var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
  1874. /**
  1875. * @class PlaybackWatcher
  1876. */
  1877. var PlaybackWatcher = (function () {
  1878. /**
1879. * Represents a PlaybackWatcher object.
  1880. * @constructor
  1881. * @param {object} options an object that includes the tech and settings
  1882. */
  1883. function PlaybackWatcher(options) {
  1884. var _this = this;
  1885. _classCallCheck(this, PlaybackWatcher);
  1886. this.tech_ = options.tech;
  1887. this.seekable = options.seekable;
  1888. this.consecutiveUpdates = 0;
  1889. this.lastRecordedTime = null;
  1890. this.timer_ = null;
  1891. this.checkCurrentTimeTimeout_ = null;
  1892. if (options.debug) {
  1893. this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'playback-watcher ->');
  1894. }
  1895. this.logger_('initialize');
  1896. var waitingHandler = function waitingHandler() {
  1897. return _this.waiting_();
  1898. };
  1899. var cancelTimerHandler = function cancelTimerHandler() {
  1900. return _this.cancelTimer_();
  1901. };
  1902. var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
  1903. return _this.fixesBadSeeks_();
  1904. };
  1905. this.tech_.on('seekablechanged', fixesBadSeeksHandler);
  1906. this.tech_.on('waiting', waitingHandler);
  1907. this.tech_.on(timerCancelEvents, cancelTimerHandler);
  1908. this.monitorCurrentTime_();
  1909. // Define the dispose function to clean up our events
  1910. this.dispose = function () {
  1911. _this.logger_('dispose');
  1912. _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
  1913. _this.tech_.off('waiting', waitingHandler);
  1914. _this.tech_.off(timerCancelEvents, cancelTimerHandler);
  1915. if (_this.checkCurrentTimeTimeout_) {
  1916. clearTimeout(_this.checkCurrentTimeTimeout_);
  1917. }
  1918. _this.cancelTimer_();
  1919. };
  1920. }
  1921. /**
  1922. * Periodically check current time to see if playback stopped
  1923. *
  1924. * @private
  1925. */
  1926. _createClass(PlaybackWatcher, [{
  1927. key: 'monitorCurrentTime_',
  1928. value: function monitorCurrentTime_() {
  1929. this.checkCurrentTime_();
  1930. if (this.checkCurrentTimeTimeout_) {
  1931. clearTimeout(this.checkCurrentTimeTimeout_);
  1932. }
1933. // candidate intervals: 42ms ~= 24 fps; 250ms is what WebKit uses; Firefox uses 15ms
  1934. this.checkCurrentTimeTimeout_ = setTimeout(this.monitorCurrentTime_.bind(this), 250);
  1935. }
  1936. /**
  1937. * The purpose of this function is to emulate the "waiting" event on
  1938. * browsers that do not emit it when they are waiting for more
  1939. * data to continue playback
  1940. *
  1941. * @private
  1942. */
  1943. }, {
  1944. key: 'checkCurrentTime_',
  1945. value: function checkCurrentTime_() {
  1946. if (this.tech_.seeking() && this.fixesBadSeeks_()) {
  1947. this.consecutiveUpdates = 0;
  1948. this.lastRecordedTime = this.tech_.currentTime();
  1949. return;
  1950. }
  1951. if (this.tech_.paused() || this.tech_.seeking()) {
  1952. return;
  1953. }
  1954. var currentTime = this.tech_.currentTime();
  1955. if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
  1956. this.consecutiveUpdates++;
  1957. this.waiting_();
  1958. } else if (currentTime === this.lastRecordedTime) {
  1959. this.consecutiveUpdates++;
  1960. } else {
  1961. this.consecutiveUpdates = 0;
  1962. this.lastRecordedTime = currentTime;
  1963. }
  1964. }
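// Note: given the 250ms poll in monitorCurrentTime_ above, the
// consecutiveUpdates >= 5 guard amounts to roughly a second and a quarter of
// unchanged currentTime readings before waiting_() is invoked to emulate the
// missing 'waiting' event.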
  1965. /**
  1966. * Cancels any pending timers and resets the 'timeupdate' mechanism
  1967. * designed to detect that we are stalled
  1968. *
  1969. * @private
  1970. */
  1971. }, {
  1972. key: 'cancelTimer_',
  1973. value: function cancelTimer_() {
  1974. this.consecutiveUpdates = 0;
  1975. if (this.timer_) {
  1976. this.logger_('cancelTimer_');
  1977. clearTimeout(this.timer_);
  1978. }
  1979. this.timer_ = null;
  1980. }
  1981. /**
  1982. * Fixes situations where there's a bad seek
  1983. *
  1984. * @return {Boolean} whether an action was taken to fix the seek
  1985. * @private
  1986. */
  1987. }, {
  1988. key: 'fixesBadSeeks_',
  1989. value: function fixesBadSeeks_() {
  1990. var seekable = this.seekable();
  1991. var currentTime = this.tech_.currentTime();
  1992. if (this.tech_.seeking() && this.outsideOfSeekableWindow_(seekable, currentTime)) {
  1993. var seekableEnd = seekable.end(seekable.length - 1);
  1994. // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
  1995. this.logger_('Trying to seek outside of seekable at time ' + currentTime + ' with ' + ('seekable range ' + _ranges2['default'].printableRange(seekable) + '. Seeking to ') + (seekableEnd + '.'));
  1996. this.tech_.setCurrentTime(seekableEnd);
  1997. return true;
  1998. }
  1999. return false;
  2000. }
  2001. /**
  2002. * Handler for situations when we determine the player is waiting
  2003. *
  2004. * @private
  2005. */
  2006. }, {
  2007. key: 'waiting_',
  2008. value: function waiting_() {
  2009. var seekable = this.seekable();
  2010. var currentTime = this.tech_.currentTime();
  2011. if (this.tech_.seeking() && this.fixesBadSeeks_()) {
  2012. return;
  2013. }
  2014. if (this.tech_.seeking() || this.timer_ !== null) {
  2015. return;
  2016. }
  2017. if (this.fellOutOfLiveWindow_(seekable, currentTime)) {
  2018. var livePoint = seekable.end(seekable.length - 1);
  2019. this.logger_('Fell out of live window at time ' + currentTime + '. Seeking to ' + ('live point (seekable end) ' + livePoint));
  2020. this.cancelTimer_();
  2021. this.tech_.setCurrentTime(livePoint);
  2022. // live window resyncs may be useful for monitoring QoS
  2023. this.tech_.trigger('liveresync');
  2024. return;
  2025. }
  2026. var buffered = this.tech_.buffered();
  2027. var nextRange = _ranges2['default'].findNextRange(buffered, currentTime);
  2028. if (this.videoUnderflow_(nextRange, buffered, currentTime)) {
  2029. // Even though the video underflowed and was stuck in a gap, the audio overplayed
  2030. // the gap, leading currentTime into a buffered range. Seeking to currentTime
  2031. // allows the video to catch up to the audio position without losing any audio
  2032. // (only suffering ~3 seconds of frozen video and a pause in audio playback).
  2033. this.cancelTimer_();
  2034. this.tech_.setCurrentTime(currentTime);
  2035. // video underflow may be useful for monitoring QoS
  2036. this.tech_.trigger('videounderflow');
  2037. return;
  2038. }
  2039. // check for gap
  2040. if (nextRange.length > 0) {
  2041. var difference = nextRange.start(0) - currentTime;
  2042. this.logger_('Stopped at ' + currentTime + ', setting timer for ' + difference + ', seeking ' + ('to ' + nextRange.start(0)));
  2043. this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
  2044. }
  2045. }
  2046. }, {
  2047. key: 'outsideOfSeekableWindow_',
  2048. value: function outsideOfSeekableWindow_(seekable, currentTime) {
  2049. if (!seekable.length) {
  2050. // we can't make a solid case if there's no seekable, default to false
  2051. return false;
  2052. }
  2053. // provide a buffer of .1 seconds to handle rounding/imprecise numbers
  2054. if (currentTime < seekable.start(0) - 0.1 || currentTime > seekable.end(seekable.length - 1) + 0.1) {
  2055. return true;
  2056. }
  2057. return false;
  2058. }
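// For illustration (hypothetical numbers): with a seekable range of [5, 100],
// a currentTime of 4.95 sits inside the 0.1-second fudge and is not outside;
// 4.8 (below 5 - 0.1) or 100.2 (above 100 + 0.1) both count as outside.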
  2059. }, {
  2060. key: 'fellOutOfLiveWindow_',
  2061. value: function fellOutOfLiveWindow_(seekable, currentTime) {
  2062. if (seekable.length &&
2063. // can't fall before 0, and a seekable start of 0 identifies a VOD stream
  2064. seekable.start(0) > 0 && currentTime < seekable.start(0)) {
  2065. return true;
  2066. }
  2067. return false;
  2068. }
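// For illustration (made-up values): on a live stream whose seekable window
// has slid forward to [30, 90], a currentTime of 25 has fallen out of the
// window and waiting_() seeks back to the live point; a VOD stream, whose
// seekable starts at 0, can never take this path.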
  2069. }, {
  2070. key: 'videoUnderflow_',
  2071. value: function videoUnderflow_(nextRange, buffered, currentTime) {
  2072. if (nextRange.length === 0) {
  2073. // Even if there is no available next range, there is still a possibility we are
  2074. // stuck in a gap due to video underflow.
  2075. var gap = this.gapFromVideoUnderflow_(buffered, currentTime);
  2076. if (gap) {
  2077. this.logger_('Encountered a gap in video from ' + gap.start + ' to ' + gap.end + '. ' + ('Seeking to current time ' + currentTime));
  2078. return true;
  2079. }
  2080. }
  2081. return false;
  2082. }
  2083. /**
  2084. * Timer callback. If playback still has not proceeded, then we seek
  2085. * to the start of the next buffered region.
  2086. *
  2087. * @private
  2088. */
  2089. }, {
  2090. key: 'skipTheGap_',
  2091. value: function skipTheGap_(scheduledCurrentTime) {
  2092. var buffered = this.tech_.buffered();
  2093. var currentTime = this.tech_.currentTime();
  2094. var nextRange = _ranges2['default'].findNextRange(buffered, currentTime);
  2095. this.cancelTimer_();
  2096. if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
  2097. return;
  2098. }
  2099. this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0));
  2100. // only seek if we still have not played
  2101. this.tech_.setCurrentTime(nextRange.start(0) + _ranges2['default'].TIME_FUDGE_FACTOR);
  2102. }
  2103. }, {
  2104. key: 'gapFromVideoUnderflow_',
  2105. value: function gapFromVideoUnderflow_(buffered, currentTime) {
  2106. // At least in Chrome, if there is a gap in the video buffer, the audio will continue
  2107. // playing for ~3 seconds after the video gap starts. This is done to account for
  2108. // video buffer underflow/underrun (note that this is not done when there is audio
  2109. // buffer underflow/underrun -- in that case the video will stop as soon as it
  2110. // encounters the gap, as audio stalls are more noticeable/jarring to a user than
  2111. // video stalls). The player's time will reflect the playthrough of audio, so the
  2112. // time will appear as if we are in a buffered region, even if we are stuck in a
  2113. // "gap."
  2114. //
  2115. // Example:
  2116. // video buffer: 0 => 10.1, 10.2 => 20
  2117. // audio buffer: 0 => 20
  2118. // overall buffer: 0 => 10.1, 10.2 => 20
  2119. // current time: 13
  2120. //
  2121. // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
  2122. // however, the audio continued playing until it reached ~3 seconds past the gap
  2123. // (13 seconds), at which point it stops as well. Since current time is past the
  2124. // gap, findNextRange will return no ranges.
  2125. //
  2126. // To check for this issue, we see if there is a gap that starts somewhere within
  2127. // a 3 second range (3 seconds +/- 1 second) back from our current time.
  2128. var gaps = _ranges2['default'].findGaps(buffered);
  2129. for (var i = 0; i < gaps.length; i++) {
  2130. var start = gaps.start(i);
  2131. var end = gaps.end(i);
2132. // gap starts more than 2 but no more than 4 seconds back
  2133. if (currentTime - start < 4 && currentTime - start > 2) {
  2134. return {
  2135. start: start,
  2136. end: end
  2137. };
  2138. }
  2139. }
  2140. return null;
  2141. }
  2142. /**
2143. * A debugging logger noop that is replaced with a videojs logger only if
2144. * debugging is enabled in the constructor options
  2145. *
  2146. * @private
  2147. */
  2148. }, {
  2149. key: 'logger_',
  2150. value: function logger_() {}
  2151. }]);
  2152. return PlaybackWatcher;
  2153. })();
  2154. exports['default'] = PlaybackWatcher;
  2155. module.exports = exports['default'];
  2156. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  2157. },{"./ranges":10}],8:[function(require,module,exports){
  2158. (function (global){
  2159. /**
  2160. * @file playlist-loader.js
  2161. *
  2162. * A state machine that manages the loading, caching, and updating of
  2163. * M3U8 playlists.
  2164. *
  2165. */
  2166. 'use strict';
  2167. Object.defineProperty(exports, '__esModule', {
  2168. value: true
  2169. });
  2170. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  2171. var _resolveUrl = require('./resolve-url');
  2172. var _resolveUrl2 = _interopRequireDefault(_resolveUrl);
  2173. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  2174. var _playlistJs = require('./playlist.js');
  2175. var _stream = require('./stream');
  2176. var _stream2 = _interopRequireDefault(_stream);
  2177. var _m3u8Parser = require('m3u8-parser');
  2178. var _m3u8Parser2 = _interopRequireDefault(_m3u8Parser);
  2179. var _globalWindow = require('global/window');
  2180. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  2181. /**
  2182. * Returns a new array of segments that is the result of merging
  2183. * properties from an older list of segments onto an updated
  2184. * list. No properties on the updated playlist will be overridden.
  2185. *
  2186. * @param {Array} original the outdated list of segments
  2187. * @param {Array} update the updated list of segments
  2188. * @param {Number=} offset the index of the first update
  2189. * segment in the original segment list. For non-live playlists,
  2190. * this should always be zero and does not need to be
  2191. * specified. For live playlists, it should be the difference
  2192. * between the media sequence numbers in the original and updated
  2193. * playlists.
  2194. * @return a list of merged segment objects
  2195. */
  2196. var updateSegments = function updateSegments(original, update, offset) {
  2197. var result = update.slice();
  2198. var length = undefined;
  2199. var i = undefined;
  2200. offset = offset || 0;
  2201. length = Math.min(original.length, update.length + offset);
  2202. for (i = offset; i < length; i++) {
  2203. result[i - offset] = (0, _videoJs.mergeOptions)(original[i], result[i - offset]);
  2204. }
  2205. return result;
  2206. };
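// For illustration (hypothetical segments): if the original playlist held
// [A, B, C] and the update holds [B', C', D] with offset 1 (media sequence
// advanced by one), the result is [merge(B, B'), merge(C, C'), D], where
// properties already present on the update win, matching the doc comment
// above.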
  2207. /**
  2208. * Returns a new master playlist that is the result of merging an
  2209. * updated media playlist into the original version. If the
  2210. * updated media playlist does not match any of the playlist
  2211. * entries in the original master playlist, null is returned.
  2212. *
  2213. * @param {Object} master a parsed master M3U8 object
  2214. * @param {Object} media a parsed media M3U8 object
  2215. * @return {Object} a new object that represents the original
  2216. * master playlist with the updated media playlist merged in, or
  2217. * null if the merge produced no change.
  2218. */
  2219. var updateMaster = function updateMaster(master, media) {
  2220. var changed = false;
  2221. var result = (0, _videoJs.mergeOptions)(master, {});
  2222. var i = master.playlists.length;
  2223. var playlist = undefined;
  2224. var segment = undefined;
  2225. var j = undefined;
  2226. while (i--) {
  2227. playlist = result.playlists[i];
  2228. if (playlist.uri === media.uri) {
  2229. // consider the playlist unchanged if the number of segments
  2230. // are equal and the media sequence number is unchanged
  2231. if (playlist.segments && media.segments && playlist.segments.length === media.segments.length && playlist.mediaSequence === media.mediaSequence) {
  2232. continue;
  2233. }
  2234. result.playlists[i] = (0, _videoJs.mergeOptions)(playlist, media);
  2235. result.playlists[media.uri] = result.playlists[i];
  2236. // if the update could overlap existing segment information,
  2237. // merge the two lists
  2238. if (playlist.segments) {
  2239. result.playlists[i].segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
  2240. }
  2241. // resolve any missing segment and key URIs
  2242. j = 0;
  2243. if (result.playlists[i].segments) {
  2244. j = result.playlists[i].segments.length;
  2245. }
  2246. while (j--) {
  2247. segment = result.playlists[i].segments[j];
  2248. if (!segment.resolvedUri) {
  2249. segment.resolvedUri = (0, _resolveUrl2['default'])(playlist.resolvedUri, segment.uri);
  2250. }
  2251. if (segment.key && !segment.key.resolvedUri) {
  2252. segment.key.resolvedUri = (0, _resolveUrl2['default'])(playlist.resolvedUri, segment.key.uri);
  2253. }
  2254. if (segment.map && !segment.map.resolvedUri) {
  2255. segment.map.resolvedUri = (0, _resolveUrl2['default'])(playlist.resolvedUri, segment.map.uri);
  2256. }
  2257. }
  2258. changed = true;
  2259. }
  2260. }
  2261. return changed ? result : null;
  2262. };
  2263. /**
  2264. * Load a playlist from a remote location
  2265. *
  2266. * @class PlaylistLoader
  2267. * @extends Stream
  2268. * @param {String} srcUrl the url to start with
  2269. * @param {Boolean} withCredentials the withCredentials xhr option
  2270. * @constructor
  2271. */
  2272. var PlaylistLoader = function PlaylistLoader(srcUrl, hls, withCredentials) {
  2273. var _this = this;
  2274. /* eslint-disable consistent-this */
  2275. var loader = this;
  2276. /* eslint-enable consistent-this */
  2277. var dispose = undefined;
  2278. var mediaUpdateTimeout = undefined;
  2279. var request = undefined;
  2280. var playlistRequestError = undefined;
  2281. var haveMetadata = undefined;
  2282. PlaylistLoader.prototype.constructor.call(this);
  2283. this.hls_ = hls;
  2284. if (!srcUrl) {
  2285. throw new Error('A non-empty playlist URL is required');
  2286. }
  2287. playlistRequestError = function (xhr, url, startingState) {
  2288. loader.setBandwidth(request || xhr);
  2289. // any in-flight request is now finished
  2290. request = null;
  2291. if (startingState) {
  2292. loader.state = startingState;
  2293. }
  2294. loader.error = {
  2295. playlist: loader.master.playlists[url],
  2296. status: xhr.status,
  2297. message: 'HLS playlist request error at URL: ' + url,
  2298. responseText: xhr.responseText,
  2299. code: xhr.status >= 500 ? 4 : 2
  2300. };
  2301. loader.trigger('error');
  2302. };
  2303. // update the playlist loader's state in response to a new or
  2304. // updated playlist.
  2305. haveMetadata = function (xhr, url) {
  2306. var parser = undefined;
  2307. var refreshDelay = undefined;
  2308. var update = undefined;
  2309. loader.setBandwidth(request || xhr);
  2310. // any in-flight request is now finished
  2311. request = null;
  2312. loader.state = 'HAVE_METADATA';
  2313. parser = new _m3u8Parser2['default'].Parser();
  2314. parser.push(xhr.responseText);
  2315. parser.end();
  2316. parser.manifest.uri = url;
  2317. // merge this playlist into the master
  2318. update = updateMaster(loader.master, parser.manifest);
  2319. refreshDelay = (parser.manifest.targetDuration || 10) * 1000;
  2320. loader.targetDuration = parser.manifest.targetDuration;
  2321. if (update) {
  2322. loader.master = update;
  2323. loader.media_ = loader.master.playlists[parser.manifest.uri];
  2324. } else {
  2325. // if the playlist is unchanged since the last reload,
  2326. // try again after half the target duration
  2327. refreshDelay /= 2;
  2328. loader.trigger('playlistunchanged');
  2329. }
  2330. // refresh live playlists after a target duration passes
  2331. if (!loader.media().endList) {
  2332. _globalWindow2['default'].clearTimeout(mediaUpdateTimeout);
  2333. mediaUpdateTimeout = _globalWindow2['default'].setTimeout(function () {
  2334. loader.trigger('mediaupdatetimeout');
  2335. }, refreshDelay);
  2336. }
  2337. loader.trigger('loadedplaylist');
  2338. };
  2339. // initialize the loader state
  2340. loader.state = 'HAVE_NOTHING';
  2341. // capture the prototype dispose function
  2342. dispose = this.dispose;
  2343. /**
  2344. * Abort any outstanding work and clean up.
  2345. */
  2346. loader.dispose = function () {
  2347. loader.stopRequest();
  2348. _globalWindow2['default'].clearTimeout(mediaUpdateTimeout);
  2349. dispose.call(this);
  2350. };
  2351. loader.stopRequest = function () {
  2352. if (request) {
  2353. var oldRequest = request;
  2354. request = null;
  2355. oldRequest.onreadystatechange = null;
  2356. oldRequest.abort();
  2357. }
  2358. };
  2359. /**
  2360. * Returns the number of enabled playlists on the master playlist object
  2361. *
2362. * @return {Number} number of enabled playlists
  2363. */
  2364. loader.enabledPlaylists_ = function () {
  2365. return loader.master.playlists.filter(_playlistJs.isEnabled).length;
  2366. };
  2367. /**
  2368. * Returns whether the current playlist is the lowest rendition
  2369. *
  2370. * @return {Boolean} true if on lowest rendition
  2371. */
  2372. loader.isLowestEnabledRendition_ = function () {
  2373. if (loader.master.playlists.length === 1) {
  2374. return true;
  2375. }
  2376. var media = loader.media();
  2377. var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
  2378. return loader.master.playlists.filter(function (playlist) {
  2379. var enabled = (0, _playlistJs.isEnabled)(playlist);
  2380. if (!enabled) {
  2381. return false;
  2382. }
  2383. var bandwidth = 0;
  2384. if (playlist && playlist.attributes) {
  2385. bandwidth = playlist.attributes.BANDWIDTH;
  2386. }
  2387. return bandwidth < currentBandwidth;
  2388. }).length === 0;
  2389. };
  2390. /**
  2391. * Returns whether the current playlist is the final available rendition
  2392. *
  2393. * @return {Boolean} true if on final rendition
  2394. */
  2395. loader.isFinalRendition_ = function () {
  2396. return loader.master.playlists.filter(_playlistJs.isEnabled).length === 1;
  2397. };
  2398. /**
  2399. * When called without any arguments, returns the currently
  2400. * active media playlist. When called with a single argument,
  2401. * triggers the playlist loader to asynchronously switch to the
  2402. * specified media playlist. Calling this method while the
2403. * loader is in the HAVE_NOTHING state causes an error to be emitted
  2404. * but otherwise has no effect.
  2405. *
  2406. * @param {Object=} playlist the parsed media playlist
  2407. * object to switch to
  2408. * @return {Playlist} the current loaded media
  2409. */
  2410. loader.media = function (playlist) {
  2411. var startingState = loader.state;
  2412. var mediaChange = undefined;
  2413. // getter
  2414. if (!playlist) {
  2415. return loader.media_;
  2416. }
  2417. // setter
  2418. if (loader.state === 'HAVE_NOTHING') {
  2419. throw new Error('Cannot switch media playlist from ' + loader.state);
  2420. }
  2421. // find the playlist object if the target playlist has been
  2422. // specified by URI
  2423. if (typeof playlist === 'string') {
  2424. if (!loader.master.playlists[playlist]) {
  2425. throw new Error('Unknown playlist URI: ' + playlist);
  2426. }
  2427. playlist = loader.master.playlists[playlist];
  2428. }
  2429. mediaChange = !loader.media_ || playlist.uri !== loader.media_.uri;
  2430. // switch to fully loaded playlists immediately
  2431. if (loader.master.playlists[playlist.uri].endList) {
  2432. // abort outstanding playlist requests
  2433. if (request) {
  2434. request.onreadystatechange = null;
  2435. request.abort();
  2436. request = null;
  2437. }
  2438. loader.state = 'HAVE_METADATA';
  2439. loader.media_ = playlist;
  2440. // trigger media change if the active media has been updated
  2441. if (mediaChange) {
  2442. loader.trigger('mediachanging');
  2443. loader.trigger('mediachange');
  2444. }
  2445. return;
  2446. }
  2447. // switching to the active playlist is a no-op
  2448. if (!mediaChange) {
  2449. return;
  2450. }
  2451. loader.state = 'SWITCHING_MEDIA';
  2452. // there is already an outstanding playlist request
  2453. if (request) {
  2454. if ((0, _resolveUrl2['default'])(loader.master.uri, playlist.uri) === request.url) {
  2455. // requesting to switch to the same playlist multiple times
  2456. // has no effect after the first
  2457. return;
  2458. }
  2459. request.onreadystatechange = null;
  2460. request.abort();
  2461. request = null;
  2462. }
  2463. // request the new playlist
  2464. if (this.media_) {
  2465. this.trigger('mediachanging');
  2466. }
  2467. request = this.hls_.xhr({
  2468. uri: (0, _resolveUrl2['default'])(loader.master.uri, playlist.uri),
  2469. withCredentials: withCredentials
  2470. }, function (error, req) {
  2471. // disposed
  2472. if (!request) {
  2473. return;
  2474. }
  2475. if (error) {
  2476. return playlistRequestError(request, playlist.uri, startingState);
  2477. }
  2478. haveMetadata(req, playlist.uri);
  2479. // fire loadedmetadata the first time a media playlist is loaded
  2480. if (startingState === 'HAVE_MASTER') {
  2481. loader.trigger('loadedmetadata');
  2482. } else {
  2483. loader.trigger('mediachange');
  2484. }
  2485. });
  2486. };
  2487. /**
  2488. * set the bandwidth on an xhr to the bandwidth on the playlist
  2489. */
  2490. loader.setBandwidth = function (xhr) {
  2491. loader.bandwidth = xhr.bandwidth;
  2492. };
  2493. // live playlist staleness timeout
  2494. loader.on('mediaupdatetimeout', function () {
  2495. if (loader.state !== 'HAVE_METADATA') {
  2496. // only refresh the media playlist if no other activity is going on
  2497. return;
  2498. }
  2499. loader.state = 'HAVE_CURRENT_METADATA';
  2500. request = this.hls_.xhr({
  2501. uri: (0, _resolveUrl2['default'])(loader.master.uri, loader.media().uri),
  2502. withCredentials: withCredentials
  2503. }, function (error, req) {
  2504. // disposed
  2505. if (!request) {
  2506. return;
  2507. }
  2508. if (error) {
  2509. return playlistRequestError(request, loader.media().uri, 'HAVE_METADATA');
  2510. }
  2511. haveMetadata(request, loader.media().uri);
  2512. });
  2513. });
  2514. // setup initial sync info
  2515. loader.on('firstplay', function () {
  2516. var playlist = loader.media();
  2517. if (playlist) {
  2518. playlist.syncInfo = {
  2519. mediaSequence: playlist.mediaSequence,
  2520. time: 0
  2521. };
  2522. }
  2523. });
  2524. /**
  2525. * pause loading of the playlist
  2526. */
  2527. loader.pause = function () {
  2528. loader.stopRequest();
  2529. _globalWindow2['default'].clearTimeout(mediaUpdateTimeout);
  2530. if (loader.state === 'HAVE_NOTHING') {
2531. // If we pause the loader before any data has been retrieved, it's as if we never
  2532. // started, so reset to an unstarted state.
  2533. loader.started = false;
  2534. }
  2535. };
  2536. /**
  2537. * start loading of the playlist
  2538. */
  2539. loader.load = function (isFinalRendition) {
  2540. var media = loader.media();
  2541. _globalWindow2['default'].clearTimeout(mediaUpdateTimeout);
  2542. if (isFinalRendition) {
  2543. var refreshDelay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
  2544. mediaUpdateTimeout = _globalWindow2['default'].setTimeout(loader.load.bind(null, false), refreshDelay);
  2545. return;
  2546. }
  2547. if (!loader.started) {
  2548. loader.start();
  2549. return;
  2550. }
  2551. if (media && !media.endList) {
  2552. loader.trigger('mediaupdatetimeout');
  2553. } else {
  2554. loader.trigger('loadedplaylist');
  2555. }
  2556. };
  2557. /**
  2558. * start loading of the playlist
  2559. */
  2560. loader.start = function () {
  2561. loader.started = true;
  2562. // request the specified URL
  2563. request = _this.hls_.xhr({
  2564. uri: srcUrl,
  2565. withCredentials: withCredentials
  2566. }, function (error, req) {
  2567. var parser = undefined;
  2568. var playlist = undefined;
  2569. var i = undefined;
  2570. // disposed
  2571. if (!request) {
  2572. return;
  2573. }
  2574. // clear the loader's request reference
  2575. request = null;
  2576. if (error) {
  2577. loader.error = {
  2578. status: req.status,
  2579. message: 'HLS playlist request error at URL: ' + srcUrl,
  2580. responseText: req.responseText,
  2581. // MEDIA_ERR_NETWORK
  2582. code: 2
  2583. };
  2584. if (loader.state === 'HAVE_NOTHING') {
  2585. loader.started = false;
  2586. }
  2587. return loader.trigger('error');
  2588. }
  2589. parser = new _m3u8Parser2['default'].Parser();
  2590. parser.push(req.responseText);
  2591. parser.end();
  2592. loader.state = 'HAVE_MASTER';
  2593. parser.manifest.uri = srcUrl;
  2594. // loaded a master playlist
  2595. if (parser.manifest.playlists) {
  2596. loader.master = parser.manifest;
  2597. // setup by-URI lookups and resolve media playlist URIs
  2598. i = loader.master.playlists.length;
  2599. while (i--) {
  2600. playlist = loader.master.playlists[i];
  2601. loader.master.playlists[playlist.uri] = playlist;
  2602. playlist.resolvedUri = (0, _resolveUrl2['default'])(loader.master.uri, playlist.uri);
  2603. }
  2604. // resolve any media group URIs
  2605. ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
  2606. for (var groupKey in loader.master.mediaGroups[mediaType]) {
  2607. for (var labelKey in loader.master.mediaGroups[mediaType][groupKey]) {
  2608. var mediaProperties = loader.master.mediaGroups[mediaType][groupKey][labelKey];
  2609. if (mediaProperties.uri) {
  2610. mediaProperties.resolvedUri = (0, _resolveUrl2['default'])(loader.master.uri, mediaProperties.uri);
  2611. }
  2612. }
  2613. }
  2614. });
  2615. loader.trigger('loadedplaylist');
  2616. if (!request) {
  2617. // no media playlist was specifically selected so start
  2618. // from the first listed one
  2619. loader.media(parser.manifest.playlists[0]);
  2620. }
  2621. return;
  2622. }
  2623. // loaded a media playlist
  2624. // infer a master playlist if none was previously requested
  2625. loader.master = {
  2626. mediaGroups: {
  2627. 'AUDIO': {},
  2628. 'VIDEO': {},
  2629. 'CLOSED-CAPTIONS': {},
  2630. 'SUBTITLES': {}
  2631. },
  2632. uri: _globalWindow2['default'].location.href,
  2633. playlists: [{
  2634. uri: srcUrl
  2635. }]
  2636. };
  2637. loader.master.playlists[srcUrl] = loader.master.playlists[0];
  2638. loader.master.playlists[0].resolvedUri = srcUrl;
  2639. haveMetadata(req, srcUrl);
  2640. return loader.trigger('loadedmetadata');
  2641. });
  2642. };
  2643. };
  2644. PlaylistLoader.prototype = new _stream2['default']();
  2645. exports['default'] = PlaylistLoader;
  2646. module.exports = exports['default'];
  2647. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  2648. },{"./playlist.js":9,"./resolve-url":13,"./stream":16,"global/window":30,"m3u8-parser":31}],9:[function(require,module,exports){
  2649. (function (global){
  2650. /**
  2651. * @file playlist.js
  2652. *
  2653. * Playlist related utilities.
  2654. */
  2655. 'use strict';
  2656. Object.defineProperty(exports, '__esModule', {
  2657. value: true
  2658. });
  2659. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  2660. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  2661. var _globalWindow = require('global/window');
  2662. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  2663. var Playlist = {
  2664. /**
  2665. * The number of segments that are unsafe to start playback at in
  2666. * a live stream. Changing this value can cause playback stalls.
  2667. * See HTTP Live Streaming, "Playing the Media Playlist File"
  2668. * https://tools.ietf.org/html/draft-pantos-http-live-streaming-18#section-6.3.3
  2669. */
  2670. UNSAFE_LIVE_SEGMENTS: 3
  2671. };
  2672. /**
  2673. * walk backward until we find a duration we can use
  2674. * or return a failure
  2675. *
  2676. * @param {Playlist} playlist the playlist to walk through
  2677. * @param {Number} endSequence the mediaSequence to stop walking on
  2678. */
  2679. var backwardDuration = function backwardDuration(playlist, endSequence) {
  2680. var result = 0;
  2681. var i = endSequence - playlist.mediaSequence;
2682. // if a start time is available for the segment immediately following
2683. // the interval, use it
  2684. var segment = playlist.segments[i];
  2685. // Walk backward until we find the latest segment with timeline
  2686. // information that is earlier than endSequence
  2687. if (segment) {
  2688. if (typeof segment.start !== 'undefined') {
  2689. return { result: segment.start, precise: true };
  2690. }
  2691. if (typeof segment.end !== 'undefined') {
  2692. return {
  2693. result: segment.end - segment.duration,
  2694. precise: true
  2695. };
  2696. }
  2697. }
  2698. while (i--) {
  2699. segment = playlist.segments[i];
  2700. if (typeof segment.end !== 'undefined') {
  2701. return { result: result + segment.end, precise: true };
  2702. }
  2703. result += segment.duration;
  2704. if (typeof segment.start !== 'undefined') {
  2705. return { result: result + segment.start, precise: true };
  2706. }
  2707. }
  2708. return { result: result, precise: false };
  2709. };
  2710. /**
  2711. * walk forward until we find a duration we can use
  2712. * or return a failure
  2713. *
  2714. * @param {Playlist} playlist the playlist to walk through
  2715. * @param {Number} endSequence the mediaSequence to stop walking on
  2716. */
  2717. var forwardDuration = function forwardDuration(playlist, endSequence) {
  2718. var result = 0;
  2719. var segment = undefined;
  2720. var i = endSequence - playlist.mediaSequence;
  2721. // Walk forward until we find the earliest segment with timeline
  2722. // information
  2723. for (; i < playlist.segments.length; i++) {
  2724. segment = playlist.segments[i];
  2725. if (typeof segment.start !== 'undefined') {
  2726. return {
  2727. result: segment.start - result,
  2728. precise: true
  2729. };
  2730. }
  2731. result += segment.duration;
  2732. if (typeof segment.end !== 'undefined') {
  2733. return {
  2734. result: segment.end - result,
  2735. precise: true
  2736. };
  2737. }
  2738. }
  2739. // indicate we didn't find a useful duration estimate
  2740. return { result: -1, precise: false };
  2741. };
  2742. /**
  2743. * Calculate the media duration from the segments associated with a
  2744. * playlist. The duration of a subinterval of the available segments
  2745. * may be calculated by specifying an end index.
  2746. *
  2747. * @param {Object} playlist a media playlist object
  2748. * @param {Number=} endSequence an exclusive upper boundary
  2749. * for the playlist. Defaults to playlist length.
  2750. * @param {Number} expired the amount of time that has dropped
  2751. * off the front of the playlist in a live scenario
  2752. * @return {Number} the duration between the first available segment
  2753. * and end index.
  2754. */
  2755. var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  2756. var backward = undefined;
  2757. var forward = undefined;
  2758. if (typeof endSequence === 'undefined') {
  2759. endSequence = playlist.mediaSequence + playlist.segments.length;
  2760. }
  2761. if (endSequence < playlist.mediaSequence) {
  2762. return 0;
  2763. }
  2764. // do a backward walk to estimate the duration
  2765. backward = backwardDuration(playlist, endSequence);
  2766. if (backward.precise) {
  2767. // if we were able to base our duration estimate on timing
  2768. // information provided directly from the Media Source, return
  2769. // it
  2770. return backward.result;
  2771. }
  2772. // walk forward to see if a precise duration estimate can be made
  2773. // that way
  2774. forward = forwardDuration(playlist, endSequence);
  2775. if (forward.precise) {
2776. // we found a segment that has been buffered and so its
  2777. // position is known precisely
  2778. return forward.result;
  2779. }
  2780. // return the less-precise, playlist-based duration estimate
  2781. return backward.result + expired;
  2782. };
  2783. /**
  2784. * Calculates the duration of a playlist. If a start and end index
  2785. * are specified, the duration will be for the subset of the media
  2786. * timeline between those two indices. The total duration for live
  2787. * playlists is always Infinity.
  2788. *
  2789. * @param {Object} playlist a media playlist object
  2790. * @param {Number=} endSequence an exclusive upper
  2791. * boundary for the playlist. Defaults to the playlist media
  2792. * sequence number plus its length.
  2793. * @param {Number=} expired the amount of time that has
  2794. * dropped off the front of the playlist in a live scenario
  2795. * @return {Number} the duration between the start index and end
  2796. * index.
  2797. */
  2798. var duration = function duration(playlist, endSequence, expired) {
  2799. if (!playlist) {
  2800. return 0;
  2801. }
  2802. if (typeof expired !== 'number') {
  2803. expired = 0;
  2804. }
  2805. // if a slice of the total duration is not requested, use
  2806. // playlist-level duration indicators when they're present
  2807. if (typeof endSequence === 'undefined') {
  2808. // if present, use the duration specified in the playlist
  2809. if (playlist.totalDuration) {
  2810. return playlist.totalDuration;
  2811. }
  2812. // duration should be Infinity for live playlists
  2813. if (!playlist.endList) {
  2814. return _globalWindow2['default'].Infinity;
  2815. }
  2816. }
  2817. // calculate the total duration based on the segment durations
  2818. return intervalDuration(playlist, endSequence, expired);
  2819. };
  2820. exports.duration = duration;
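/**
 * Illustrative sketch (not part of the upstream bundle; values are
 * hypothetical): how `duration` resolves for a small VOD playlist versus
 * a live one.
 *
 *   var vod = {
 *     mediaSequence: 0,
 *     endList: true,
 *     segments: [{ duration: 10 }, { duration: 10 }, { duration: 9.5 }]
 *   };
 *   duration(vod); // 29.5 -- summed segment durations via the backward walk
 *
 *   var live = { mediaSequence: 0, segments: [{ duration: 10 }] };
 *   duration(live); // Infinity -- no EXT-X-ENDLIST, so live playlists
 *                   // report an infinite duration
 */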
  2821. /**
  2822. * Calculate the time between two indexes in the current playlist
2823. * Neither the start- nor the end-index needs to be within the current
2824. * playlist, in which case the targetDuration of the playlist is used
  2825. * to approximate the durations of the segments
  2826. *
  2827. * @param {Object} playlist a media playlist object
  2828. * @param {Number} startIndex
  2829. * @param {Number} endIndex
  2830. * @return {Number} the number of seconds between startIndex and endIndex
  2831. */
  2832. var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
  2833. var durations = 0;
  2834. if (startIndex > endIndex) {
  2835. var _ref = [endIndex, startIndex];
  2836. startIndex = _ref[0];
  2837. endIndex = _ref[1];
  2838. }
  2839. if (startIndex < 0) {
  2840. for (var i = startIndex; i < Math.min(0, endIndex); i++) {
  2841. durations += playlist.targetDuration;
  2842. }
  2843. startIndex = 0;
  2844. }
  2845. for (var i = startIndex; i < endIndex; i++) {
  2846. durations += playlist.segments[i].duration;
  2847. }
  2848. return durations;
  2849. };
  2850. exports.sumDurations = sumDurations;
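/**
 * Illustrative sketch (hypothetical values): `sumDurations` substitutes
 * `targetDuration` for indices that fall before the start of the playlist.
 *
 *   var playlist = {
 *     targetDuration: 10,
 *     segments: [{ duration: 9 }, { duration: 10 }]
 *   };
 *   // two estimated segments (indices -2 and -1) plus segment 0:
 *   sumDurations(playlist, -2, 1); // 10 + 10 + 9 === 29
 */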
  2851. /**
  2852. * Returns an array with two sync points. The first being an expired sync point, which is
  2853. * the most recent segment with timing sync data that has fallen off the playlist. The
  2854. * second is a segment sync point, which is the first segment that has timing sync data in
  2855. * the current playlist.
  2856. *
  2857. * @param {Object} playlist a media playlist object
  2858. * @returns {Object} an object containing the two sync points
  2859. * @returns {Object.expiredSync|null} sync point data from an expired segment
  2860. * @returns {Object.segmentSync|null} sync point data from a segment in the playlist
  2861. * @function getPlaylistSyncPoints
  2862. */
  2863. var getPlaylistSyncPoints = function getPlaylistSyncPoints(playlist) {
  2864. if (!playlist || !playlist.segments) {
2865. return { expiredSync: null, segmentSync: null };
  2866. }
  2867. var expiredSync = playlist.syncInfo || (playlist.endList ? { time: 0, mediaSequence: 0 } : null);
  2868. var segmentSync = null;
  2869. // Find the first segment with timing information
  2870. for (var i = 0, l = playlist.segments.length; i < l; i++) {
  2871. var segment = playlist.segments[i];
  2872. if (typeof segment.start !== 'undefined') {
  2873. segmentSync = {
  2874. mediaSequence: playlist.mediaSequence + i,
  2875. time: segment.start
  2876. };
  2877. break;
  2878. }
  2879. }
  2880. return { expiredSync: expiredSync, segmentSync: segmentSync };
  2881. };
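/**
 * Illustrative sketch (hypothetical values): for a live playlist whose
 * second segment carries timing information, only a segment sync point
 * is available.
 *
 *   var playlist = {
 *     mediaSequence: 100,
 *     segments: [{ duration: 10 }, { duration: 10, start: 1010 }]
 *   };
 *   getPlaylistSyncPoints(playlist);
 *   // { expiredSync: null,
 *   //   segmentSync: { mediaSequence: 101, time: 1010 } }
 */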
  2882. /**
  2883. * Calculates the amount of time expired from the playlist based on the provided
  2884. * sync points.
  2885. *
  2886. * @param {Object} playlist a media playlist object
  2887. * @param {Object|null} expiredSync sync point representing most recent segment with
  2888. * timing sync data that has fallen off the playlist
  2889. * @param {Object|null} segmentSync sync point representing the first segment that has
  2890. * timing sync data in the playlist
  2891. * @returns {Number} the amount of time expired from the playlist
  2892. * @function calculateExpiredTime
  2893. */
  2894. var calculateExpiredTime = function calculateExpiredTime(playlist) {
  2895. // If we have both an expired sync point and a segment sync point
  2896. // determine which sync point is closest to the start of the playlist
  2897. // so the minimal amount of timing estimation is done.
  2898. var _getPlaylistSyncPoints = getPlaylistSyncPoints(playlist);
  2899. var expiredSync = _getPlaylistSyncPoints.expiredSync;
  2900. var segmentSync = _getPlaylistSyncPoints.segmentSync;
  2901. if (expiredSync && segmentSync) {
  2902. var expiredDiff = expiredSync.mediaSequence - playlist.mediaSequence;
  2903. var segmentDiff = segmentSync.mediaSequence - playlist.mediaSequence;
  2904. var syncIndex = undefined;
  2905. var syncTime = undefined;
  2906. if (Math.abs(expiredDiff) > Math.abs(segmentDiff)) {
  2907. syncIndex = segmentDiff;
  2908. syncTime = -segmentSync.time;
  2909. } else {
  2910. syncIndex = expiredDiff;
  2911. syncTime = expiredSync.time;
  2912. }
  2913. return Math.abs(syncTime + sumDurations(playlist, syncIndex, 0));
  2914. }
  2915. // We only have an expired sync point, so base expired time on the expired sync point
  2916. // and estimate the time from that sync point to the start of the playlist.
  2917. if (expiredSync) {
  2918. var syncIndex = expiredSync.mediaSequence - playlist.mediaSequence;
  2919. return expiredSync.time + sumDurations(playlist, syncIndex, 0);
  2920. }
  2921. // We only have a segment sync point, so base expired time on the first segment we have
  2922. // sync point data for and estimate the time from that media index to the start of the
  2923. // playlist.
  2924. if (segmentSync) {
  2925. var syncIndex = segmentSync.mediaSequence - playlist.mediaSequence;
  2926. return segmentSync.time - sumDurations(playlist, syncIndex, 0);
  2927. }
  2928. return null;
  2929. };
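/**
 * Illustrative sketch continuing the example above: with only a segment
 * sync point, the expired time is the sync point's time minus the
 * durations between it and the start of the playlist.
 *
 *   // segmentSync = { mediaSequence: 101, time: 1010 }, so syncIndex = 1
 *   // and sumDurations(playlist, 1, 0) === 10:
 *   calculateExpiredTime(playlist); // 1010 - 10 === 1000
 */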
  2930. /**
  2931. * Calculates the playlist end time
  2932. *
  2933. * @param {Object} playlist a media playlist object
  2934. * @param {Boolean|false} useSafeLiveEnd a boolean value indicating whether or not the playlist
  2935. * end calculation should consider the safe live end (truncate the playlist
  2936. * end by three segments). This is normally used for calculating the end of
  2937. * the playlist's seekable range.
  2938. * @returns {Number} the end time of playlist
  2939. * @function playlistEnd
  2940. */
  2941. var playlistEnd = function playlistEnd(playlist, useSafeLiveEnd) {
  2942. if (!playlist || !playlist.segments) {
  2943. return null;
  2944. }
  2945. if (playlist.endList) {
  2946. return duration(playlist);
  2947. }
  2948. var expired = calculateExpiredTime(playlist);
  2949. if (expired === null) {
  2950. return null;
  2951. }
  2952. var endSequence = useSafeLiveEnd ? Math.max(0, playlist.segments.length - Playlist.UNSAFE_LIVE_SEGMENTS) : Math.max(0, playlist.segments.length);
  2953. return intervalDuration(playlist, playlist.mediaSequence + endSequence, expired);
  2954. };
  2955. exports.playlistEnd = playlistEnd;
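/**
 * Illustrative sketch (hypothetical values): with five 10-second live
 * segments and a known start for the first one, the safe live end drops
 * the last UNSAFE_LIVE_SEGMENTS (3) segments from the calculation.
 *
 *   var playlist = {
 *     mediaSequence: 100,
 *     segments: [
 *       { duration: 10, start: 1000 }, { duration: 10 }, { duration: 10 },
 *       { duration: 10 }, { duration: 10 }
 *     ]
 *   };
 *   playlistEnd(playlist, true);  // 1020 -- only the first two segments count
 *   playlistEnd(playlist, false); // 1050 -- the full live edge
 */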
  2956. /**
  2957. * Calculates the interval of time that is currently seekable in a
  2958. * playlist. The returned time ranges are relative to the earliest
  2959. * moment in the specified playlist that is still available. A full
  2960. * seekable implementation for live streams would need to offset
  2961. * these values by the duration of content that has expired from the
  2962. * stream.
  2963. *
2964. * @param {Object} playlist a media playlist object
  2966. * @return {TimeRanges} the periods of time that are valid targets
  2967. * for seeking
  2968. */
  2969. var seekable = function seekable(playlist) {
  2970. var useSafeLiveEnd = true;
  2971. var seekableStart = calculateExpiredTime(playlist);
  2972. var seekableEnd = playlistEnd(playlist, useSafeLiveEnd);
  2973. if (seekableEnd === null) {
  2974. return (0, _videoJs.createTimeRange)();
  2975. }
  2976. return (0, _videoJs.createTimeRange)(seekableStart, seekableEnd);
  2977. };
  2978. exports.seekable = seekable;
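/**
 * Illustrative sketch continuing the live playlist above: the seekable
 * window runs from the expired time to the safe live end.
 *
 *   seekable(playlist);
 *   // equivalent to videojs.createTimeRange(1000, 1020)
 */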
  2979. var isWholeNumber = function isWholeNumber(num) {
  2980. return num - Math.floor(num) === 0;
  2981. };
  2982. var roundSignificantDigit = function roundSignificantDigit(increment, num) {
  2983. // If we have a whole number, just add 1 to it
  2984. if (isWholeNumber(num)) {
  2985. return num + increment * 0.1;
  2986. }
  2987. var numDecimalDigits = num.toString().split('.')[1].length;
  2988. for (var i = 1; i <= numDecimalDigits; i++) {
  2989. var scale = Math.pow(10, i);
  2990. var temp = num * scale;
  2991. if (isWholeNumber(temp) || i === numDecimalDigits) {
  2992. return (temp + increment) / scale;
  2993. }
  2994. }
  2995. };
  2996. var ceilLeastSignificantDigit = roundSignificantDigit.bind(null, 1);
  2997. var floorLeastSignificantDigit = roundSignificantDigit.bind(null, -1);
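/**
 * Illustrative sketch: these helpers nudge the least significant decimal
 * digit up or down, absorbing floating point noise in segment durations.
 *
 *   ceilLeastSignificantDigit(8.13);  // ~8.14
 *   floorLeastSignificantDigit(8.13); // ~8.12
 *   ceilLeastSignificantDigit(8);     // 8.1 (whole numbers step by 0.1)
 */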
  2998. /**
  2999. * Determine the index and estimated starting time of the segment that
  3000. * contains a specified playback position in a media playlist.
  3001. *
  3002. * @param {Object} playlist the media playlist to query
  3003. * @param {Number} currentTime The number of seconds since the earliest
  3004. * possible position to determine the containing segment for
  3005. * @param {Number} startIndex
  3006. * @param {Number} startTime
  3007. * @return {Object}
  3008. */
  3009. var getMediaInfoForTime_ = function getMediaInfoForTime_(playlist, currentTime, startIndex, startTime) {
  3010. var i = undefined;
  3011. var segment = undefined;
  3012. var numSegments = playlist.segments.length;
  3013. var time = currentTime - startTime;
  3014. if (time < 0) {
  3015. // Walk backward from startIndex in the playlist, adding durations
  3016. // until we find a segment that contains `time` and return it
  3017. if (startIndex > 0) {
  3018. for (i = startIndex - 1; i >= 0; i--) {
  3019. segment = playlist.segments[i];
  3020. time += floorLeastSignificantDigit(segment.duration);
  3021. if (time > 0) {
  3022. return {
  3023. mediaIndex: i,
  3024. startTime: startTime - sumDurations(playlist, startIndex, i)
  3025. };
  3026. }
  3027. }
  3028. }
  3029. // We were unable to find a good segment within the playlist
  3030. // so select the first segment
  3031. return {
  3032. mediaIndex: 0,
  3033. startTime: currentTime
  3034. };
  3035. }
3036. // When startIndex is negative, we first walk forward to the first segment
  3037. // adding target durations. If we "run out of time" before getting to
  3038. // the first segment, return the first segment
  3039. if (startIndex < 0) {
  3040. for (i = startIndex; i < 0; i++) {
  3041. time -= playlist.targetDuration;
  3042. if (time < 0) {
  3043. return {
  3044. mediaIndex: 0,
  3045. startTime: currentTime
  3046. };
  3047. }
  3048. }
  3049. startIndex = 0;
  3050. }
  3051. // Walk forward from startIndex in the playlist, subtracting durations
  3052. // until we find a segment that contains `time` and return it
  3053. for (i = startIndex; i < numSegments; i++) {
  3054. segment = playlist.segments[i];
  3055. time -= ceilLeastSignificantDigit(segment.duration);
  3056. if (time < 0) {
  3057. return {
  3058. mediaIndex: i,
  3059. startTime: startTime + sumDurations(playlist, startIndex, i)
  3060. };
  3061. }
  3062. }
  3063. // We are out of possible candidates so load the last one...
  3064. return {
  3065. mediaIndex: numSegments - 1,
  3066. startTime: currentTime
  3067. };
  3068. };
  3069. exports.getMediaInfoForTime_ = getMediaInfoForTime_;
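/**
 * Illustrative sketch (hypothetical values): walking forward from a known
 * sync point at index 0 / time 0 to locate the segment containing 15s.
 *
 *   var playlist = {
 *     targetDuration: 10,
 *     segments: [{ duration: 10 }, { duration: 10 }]
 *   };
 *   getMediaInfoForTime_(playlist, 15, 0, 0);
 *   // { mediaIndex: 1, startTime: 10 }
 */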
  3070. /**
  3071. * Check whether the playlist is blacklisted or not.
  3072. *
  3073. * @param {Object} playlist the media playlist object
  3074. * @return {boolean} whether the playlist is blacklisted or not
  3075. * @function isBlacklisted
  3076. */
  3077. var isBlacklisted = function isBlacklisted(playlist) {
  3078. return playlist.excludeUntil && playlist.excludeUntil > Date.now();
  3079. };
  3080. exports.isBlacklisted = isBlacklisted;
  3081. /**
  3082. * Check whether the playlist is enabled or not.
  3083. *
  3084. * @param {Object} playlist the media playlist object
  3085. * @return {boolean} whether the playlist is enabled or not
  3086. * @function isEnabled
  3087. */
  3088. var isEnabled = function isEnabled(playlist) {
  3089. var blacklisted = isBlacklisted(playlist);
  3090. return !playlist.disabled && !blacklisted;
  3091. };
  3092. exports.isEnabled = isEnabled;
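/**
 * Illustrative sketch: a playlist is blacklisted while `excludeUntil`
 * lies in the future, and enabled only when it is neither blacklisted
 * nor manually disabled.
 *
 *   isBlacklisted({ excludeUntil: Date.now() + 60000 }); // true
 *   isEnabled({ disabled: true });                       // false
 *   isEnabled({});                                       // true
 */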
  3093. Playlist.duration = duration;
  3094. Playlist.seekable = seekable;
  3095. Playlist.getMediaInfoForTime_ = getMediaInfoForTime_;
  3096. Playlist.isEnabled = isEnabled;
  3097. Playlist.isBlacklisted = isBlacklisted;
  3098. Playlist.playlistEnd = playlistEnd;
  3099. // exports
  3100. exports['default'] = Playlist;
  3101. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  3102. },{"global/window":30}],10:[function(require,module,exports){
  3103. (function (global){
  3104. /**
  3105. * ranges
  3106. *
  3107. * Utilities for working with TimeRanges.
  3108. *
  3109. */
  3110. 'use strict';
  3111. Object.defineProperty(exports, '__esModule', {
  3112. value: true
  3113. });
  3114. var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })();
  3115. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  3116. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  3117. var _videoJs2 = _interopRequireDefault(_videoJs);
  3118. // Fudge factor to account for TimeRanges rounding
  3119. var TIME_FUDGE_FACTOR = 1 / 30;
  3120. /**
  3121. * Clamps a value to within a range
  3122. * @param {Number} num - the value to clamp
  3123. * @param {Number} start - the start of the range to clamp within, inclusive
  3124. * @param {Number} end - the end of the range to clamp within, inclusive
  3125. * @return {Number}
  3126. */
  3127. var clamp = function clamp(num, _ref) {
  3128. var _ref2 = _slicedToArray(_ref, 2);
  3129. var start = _ref2[0];
  3130. var end = _ref2[1];
  3131. return Math.min(Math.max(start, num), end);
  3132. };
  3133. var filterRanges = function filterRanges(timeRanges, predicate) {
  3134. var results = [];
  3135. var i = undefined;
  3136. if (timeRanges && timeRanges.length) {
  3137. // Search for ranges that match the predicate
  3138. for (i = 0; i < timeRanges.length; i++) {
  3139. if (predicate(timeRanges.start(i), timeRanges.end(i))) {
  3140. results.push([timeRanges.start(i), timeRanges.end(i)]);
  3141. }
  3142. }
  3143. }
  3144. return _videoJs2['default'].createTimeRanges(results);
  3145. };
  3146. /**
  3147. * Attempts to find the buffered TimeRange that contains the specified
  3148. * time.
  3149. * @param {TimeRanges} buffered - the TimeRanges object to query
  3150. * @param {number} time - the time to filter on.
  3151. * @returns {TimeRanges} a new TimeRanges object
  3152. */
  3153. var findRange = function findRange(buffered, time) {
  3154. return filterRanges(buffered, function (start, end) {
  3155. return start - TIME_FUDGE_FACTOR <= time && end + TIME_FUDGE_FACTOR >= time;
  3156. });
  3157. };
  3158. /**
  3159. * Returns the TimeRanges that begin later than the specified time.
  3160. * @param {TimeRanges} timeRanges - the TimeRanges object to query
  3161. * @param {number} time - the time to filter on.
  3162. * @returns {TimeRanges} a new TimeRanges object.
  3163. */
  3164. var findNextRange = function findNextRange(timeRanges, time) {
  3165. return filterRanges(timeRanges, function (start) {
  3166. return start - TIME_FUDGE_FACTOR >= time;
  3167. });
  3168. };
  3169. /**
  3170. * Returns gaps within a list of TimeRanges
  3171. * @param {TimeRanges} buffered - the TimeRanges object
  3172. * @return {TimeRanges} a TimeRanges object of gaps
  3173. */
  3174. var findGaps = function findGaps(buffered) {
  3175. if (buffered.length < 2) {
  3176. return _videoJs2['default'].createTimeRanges();
  3177. }
  3178. var ranges = [];
  3179. for (var i = 1; i < buffered.length; i++) {
  3180. var start = buffered.end(i - 1);
  3181. var end = buffered.start(i);
  3182. ranges.push([start, end]);
  3183. }
  3184. return _videoJs2['default'].createTimeRanges(ranges);
  3185. };
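/**
 * Illustrative sketch (hypothetical buffered ranges): querying buffered
 * content with the helpers above.
 *
 *   var buffered = videojs.createTimeRanges([[0, 10], [20, 30]]);
 *   findRange(buffered, 5);      // TimeRanges [[0, 10]]
 *   findNextRange(buffered, 15); // TimeRanges [[20, 30]]
 *   findGaps(buffered);          // TimeRanges [[10, 20]]
 */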
  3186. /**
3187. * Search for a likely end time for the segment that was just appended
3188. * based on the state of the `buffered` property before and after the
3189. * append. If we find only one such uncommon end-point, return it.
  3190. * @param {TimeRanges} original - the buffered time ranges before the update
  3191. * @param {TimeRanges} update - the buffered time ranges after the update
  3192. * @returns {Number|null} the end time added between `original` and `update`,
  3193. * or null if one cannot be unambiguously determined.
  3194. */
  3195. var findSoleUncommonTimeRangesEnd = function findSoleUncommonTimeRangesEnd(original, update) {
  3196. var i = undefined;
  3197. var start = undefined;
  3198. var end = undefined;
  3199. var result = [];
  3200. var edges = [];
  3201. // In order to qualify as a possible candidate, the end point must:
  3202. // 1) Not have already existed in the `original` ranges
  3203. // 2) Not result from the shrinking of a range that already existed
  3204. // in the `original` ranges
  3205. // 3) Not be contained inside of a range that existed in `original`
  3206. var overlapsCurrentEnd = function overlapsCurrentEnd(span) {
  3207. return span[0] <= end && span[1] >= end;
  3208. };
  3209. if (original) {
  3210. // Save all the edges in the `original` TimeRanges object
  3211. for (i = 0; i < original.length; i++) {
  3212. start = original.start(i);
  3213. end = original.end(i);
  3214. edges.push([start, end]);
  3215. }
  3216. }
  3217. if (update) {
  3218. // Save any end-points in `update` that are not in the `original`
  3219. // TimeRanges object
  3220. for (i = 0; i < update.length; i++) {
  3221. start = update.start(i);
  3222. end = update.end(i);
  3223. if (edges.some(overlapsCurrentEnd)) {
  3224. continue;
  3225. }
  3226. // at this point it must be a unique non-shrinking end edge
  3227. result.push(end);
  3228. }
  3229. }
3230. // we err on the side of caution and return null if we didn't find
  3231. // exactly *one* differing end edge in the search above
  3232. if (result.length !== 1) {
  3233. return null;
  3234. }
  3235. return result[0];
  3236. };
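/**
 * Illustrative sketch (hypothetical values): after an append grows the
 * buffer from [[0, 10]] to [[0, 20]], the single new end edge is 20.
 *
 *   var original = videojs.createTimeRanges([[0, 10]]);
 *   var update = videojs.createTimeRanges([[0, 20]]);
 *   findSoleUncommonTimeRangesEnd(original, update); // 20
 */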
  3237. /**
  3238. * Calculate the intersection of two TimeRanges
  3239. * @param {TimeRanges} bufferA
  3240. * @param {TimeRanges} bufferB
3241. * @returns {TimeRanges} The intersection of `bufferA` with `bufferB`
  3242. */
  3243. var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
  3244. var start = null;
  3245. var end = null;
  3246. var arity = 0;
  3247. var extents = [];
  3248. var ranges = [];
  3249. if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
  3250. return _videoJs2['default'].createTimeRange();
  3251. }
  3252. // Handle the case where we have both buffers and create an
  3253. // intersection of the two
  3254. var count = bufferA.length;
  3255. // A) Gather up all start and end times
  3256. while (count--) {
  3257. extents.push({ time: bufferA.start(count), type: 'start' });
  3258. extents.push({ time: bufferA.end(count), type: 'end' });
  3259. }
  3260. count = bufferB.length;
  3261. while (count--) {
  3262. extents.push({ time: bufferB.start(count), type: 'start' });
  3263. extents.push({ time: bufferB.end(count), type: 'end' });
  3264. }
  3265. // B) Sort them by time
  3266. extents.sort(function (a, b) {
  3267. return a.time - b.time;
  3268. });
  3269. // C) Go along one by one incrementing arity for start and decrementing
  3270. // arity for ends
  3271. for (count = 0; count < extents.length; count++) {
  3272. if (extents[count].type === 'start') {
  3273. arity++;
  3274. // D) If arity is ever incremented to 2 we are entering an
  3275. // overlapping range
  3276. if (arity === 2) {
  3277. start = extents[count].time;
  3278. }
  3279. } else if (extents[count].type === 'end') {
  3280. arity--;
3281. // E) If arity is ever decremented to 1 we are leaving an
  3282. // overlapping range
  3283. if (arity === 1) {
  3284. end = extents[count].time;
  3285. }
  3286. }
  3287. // F) Record overlapping ranges
  3288. if (start !== null && end !== null) {
  3289. ranges.push([start, end]);
  3290. start = null;
  3291. end = null;
  3292. }
  3293. }
  3294. return _videoJs2['default'].createTimeRanges(ranges);
  3295. };
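/**
 * Illustrative sketch (hypothetical values): the arity sweep above finds
 * the region covered by both inputs.
 *
 *   var a = videojs.createTimeRanges([[0, 10]]);
 *   var b = videojs.createTimeRanges([[5, 15]]);
 *   bufferIntersection(a, b); // TimeRanges [[5, 10]]
 */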
  3296. /**
  3297. * Calculates the percentage of `segmentRange` that overlaps the
  3298. * `buffered` time ranges.
3299. * @param {TimeRanges} adjustedRange - the time range that the segment
  3300. * covers adjusted according to currentTime
  3301. * @param {TimeRanges} referenceRange - the original time range that the
  3302. * segment covers
  3303. * @param {Number} currentTime - time in seconds where the current playback
  3304. * is at
  3305. * @param {TimeRanges} buffered - the currently buffered time ranges
  3306. * @returns {Number} percent of the segment currently buffered
  3307. */
  3308. var calculateBufferedPercent = function calculateBufferedPercent(adjustedRange, referenceRange, currentTime, buffered) {
  3309. var referenceDuration = referenceRange.end(0) - referenceRange.start(0);
  3310. var adjustedDuration = adjustedRange.end(0) - adjustedRange.start(0);
  3311. var bufferMissingFromAdjusted = referenceDuration - adjustedDuration;
  3312. var adjustedIntersection = bufferIntersection(adjustedRange, buffered);
  3313. var referenceIntersection = bufferIntersection(referenceRange, buffered);
  3314. var adjustedOverlap = 0;
  3315. var referenceOverlap = 0;
  3316. var count = adjustedIntersection.length;
  3317. while (count--) {
  3318. adjustedOverlap += adjustedIntersection.end(count) - adjustedIntersection.start(count);
  3319. // If the current overlap segment starts at currentTime, then increase the
  3320. // overlap duration so that it actually starts at the beginning of referenceRange
  3321. // by including the difference between the two Range's durations
  3322. // This is a work around for the way Flash has no buffer before currentTime
  3323. if (adjustedIntersection.start(count) === currentTime) {
  3324. adjustedOverlap += bufferMissingFromAdjusted;
  3325. }
  3326. }
  3327. count = referenceIntersection.length;
  3328. while (count--) {
  3329. referenceOverlap += referenceIntersection.end(count) - referenceIntersection.start(count);
  3330. }
  3331. // Use whichever value is larger for the percentage-buffered since that value
3332. // is likely more accurate (the two only differ because of the currentTime adjustment above)
  3333. return Math.max(adjustedOverlap, referenceOverlap) / referenceDuration * 100;
  3334. };
  3335. /**
3336. * Return how much of the range specified by startOfSegment and segmentDuration
3337. * overlaps the currently buffered content, as a percentage.
  3338. *
  3339. * @param {Number} startOfSegment - the time where the segment begins
  3340. * @param {Number} segmentDuration - the duration of the segment in seconds
  3341. * @param {Number} currentTime - time in seconds where the current playback
  3342. * is at
  3343. * @param {TimeRanges} buffered - the state of the buffer
  3344. * @returns {Number} percentage of the segment's time range that is
  3345. * already in `buffered`
  3346. */
  3347. var getSegmentBufferedPercent = function getSegmentBufferedPercent(startOfSegment, segmentDuration, currentTime, buffered) {
  3348. var endOfSegment = startOfSegment + segmentDuration;
  3349. // The entire time range of the segment
  3350. var originalSegmentRange = _videoJs2['default'].createTimeRanges([[startOfSegment, endOfSegment]]);
3351. // The adjusted segment time range that is set up such that it starts
3352. // no earlier than currentTime
3353. // Flash has no notion of a back-buffer so adjustedSegmentRange adjusts
3354. // for that and the function will still return 100% if only half of a
  3355. // segment is actually in the buffer as long as the currentTime is also
  3356. // half-way through the segment
  3357. var adjustedSegmentRange = _videoJs2['default'].createTimeRanges([[clamp(startOfSegment, [currentTime, endOfSegment]), endOfSegment]]);
  3358. // This condition happens when the currentTime is beyond the segment's
  3359. // end time
  3360. if (adjustedSegmentRange.start(0) === adjustedSegmentRange.end(0)) {
  3361. return 0;
  3362. }
  3363. var percent = calculateBufferedPercent(adjustedSegmentRange, originalSegmentRange, currentTime, buffered);
  3364. // If the segment is reported as having a zero duration, return 0%
  3365. // since it is likely that we will need to fetch the segment
  3366. if (isNaN(percent) || percent === Infinity || percent === -Infinity) {
  3367. return 0;
  3368. }
  3369. return percent;
  3370. };
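/**
 * Illustrative sketch (hypothetical values): the currentTime adjustment
 * means a half-buffered segment still reports 100% when playback sits at
 * the midpoint (the Flash back-buffer workaround described above).
 *
 *   var buffered = videojs.createTimeRanges([[5, 10]]);
 *   getSegmentBufferedPercent(0, 10, 5, buffered); // 100
 */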
  3371. /**
  3372. * Gets a human readable string for a TimeRange
  3373. *
  3374. * @param {TimeRange} range
  3375. * @returns {String} a human readable string
  3376. */
  3377. var printableRange = function printableRange(range) {
  3378. var strArr = [];
  3379. if (!range || !range.length) {
  3380. return '';
  3381. }
  3382. for (var i = 0; i < range.length; i++) {
  3383. strArr.push(range.start(i) + ' => ' + range.end(i));
  3384. }
  3385. return strArr.join(', ');
  3386. };
  3387. exports['default'] = {
  3388. findRange: findRange,
  3389. findNextRange: findNextRange,
  3390. findGaps: findGaps,
  3391. findSoleUncommonTimeRangesEnd: findSoleUncommonTimeRangesEnd,
  3392. getSegmentBufferedPercent: getSegmentBufferedPercent,
  3393. TIME_FUDGE_FACTOR: TIME_FUDGE_FACTOR,
  3394. printableRange: printableRange
  3395. };
  3396. module.exports = exports['default'];
  3397. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  3398. },{}],11:[function(require,module,exports){
  3399. (function (global){
  3400. 'use strict';
  3401. Object.defineProperty(exports, '__esModule', {
  3402. value: true
  3403. });
  3404. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  3405. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  3406. var _videoJs2 = _interopRequireDefault(_videoJs);
  3407. var defaultOptions = {
  3408. errorInterval: 30,
  3409. getSource: function getSource(next) {
  3410. var tech = this.tech({ IWillNotUseThisInPlugins: true });
  3411. var sourceObj = tech.currentSource_;
  3412. return next(sourceObj);
  3413. }
  3414. };
  3415. /**
  3416. * Main entry point for the plugin
  3417. *
  3418. * @param {Player} player a reference to a videojs Player instance
  3419. * @param {Object} [options] an object with plugin options
  3420. * @private
  3421. */
  3422. var initPlugin = function initPlugin(player, options) {
  3423. var lastCalled = 0;
  3424. var seekTo = 0;
  3425. var localOptions = _videoJs2['default'].mergeOptions(defaultOptions, options);
  3426. /**
  3427. * Player modifications to perform that must wait until `loadedmetadata`
  3428. * has been triggered
  3429. *
  3430. * @private
  3431. */
  3432. var loadedMetadataHandler = function loadedMetadataHandler() {
  3433. if (seekTo) {
  3434. player.currentTime(seekTo);
  3435. }
  3436. };
  3437. /**
  3438. * Set the source on the player element, play, and seek if necessary
  3439. *
  3440. * @param {Object} sourceObj An object specifying the source url and mime-type to play
  3441. * @private
  3442. */
  3443. var setSource = function setSource(sourceObj) {
  3444. if (sourceObj === null || sourceObj === undefined) {
  3445. return;
  3446. }
  3447. seekTo = player.duration() !== Infinity && player.currentTime() || 0;
  3448. player.one('loadedmetadata', loadedMetadataHandler);
  3449. player.src(sourceObj);
  3450. player.play();
  3451. };
  3452. /**
  3453. * Attempt to get a source from either the built-in getSource function
  3454. * or a custom function provided via the options
  3455. *
  3456. * @private
  3457. */
  3458. var errorHandler = function errorHandler() {
  3459. // Do not attempt to reload the source if a source-reload occurred before
  3460. // 'errorInterval' time has elapsed since the last source-reload
  3461. if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
  3462. return;
  3463. }
  3464. if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
  3465. _videoJs2['default'].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
  3466. return;
  3467. }
  3468. lastCalled = Date.now();
  3469. return localOptions.getSource.call(player, setSource);
  3470. };
  3471. /**
  3472. * Unbind any event handlers that were bound by the plugin
  3473. *
  3474. * @private
  3475. */
  3476. var cleanupEvents = function cleanupEvents() {
  3477. player.off('loadedmetadata', loadedMetadataHandler);
  3478. player.off('error', errorHandler);
  3479. player.off('dispose', cleanupEvents);
  3480. };
  3481. /**
  3482. * Cleanup before re-initializing the plugin
  3483. *
  3484. * @param {Object} [newOptions] an object with plugin options
  3485. * @private
  3486. */
  3487. var reinitPlugin = function reinitPlugin(newOptions) {
  3488. cleanupEvents();
  3489. initPlugin(player, newOptions);
  3490. };
  3491. player.on('error', errorHandler);
  3492. player.on('dispose', cleanupEvents);
  3493. // Overwrite the plugin function so that we can correctly cleanup before
  3494. // initializing the plugin
  3495. player.reloadSourceOnError = reinitPlugin;
  3496. };
  3497. /**
  3498. * Reload the source when an error is detected as long as there
  3499. * wasn't an error previously within the last 30 seconds
  3500. *
  3501. * @param {Object} [options] an object with plugin options
  3502. */
  3503. var reloadSourceOnError = function reloadSourceOnError(options) {
  3504. initPlugin(this, options);
  3505. };
  3506. exports['default'] = reloadSourceOnError;
  3507. module.exports = exports['default'];
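/**
 * Illustrative usage sketch (hypothetical URL): register the plugin with
 * a shorter retry interval and a custom source getter. `getSource` is
 * handed a `next` callback that receives the source object to load.
 *
 *   player.reloadSourceOnError({
 *     errorInterval: 10, // seconds between reload attempts
 *     getSource: function(next) {
 *       next({
 *         src: 'http://example.com/live.m3u8',
 *         type: 'application/x-mpegURL'
 *       });
 *     }
 *   });
 */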
  3508. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  3509. },{}],12:[function(require,module,exports){
  3510. 'use strict';
  3511. Object.defineProperty(exports, '__esModule', {
  3512. value: true
  3513. });
  3514. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  3515. var _playlistJs = require('./playlist.js');
  3516. /**
3517. * Enable/disable playlist function. It is intended to have the first three
3518. * arguments partially-applied in order to create the final per-playlist
3519. * function.
3520. *
3521. * @param {PlaylistLoader} loader - The playlist loader whose master contains the rendition, followed by the rendition's playlist URI
  3522. * @param {Function} changePlaylistFn - A function to be called after a
  3523. * playlist's enabled-state has been changed. Will NOT be called if a
  3524. * playlist's enabled-state is unchanged
  3525. * @param {Boolean=} enable - Value to set the playlist enabled-state to
  3526. * or if undefined returns the current enabled-state for the playlist
  3527. * @return {Boolean} The current enabled-state of the playlist
  3528. */
  3529. var enableFunction = function enableFunction(loader, playlistUri, changePlaylistFn, enable) {
  3530. var playlist = loader.master.playlists[playlistUri];
  3531. var blacklisted = (0, _playlistJs.isBlacklisted)(playlist);
  3532. var currentlyEnabled = (0, _playlistJs.isEnabled)(playlist);
  3533. if (typeof enable === 'undefined') {
  3534. return currentlyEnabled;
  3535. }
  3536. if (enable) {
  3537. delete playlist.disabled;
  3538. } else {
  3539. playlist.disabled = true;
  3540. }
  3541. if (enable !== currentlyEnabled && !blacklisted) {
  3542. // Ensure the outside world knows about our changes
  3543. changePlaylistFn();
  3544. }
  3545. return enable;
  3546. };
  3547. /**
  3548. * The representation object encapsulates the publicly visible information
  3549. * in a media playlist along with a setter/getter-type function (enabled)
  3550. * for changing the enabled-state of a particular playlist entry
  3551. *
  3552. * @class Representation
  3553. */
  3554. var Representation = function Representation(hlsHandler, playlist, id) {
  3555. _classCallCheck(this, Representation);
  3556. // Get a reference to a bound version of fastQualityChange_
  3557. var fastChangeFunction = hlsHandler.masterPlaylistController_.fastQualityChange_.bind(hlsHandler.masterPlaylistController_);
  3558. // Carefully descend into the playlist's attributes since most
  3559. // properties are optional
  3560. if (playlist.attributes) {
  3561. var attributes = playlist.attributes;
  3562. if (attributes.RESOLUTION) {
  3563. var resolution = attributes.RESOLUTION;
  3564. this.width = resolution.width;
  3565. this.height = resolution.height;
  3566. }
  3567. this.bandwidth = attributes.BANDWIDTH;
  3568. }
  3569. // The id is simply the ordinality of the media playlist
  3570. // within the master playlist
  3571. this.id = id;
  3572. // Partially-apply the enableFunction to create a playlist-
  3573. // specific variant
  3574. this.enabled = enableFunction.bind(this, hlsHandler.playlists, playlist.uri, fastChangeFunction);
  3575. }
  3576. /**
  3577. * A mixin function that adds the `representations` api to an instance
  3578. * of the HlsHandler class
  3579. * @param {HlsHandler} hlsHandler - An instance of HlsHandler to add the
  3580. * representation API into
  3581. */
  3582. ;
  3583. var renditionSelectionMixin = function renditionSelectionMixin(hlsHandler) {
  3584. var playlists = hlsHandler.playlists;
  3585. // Add a single API-specific function to the HlsHandler instance
  3586. hlsHandler.representations = function () {
  3587. return playlists.master.playlists.filter(function (media) {
  3588. return !(0, _playlistJs.isBlacklisted)(media);
  3589. }).map(function (e, i) {
  3590. return new Representation(hlsHandler, e, e.uri);
  3591. });
  3592. };
  3593. };
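/**
 * Illustrative usage sketch, assuming the HlsHandler instance is exposed
 * as `player.hls`: list the renditions and disable the first one, which
 * invokes the partially-applied enableFunction above and triggers a fast
 * quality change.
 *
 *   var representations = player.hls.representations();
 *   representations[0].bandwidth;      // e.g. 4194304
 *   representations[0].enabled(false); // drop it from adaptive selection
 */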
  3594. exports['default'] = renditionSelectionMixin;
  3595. module.exports = exports['default'];
  3596. },{"./playlist.js":9}],13:[function(require,module,exports){
  3597. /**
  3598. * @file resolve-url.js
  3599. */
  3600. 'use strict';
  3601. Object.defineProperty(exports, '__esModule', {
  3602. value: true
  3603. });
  3604. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  3605. var _urlToolkit = require('url-toolkit');
  3606. var _urlToolkit2 = _interopRequireDefault(_urlToolkit);
  3607. var _globalWindow = require('global/window');
  3608. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  3609. var resolveUrl = function resolveUrl(baseURL, relativeURL) {
  3610. // return early if we don't need to resolve
  3611. if (/^[a-z]+:/i.test(relativeURL)) {
  3612. return relativeURL;
  3613. }
  3614. // if the base URL is relative then combine with the current location
  3615. if (!/\/\//i.test(baseURL)) {
  3616. baseURL = _urlToolkit2['default'].buildAbsoluteURL(_globalWindow2['default'].location.href, baseURL);
  3617. }
  3618. return _urlToolkit2['default'].buildAbsoluteURL(baseURL, relativeURL);
  3619. };
  3620. exports['default'] = resolveUrl;
  3621. module.exports = exports['default'];
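/**
 * Illustrative sketch (hypothetical URLs):
 *
 *   resolveUrl('http://example.com/hls/master.m3u8', 'media-1.m3u8');
 *   // 'http://example.com/hls/media-1.m3u8'
 *   resolveUrl('http://example.com/hls/', 'https://cdn.example.com/s.ts');
 *   // returned unchanged -- already absolute
 */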
  3622. },{"global/window":30,"url-toolkit":61}],14:[function(require,module,exports){
  3623. (function (global){
  3624. /**
  3625. * @file segment-loader.js
  3626. */
  3627. 'use strict';
  3628. Object.defineProperty(exports, '__esModule', {
  3629. value: true
  3630. });
  3631. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  3632. var _get = function get(_x3, _x4, _x5) { var _again = true; _function: while (_again) { var object = _x3, property = _x4, receiver = _x5; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x3 = parent; _x4 = property; _x5 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  3633. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  3634. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  3635. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  3636. var _playlist = require('./playlist');
  3637. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  3638. var _videoJs2 = _interopRequireDefault(_videoJs);
  3639. var _sourceUpdater = require('./source-updater');
  3640. var _sourceUpdater2 = _interopRequireDefault(_sourceUpdater);
  3641. var _config = require('./config');
  3642. var _config2 = _interopRequireDefault(_config);
  3643. var _globalWindow = require('global/window');
  3644. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  3645. var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs = require('videojs-contrib-media-sources/es5/remove-cues-from-track.js');
  3646. var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2 = _interopRequireDefault(_videojsContribMediaSourcesEs5RemoveCuesFromTrackJs);
  3647. var _binUtils = require('./bin-utils');
  3648. var _mediaSegmentRequest = require('./media-segment-request');
  3649. // in ms
  3650. var CHECK_BUFFER_DELAY = 500;
  3651. /**
  3652. * Determines if we should call endOfStream on the media source based
3653. * on the state of the buffer or if the appended segment was the final
  3654. * segment in the playlist.
  3655. *
  3656. * @param {Object} playlist a media playlist object
  3657. * @param {Object} mediaSource the MediaSource object
  3658. * @param {Number} segmentIndex the index of segment we last appended
  3659. * @returns {Boolean} do we need to call endOfStream on the MediaSource
  3660. */
  3661. var detectEndOfStream = function detectEndOfStream(playlist, mediaSource, segmentIndex) {
  3662. if (!playlist || !mediaSource) {
  3663. return false;
  3664. }
  3665. var segments = playlist.segments;
  3666. // determine a few boolean values to help make the branch below easier
  3667. // to read
  3668. var appendedLastSegment = segmentIndex === segments.length;
  3669. // if we've buffered to the end of the video, we need to call endOfStream
  3670. // so that MediaSources can trigger the `ended` event when it runs out of
3671. // buffered data instead of waiting for more data to be appended
  3672. return playlist.endList && mediaSource.readyState === 'open' && appendedLastSegment;
  3673. };
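/**
 * Illustrative sketch (hypothetical values): endOfStream is only wanted
 * once the final segment of a VOD playlist has been appended.
 *
 *   var playlist = { endList: true, segments: [{}, {}, {}] };
 *   detectEndOfStream(playlist, { readyState: 'open' }, 3); // true
 *   detectEndOfStream(playlist, { readyState: 'open' }, 2); // false
 */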
  3674. /**
  3675. * An object that manages segment loading and appending.
  3676. *
  3677. * @class SegmentLoader
  3678. * @param {Object} options required and optional options
  3679. * @extends videojs.EventTarget
  3680. */
  3681. var SegmentLoader = (function (_videojs$EventTarget) {
  3682. _inherits(SegmentLoader, _videojs$EventTarget);
  3683. function SegmentLoader(options) {
  3684. var _this = this;
  3685. _classCallCheck(this, SegmentLoader);
  3686. _get(Object.getPrototypeOf(SegmentLoader.prototype), 'constructor', this).call(this);
  3687. // check pre-conditions
  3688. if (!options) {
  3689. throw new TypeError('Initialization options are required');
  3690. }
  3691. if (typeof options.currentTime !== 'function') {
  3692. throw new TypeError('No currentTime getter specified');
  3693. }
  3694. if (!options.mediaSource) {
  3695. throw new TypeError('No MediaSource specified');
  3696. }
  3697. var settings = _videoJs2['default'].mergeOptions(_videoJs2['default'].options.hls, options);
  3698. // public properties
  3699. this.state = 'INIT';
  3700. this.bandwidth = settings.bandwidth;
  3701. this.throughput = { rate: 0, count: 0 };
  3702. this.roundTrip = NaN;
  3703. this.resetStats_();
  3704. this.mediaIndex = null;
  3705. // private settings
  3706. this.hasPlayed_ = settings.hasPlayed;
  3707. this.currentTime_ = settings.currentTime;
  3708. this.seekable_ = settings.seekable;
  3709. this.seeking_ = settings.seeking;
  3710. this.duration_ = settings.duration;
  3711. this.mediaSource_ = settings.mediaSource;
  3712. this.hls_ = settings.hls;
  3713. this.loaderType_ = settings.loaderType;
  3714. this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
  3715. // private instance variables
  3716. this.checkBufferTimeout_ = null;
  3717. this.error_ = void 0;
  3718. this.currentTimeline_ = -1;
  3719. this.pendingSegment_ = null;
  3720. this.mimeType_ = null;
  3721. this.sourceUpdater_ = null;
  3722. this.xhrOptions_ = null;
  3723. // Fragmented mp4 playback
  3724. this.activeInitSegmentId_ = null;
  3725. this.initSegments_ = {};
  3726. this.decrypter_ = settings.decrypter;
  3727. // Manages the tracking and generation of sync-points, mappings
  3728. // between a time in the display time and a segment index within
  3729. // a playlist
  3730. this.syncController_ = settings.syncController;
  3731. this.syncPoint_ = {
  3732. segmentIndex: 0,
  3733. time: 0
  3734. };
  3735. this.syncController_.on('syncinfoupdate', function () {
  3736. return _this.trigger('syncinfoupdate');
  3737. });
  3738. // ...for determining the fetch location
  3739. this.fetchAtBuffer_ = false;
  3740. if (settings.debug) {
  3741. this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'segment-loader', this.loaderType_, '->');
  3742. }
  3743. }
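/**
 * Illustrative construction sketch (hypothetical collaborators): the
 * checks above only hard-require an options object with a `currentTime`
 * getter and a `mediaSource`, but a usable loader also needs the settings
 * the master playlist controller normally wires in, e.g. a
 * `syncController` (used immediately for the 'syncinfoupdate' listener):
 *
 *   var loader = new SegmentLoader({
 *     currentTime: function() { return player.currentTime(); },
 *     mediaSource: mediaSource,
 *     syncController: syncController,
 *     hasPlayed: function() { return true; },
 *     loaderType: 'main'
 *   });
 */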
  3744. /**
  3745. * reset all of our media stats
  3746. *
  3747. * @private
  3748. */
  3749. _createClass(SegmentLoader, [{
  3750. key: 'resetStats_',
  3751. value: function resetStats_() {
  3752. this.mediaBytesTransferred = 0;
  3753. this.mediaRequests = 0;
  3754. this.mediaRequestsAborted = 0;
  3755. this.mediaRequestsTimedout = 0;
  3756. this.mediaRequestsErrored = 0;
  3757. this.mediaTransferDuration = 0;
  3758. this.mediaSecondsLoaded = 0;
  3759. }
  3760. /**
  3761. * dispose of the SegmentLoader and reset to the default state
  3762. */
  3763. }, {
  3764. key: 'dispose',
  3765. value: function dispose() {
  3766. this.state = 'DISPOSED';
  3767. this.abort_();
  3768. if (this.sourceUpdater_) {
  3769. this.sourceUpdater_.dispose();
  3770. }
  3771. this.resetStats_();
  3772. }
  3773. /**
3774. * abort anything that is currently going on with the SegmentLoader
  3775. * and reset to a default state
  3776. */
  3777. }, {
  3778. key: 'abort',
  3779. value: function abort() {
  3780. if (this.state !== 'WAITING') {
  3781. if (this.pendingSegment_) {
  3782. this.pendingSegment_ = null;
  3783. }
  3784. return;
  3785. }
  3786. this.abort_();
  3787. // don't wait for buffer check timeouts to begin fetching the
  3788. // next segment
  3789. if (!this.paused()) {
  3790. this.state = 'READY';
  3791. this.monitorBuffer_();
  3792. }
  3793. }
  3794. /**
3795. * abort all pending xhr requests and null any pending segments
  3796. *
  3797. * @private
  3798. */
  3799. }, {
  3800. key: 'abort_',
  3801. value: function abort_() {
  3802. if (this.pendingSegment_) {
  3803. this.pendingSegment_.abortRequests();
  3804. }
  3805. // clear out the segment being processed
  3806. this.pendingSegment_ = null;
  3807. }
  3808. /**
3809. * set an error on the segment loader and null out any pending segments
  3810. *
  3811. * @param {Error} error the error to set on the SegmentLoader
  3812. * @return {Error} the error that was set or that is currently set
  3813. */
  3814. }, {
  3815. key: 'error',
  3816. value: function error(_error) {
  3817. if (typeof _error !== 'undefined') {
  3818. this.error_ = _error;
  3819. }
  3820. this.pendingSegment_ = null;
  3821. return this.error_;
  3822. }
  3823. /**
  3824. * Indicates which time ranges are buffered
  3825. *
  3826. * @return {TimeRange}
  3827. * TimeRange object representing the current buffered ranges
  3828. */
  3829. }, {
  3830. key: 'buffered_',
  3831. value: function buffered_() {
  3832. if (!this.sourceUpdater_) {
  3833. return _videoJs2['default'].createTimeRanges();
  3834. }
  3835. return this.sourceUpdater_.buffered();
  3836. }
  3837. /**
  3838. * Gets and sets init segment for the provided map
  3839. *
  3840. * @param {Object} map
  3841. * The map object representing the init segment to get or set
  3842. * @param {Boolean=} set
  3843. * If true, the init segment for the provided map should be saved
  3844. * @return {Object}
  3845. * map object for desired init segment
  3846. */
  3847. }, {
  3848. key: 'initSegment',
  3849. value: function initSegment(map) {
  3850. var set = arguments.length <= 1 || arguments[1] === undefined ? false : arguments[1];
  3851. if (!map) {
  3852. return null;
  3853. }
  3854. var id = (0, _binUtils.initSegmentId)(map);
  3855. var storedMap = this.initSegments_[id];
  3856. if (set && !storedMap && map.bytes) {
  3857. this.initSegments_[id] = storedMap = {
  3858. resolvedUri: map.resolvedUri,
  3859. byterange: map.byterange,
  3860. bytes: map.bytes
  3861. };
  3862. }
  3863. return storedMap || map;
  3864. }
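/**
 * Illustrative sketch (assuming initSegmentId derives its key from the
 * map's resolvedUri/byterange): the first call with `set` caches the init
 * segment bytes, and an equivalent map later returns the cached copy.
 *
 *   loader.initSegment({ resolvedUri: uri, byterange: range, bytes: data }, true);
 *   loader.initSegment({ resolvedUri: uri, byterange: range });
 *   // second call returns the stored map, bytes included
 */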
  3865. /**
  3866. * Returns true if all configuration required for loading is present, otherwise false.
  3867. *
3868. * @return {Boolean} True if all the configuration is ready for loading
  3869. * @private
  3870. */
  3871. }, {
  3872. key: 'couldBeginLoading_',
  3873. value: function couldBeginLoading_() {
  3874. return this.playlist_ && (
  3875. // the source updater is created when init_ is called, so either having a
  3876. // source updater or being in the INIT state with a mimeType is enough
  3877. // to say we have all the needed configuration to start loading.
  3878. this.sourceUpdater_ || this.mimeType_ && this.state === 'INIT') && !this.paused();
  3879. }
  3880. /**
  3881. * load a playlist and start to fill the buffer
  3882. */
  3883. }, {
  3884. key: 'load',
  3885. value: function load() {
  3886. // un-pause
  3887. this.monitorBuffer_();
  3888. // if we don't have a playlist yet, keep waiting for one to be
  3889. // specified
  3890. if (!this.playlist_) {
  3891. return;
  3892. }
  3893. // not sure if this is the best place for this
  3894. this.syncController_.setDateTimeMapping(this.playlist_);
  3895. // if all the configuration is ready, initialize and begin loading
  3896. if (this.state === 'INIT' && this.couldBeginLoading_()) {
  3897. return this.init_();
  3898. }
  3899. // if we're in the middle of processing a segment already, don't
  3900. // kick off an additional segment request
  3901. if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
  3902. return;
  3903. }
  3904. this.state = 'READY';
  3905. }
  3906. /**
  3907. * Once all the starting parameters have been specified, begin
  3908. * operation. This method should only be invoked from the INIT
  3909. * state.
  3910. *
  3911. * @private
  3912. */
  3913. }, {
  3914. key: 'init_',
  3915. value: function init_() {
  3916. this.state = 'READY';
  3917. this.sourceUpdater_ = new _sourceUpdater2['default'](this.mediaSource_, this.mimeType_);
  3918. this.resetEverything();
  3919. return this.monitorBuffer_();
  3920. }
  3921. /**
  3922. * set a playlist on the segment loader
  3923. *
  3924. * @param {PlaylistLoader} media the playlist to set on the segment loader
  3925. */
  3926. }, {
  3927. key: 'playlist',
  3928. value: function playlist(newPlaylist) {
  3929. var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
  3930. if (!newPlaylist) {
  3931. return;
  3932. }
  3933. var oldPlaylist = this.playlist_;
  3934. var segmentInfo = this.pendingSegment_;
  3935. this.playlist_ = newPlaylist;
  3936. this.xhrOptions_ = options;
  3937. // when we haven't started playing yet, the start of a live playlist
  3938. // is always our zero-time so force a sync update each time the playlist
  3939. // is refreshed from the server
  3940. if (!this.hasPlayed_()) {
  3941. newPlaylist.syncInfo = {
  3942. mediaSequence: newPlaylist.mediaSequence,
  3943. time: 0
  3944. };
  3945. }
  3946. // in VOD, this is always a rendition switch (or we updated our syncInfo above)
  3947. // in LIVE, we always want to update with new playlists (including refreshes)
  3948. this.trigger('syncinfoupdate');
  3949. // if we were unpaused but waiting for a playlist, start
  3950. // buffering now
  3951. if (this.state === 'INIT' && this.couldBeginLoading_()) {
  3952. return this.init_();
  3953. }
  3954. if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
  3955. if (this.mediaIndex !== null) {
  3956. // we must "resync" the segment loader when we switch renditions and
  3957. // the segment loader is already synced to the previous rendition
  3958. this.resyncLoader();
  3959. }
  3960. // the rest of this function depends on `oldPlaylist` being defined
  3961. return;
  3962. }
  3963. // we reloaded the same playlist so we are in a live scenario
  3964. // and we will likely need to adjust the mediaIndex
  3965. var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
  3966. this.logger_('mediaSequenceDiff', mediaSequenceDiff);
  3967. // update the mediaIndex on the SegmentLoader
  3968. // this is important because we can abort a request and this value must be
  3969. // equal to the last appended mediaIndex
  3970. if (this.mediaIndex !== null) {
  3971. this.mediaIndex -= mediaSequenceDiff;
  3972. }
  3973. // update the mediaIndex on the SegmentInfo object
  3974. // this is important because we will update this.mediaIndex with this value
  3975. // in `handleUpdateEnd_` after the segment has been successfully appended
  3976. if (segmentInfo) {
  3977. segmentInfo.mediaIndex -= mediaSequenceDiff;
  3978. // we need to update the referenced segment so that timing information is
  3979. // saved for the new playlist's segment, however, if the segment fell off the
  3980. // playlist, we can leave the old reference and just lose the timing info
  3981. if (segmentInfo.mediaIndex >= 0) {
  3982. segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
  3983. }
  3984. }
  3985. this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
  3986. }
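/**
 * Illustrative sketch (hypothetical values): when a live playlist is
 * refreshed and its media sequence advances from 100 to 102, the diff
 * computed above is 2, so an index of 5 becomes 3 and keeps pointing at
 * the same physical segment.
 *
 *   // newPlaylist.mediaSequence (102) - oldPlaylist.mediaSequence (100) === 2
 *   // this.mediaIndex: 5 -> 3
 */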
  3987. /**
  3988. * Prevent the loader from fetching additional segments. If there
  3989. * is a segment request outstanding, it will finish processing
  3990. * before the loader halts. A segment loader can be unpaused by
  3991. * calling load().
  3992. */
  3993. }, {
  3994. key: 'pause',
  3995. value: function pause() {
  3996. if (this.checkBufferTimeout_) {
  3997. _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
  3998. this.checkBufferTimeout_ = null;
  3999. }
  4000. }
  4001. /**
  4002. * Returns whether the segment loader is fetching additional
  4003. * segments when given the opportunity. This property can be
  4004. * modified through calls to pause() and load().
  4005. */
  4006. }, {
  4007. key: 'paused',
  4008. value: function paused() {
  4009. return this.checkBufferTimeout_ === null;
  4010. }
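/*
 * Illustrative sketch (not part of the library): pause() and paused() are
 * two views of the same timer. Clearing checkBufferTimeout_ halts the
 * buffer-monitoring loop, and paused() reports true exactly when no check
 * is scheduled. Assuming a constructed SegmentLoader `loader`:
 *
 *   loader.pause();   // clears any pending monitorBufferTick_ timeout
 *   loader.paused();  // => true, checkBufferTimeout_ is now null
 *   loader.load();    // unpauses and reschedules the monitoring loop
 *   loader.paused();  // => false
 */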
  4011. /**
4012. * create/set the given mime type on the SourceBuffer through a
  4013. * SourceUpdater
  4014. *
  4015. * @param {String} mimeType the mime type string to use
  4016. */
  4017. }, {
  4018. key: 'mimeType',
  4019. value: function mimeType(_mimeType) {
  4020. if (this.mimeType_) {
  4021. return;
  4022. }
  4023. this.mimeType_ = _mimeType;
  4024. // if we were unpaused but waiting for a sourceUpdater, start
  4025. // buffering now
  4026. if (this.state === 'INIT' && this.couldBeginLoading_()) {
  4027. this.init_();
  4028. }
  4029. }
  4030. /**
  4031. * Delete all the buffered data and reset the SegmentLoader
  4032. */
  4033. }, {
  4034. key: 'resetEverything',
  4035. value: function resetEverything() {
  4036. this.resetLoader();
  4037. this.remove(0, Infinity);
  4038. }
  4039. /**
  4040. * Force the SegmentLoader to resync and start loading around the currentTime instead
  4041. * of starting at the end of the buffer
  4042. *
  4043. * Useful for fast quality changes
  4044. */
  4045. }, {
  4046. key: 'resetLoader',
  4047. value: function resetLoader() {
  4048. this.fetchAtBuffer_ = false;
  4049. this.resyncLoader();
  4050. }
  4051. /**
  4052. * Force the SegmentLoader to restart synchronization and make a conservative guess
  4053. * before returning to the simple walk-forward method
  4054. */
  4055. }, {
  4056. key: 'resyncLoader',
  4057. value: function resyncLoader() {
  4058. this.mediaIndex = null;
  4059. this.syncPoint_ = null;
  4060. }
  4061. /**
  4062. * Remove any data in the source buffer between start and end times
  4063. * @param {Number} start - the start time of the region to remove from the buffer
  4064. * @param {Number} end - the end time of the region to remove from the buffer
  4065. */
  4066. }, {
  4067. key: 'remove',
  4068. value: function remove(start, end) {
  4069. if (this.sourceUpdater_) {
  4070. this.sourceUpdater_.remove(start, end);
  4071. }
  4072. (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.segmentMetadataTrack_);
  4073. }
  4074. /**
  4075. * (re-)schedule monitorBufferTick_ to run as soon as possible
  4076. *
  4077. * @private
  4078. */
  4079. }, {
  4080. key: 'monitorBuffer_',
  4081. value: function monitorBuffer_() {
  4082. if (this.checkBufferTimeout_) {
  4083. _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
  4084. }
  4085. this.checkBufferTimeout_ = _globalWindow2['default'].setTimeout(this.monitorBufferTick_.bind(this), 1);
  4086. }
  4087. /**
  4088. * As long as the SegmentLoader is in the READY state, periodically
  4089. * invoke fillBuffer_().
  4090. *
  4091. * @private
  4092. */
  4093. }, {
  4094. key: 'monitorBufferTick_',
  4095. value: function monitorBufferTick_() {
  4096. if (this.state === 'READY') {
  4097. this.fillBuffer_();
  4098. }
  4099. if (this.checkBufferTimeout_) {
  4100. _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
  4101. }
  4102. this.checkBufferTimeout_ = _globalWindow2['default'].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
  4103. }
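/*
 * Illustrative sketch (not part of the library): monitorBuffer_ and
 * monitorBufferTick_ form a self-rescheduling polling loop. The same
 * pattern in isolation, with a hypothetical tick callback and delay:
 *
 *   function startMonitor(tick, delayMs) {
 *     var id = window.setTimeout(function loop() {
 *       tick();
 *       id = window.setTimeout(loop, delayMs);
 *     }, 1); // 1ms: run the first check as soon as possible
 *     return function stop() { window.clearTimeout(id); };
 *   }
 *
 * Here the steady-state delay is CHECK_BUFFER_DELAY and stop() corresponds
 * to pause() above.
 */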
  4104. /**
4105. * fill the buffer with segments unless the sourceBuffers are
  4106. * currently updating
  4107. *
  4108. * Note: this function should only ever be called by monitorBuffer_
  4109. * and never directly
  4110. *
  4111. * @private
  4112. */
  4113. }, {
  4114. key: 'fillBuffer_',
  4115. value: function fillBuffer_() {
  4116. if (this.sourceUpdater_.updating()) {
  4117. return;
  4118. }
  4119. if (!this.syncPoint_) {
  4120. this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
  4121. }
  4122. // see if we need to begin loading immediately
  4123. var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
  4124. if (!segmentInfo) {
  4125. return;
  4126. }
  4127. var isEndOfStream = detectEndOfStream(this.playlist_, this.mediaSource_, segmentInfo.mediaIndex);
  4128. if (isEndOfStream) {
  4129. this.mediaSource_.endOfStream();
  4130. return;
  4131. }
  4132. if (segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
  4133. return;
  4134. }
  4135. // We will need to change timestampOffset of the sourceBuffer if either of
  4136. // the following conditions are true:
  4137. // - The segment.timeline !== this.currentTimeline
  4138. // (we are crossing a discontinuity somehow)
  4139. // - The "timestampOffset" for the start of this segment is less than
  4140. // the currently set timestampOffset
  4141. if (segmentInfo.timeline !== this.currentTimeline_ || segmentInfo.startOfSegment !== null && segmentInfo.startOfSegment < this.sourceUpdater_.timestampOffset()) {
  4142. this.syncController_.reset();
  4143. segmentInfo.timestampOffset = segmentInfo.startOfSegment;
  4144. }
  4145. this.loadSegment_(segmentInfo);
  4146. }
  4147. /**
  4148. * Determines what segment request should be made, given current playback
  4149. * state.
  4150. *
  4151. * @param {TimeRanges} buffered - the state of the buffer
  4152. * @param {Object} playlist - the playlist object to fetch segments from
  4153. * @param {Number} mediaIndex - the previous mediaIndex fetched or null
  4154. * @param {Boolean} hasPlayed - a flag indicating whether we have played or not
  4155. * @param {Number} currentTime - the playback position in seconds
4156. * @param {Object} syncPoint - a sync-point object mapping a display time to a segment index in the playlist
  4157. * @returns {Object} a segment request object that describes the segment to load
  4158. */
  4159. }, {
  4160. key: 'checkBuffer_',
  4161. value: function checkBuffer_(buffered, playlist, mediaIndex, hasPlayed, currentTime, syncPoint) {
  4162. var lastBufferedEnd = 0;
  4163. var startOfSegment = undefined;
  4164. if (buffered.length) {
  4165. lastBufferedEnd = buffered.end(buffered.length - 1);
  4166. }
  4167. var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
  4168. if (!playlist.segments.length) {
  4169. return null;
  4170. }
4171. // if there is plenty of content buffered and the video has
4172. // been played before, relax for a while
  4173. if (bufferedTime >= _config2['default'].GOAL_BUFFER_LENGTH) {
  4174. return null;
  4175. }
  4176. // if the video has not yet played once, and we already have
  4177. // one segment downloaded do nothing
  4178. if (!hasPlayed && bufferedTime >= 1) {
  4179. return null;
  4180. }
  4181. this.logger_('checkBuffer_', 'mediaIndex:', mediaIndex, 'hasPlayed:', hasPlayed, 'currentTime:', currentTime, 'syncPoint:', syncPoint, 'fetchAtBuffer:', this.fetchAtBuffer_, 'bufferedTime:', bufferedTime);
  4182. // When the syncPoint is null, there is no way of determining a good
  4183. // conservative segment index to fetch from
4184. // The best thing to do here is to get that kind of sync-point data by
  4185. // making a request
  4186. if (syncPoint === null) {
  4187. mediaIndex = this.getSyncSegmentCandidate_(playlist);
  4188. this.logger_('getSync', 'mediaIndex:', mediaIndex);
  4189. return this.generateSegmentInfo_(playlist, mediaIndex, null, true);
  4190. }
  4191. // Under normal playback conditions fetching is a simple walk forward
  4192. if (mediaIndex !== null) {
  4193. this.logger_('walkForward', 'mediaIndex:', mediaIndex + 1);
  4194. var segment = playlist.segments[mediaIndex];
  4195. if (segment && segment.end) {
  4196. startOfSegment = segment.end;
  4197. } else {
  4198. startOfSegment = lastBufferedEnd;
  4199. }
  4200. return this.generateSegmentInfo_(playlist, mediaIndex + 1, startOfSegment, false);
  4201. }
  4202. // There is a sync-point but the lack of a mediaIndex indicates that
  4203. // we need to make a good conservative guess about which segment to
  4204. // fetch
  4205. if (this.fetchAtBuffer_) {
  4206. // Find the segment containing the end of the buffer
  4207. var mediaSourceInfo = (0, _playlist.getMediaInfoForTime_)(playlist, lastBufferedEnd, syncPoint.segmentIndex, syncPoint.time);
  4208. mediaIndex = mediaSourceInfo.mediaIndex;
  4209. startOfSegment = mediaSourceInfo.startTime;
  4210. } else {
  4211. // Find the segment containing currentTime
  4212. var mediaSourceInfo = (0, _playlist.getMediaInfoForTime_)(playlist, currentTime, syncPoint.segmentIndex, syncPoint.time);
  4213. mediaIndex = mediaSourceInfo.mediaIndex;
  4214. startOfSegment = mediaSourceInfo.startTime;
  4215. }
  4216. this.logger_('getMediaIndexForTime', 'mediaIndex:', mediaIndex, 'startOfSegment:', startOfSegment);
  4217. return this.generateSegmentInfo_(playlist, mediaIndex, startOfSegment, false);
  4218. }
  4219. /**
  4220. * The segment loader has no recourse except to fetch a segment in the
  4221. * current playlist and use the internal timestamps in that segment to
  4222. * generate a syncPoint. This function returns a good candidate index
  4223. * for that process.
  4224. *
4225. * @param {Object} playlist - the playlist object to look for a sync-segment candidate in
  4226. * @returns {Number} An index of a segment from the playlist to load
  4227. */
  4228. }, {
  4229. key: 'getSyncSegmentCandidate_',
  4230. value: function getSyncSegmentCandidate_(playlist) {
  4231. var _this2 = this;
  4232. if (this.currentTimeline_ === -1) {
  4233. return 0;
  4234. }
  4235. var segmentIndexArray = playlist.segments.map(function (s, i) {
  4236. return {
  4237. timeline: s.timeline,
  4238. segmentIndex: i
  4239. };
  4240. }).filter(function (s) {
  4241. return s.timeline === _this2.currentTimeline_;
  4242. });
  4243. if (segmentIndexArray.length) {
  4244. return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
  4245. }
  4246. return Math.max(playlist.segments.length - 1, 0);
  4247. }
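/*
 * Worked example (not part of the library): with segment timelines
 * [0, 0, 1, 1, 1] and currentTimeline_ === 1, the filter keeps the
 * entries for segment indices [2, 3, 4], and
 * Math.min(segmentIndexArray.length - 1, 1) === 1 selects the second of
 * them, so the candidate is segment index 3: the second segment of the
 * current timeline rather than the one sitting on the discontinuity
 * boundary.
 */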
  4248. }, {
  4249. key: 'generateSegmentInfo_',
  4250. value: function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest) {
  4251. if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
  4252. return null;
  4253. }
  4254. var segment = playlist.segments[mediaIndex];
  4255. return {
  4256. requestId: 'segment-loader-' + Math.random(),
  4257. // resolve the segment URL relative to the playlist
  4258. uri: segment.resolvedUri,
  4259. // the segment's mediaIndex at the time it was requested
  4260. mediaIndex: mediaIndex,
  4261. // whether or not to update the SegmentLoader's state with this
  4262. // segment's mediaIndex
  4263. isSyncRequest: isSyncRequest,
  4264. startOfSegment: startOfSegment,
  4265. // the segment's playlist
  4266. playlist: playlist,
  4267. // unencrypted bytes of the segment
  4268. bytes: null,
  4269. // when a key is defined for this segment, the encrypted bytes
  4270. encryptedBytes: null,
  4271. // The target timestampOffset for this segment when we append it
  4272. // to the source buffer
  4273. timestampOffset: null,
  4274. // The timeline that the segment is in
  4275. timeline: segment.timeline,
  4276. // The expected duration of the segment in seconds
  4277. duration: segment.duration,
  4278. // retain the segment in case the playlist updates while doing an async process
  4279. segment: segment
  4280. };
  4281. }
  4282. /**
  4283. * load a specific segment from a request into the buffer
  4284. *
  4285. * @private
  4286. */
  4287. }, {
  4288. key: 'loadSegment_',
  4289. value: function loadSegment_(segmentInfo) {
  4290. var _this3 = this;
  4291. this.state = 'WAITING';
  4292. this.pendingSegment_ = segmentInfo;
  4293. this.trimBackBuffer_(segmentInfo);
  4294. segmentInfo.abortRequests = (0, _mediaSegmentRequest.mediaSegmentRequest)(this.hls_.xhr, this.xhrOptions_, this.decrypter_, this.createSimplifiedSegmentObj_(segmentInfo),
  4295. // progress callback
  4296. function (event, segment) {
  4297. if (!_this3.pendingSegment_ || segment.requestId !== _this3.pendingSegment_.requestId) {
  4298. return;
  4299. }
  4300. // TODO: Use progress-based bandwidth to early abort low-bandwidth situations
  4301. _this3.trigger('progress');
  4302. }, this.segmentRequestFinished_.bind(this));
  4303. }
  4304. /**
  4305. * trim the back buffer so that we don't have too much data
  4306. * in the source buffer
  4307. *
  4308. * @private
  4309. *
  4310. * @param {Object} segmentInfo - the current segment
  4311. */
  4312. }, {
  4313. key: 'trimBackBuffer_',
  4314. value: function trimBackBuffer_(segmentInfo) {
  4315. var seekable = this.seekable_();
  4316. var currentTime = this.currentTime_();
  4317. var removeToTime = 0;
  4318. // Chrome has a hard limit of 150mb of
  4319. // buffer and a very conservative "garbage collector"
  4320. // We manually clear out the old buffer to ensure
  4321. // we don't trigger the QuotaExceeded error
  4322. // on the source buffer during subsequent appends
  4323. // If we have a seekable range use that as the limit for what can be removed safely
  4324. // otherwise remove anything older than 1 minute before the current play head
  4325. if (seekable.length && seekable.start(0) > 0 && seekable.start(0) < currentTime) {
  4326. removeToTime = seekable.start(0);
  4327. } else {
  4328. removeToTime = currentTime - 60;
  4329. }
  4330. if (removeToTime > 0) {
  4331. this.remove(0, removeToTime);
  4332. }
  4333. }
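/*
 * Worked example (not part of the library): with a live seekable range
 * starting at 120s and currentTime at 300s, removeToTime becomes 120 and
 * remove(0, 120) drops everything before the seekable start. Without a
 * usable seekable range the fallback keeps one minute of back buffer:
 *
 *   removeToTime = 300 - 60;  // 240
 *   this.remove(0, 240);      // drop media more than 60s behind the playhead
 *
 * When currentTime is under 60s, removeToTime is negative and nothing is
 * removed.
 */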
  4334. /**
4335. * creates a simplified copy of the segment object with just the
  4336. * information necessary to perform the XHR and decryption
  4337. *
  4338. * @private
  4339. *
  4340. * @param {Object} segmentInfo - the current segment
  4341. * @returns {Object} a simplified segment object copy
  4342. */
  4343. }, {
  4344. key: 'createSimplifiedSegmentObj_',
  4345. value: function createSimplifiedSegmentObj_(segmentInfo) {
  4346. var segment = segmentInfo.segment;
  4347. var simpleSegment = {
  4348. resolvedUri: segment.resolvedUri,
  4349. byterange: segment.byterange,
  4350. requestId: segmentInfo.requestId
  4351. };
  4352. if (segment.key) {
  4353. // if the media sequence is greater than 2^32, the IV will be incorrect
  4354. // assuming 10s segments, that would be about 1300 years
  4355. var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
  4356. simpleSegment.key = {
  4357. resolvedUri: segment.key.resolvedUri,
  4358. iv: iv
  4359. };
  4360. }
  4361. if (segment.map) {
  4362. simpleSegment.map = this.initSegment(segment.map);
  4363. }
  4364. return simpleSegment;
  4365. }
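/*
 * Worked example (not part of the library): per the HLS spec, when an
 * EXT-X-KEY carries no IV attribute, the segment's media sequence number
 * is used as a 128-bit big-endian IV. With playlist.mediaSequence === 100
 * and mediaIndex === 3 the sequence number is 103, so:
 *
 *   var iv = new Uint32Array([0, 0, 0, 103]);
 *
 * The low 32 bits hold the sequence number and the upper 96 bits stay
 * zero, which is why the comment above notes the IV only breaks once the
 * media sequence exceeds 2^32.
 */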
  4366. /**
  4367. * Handle the callback from the segmentRequest function and set the
  4368. * associated SegmentLoader state and errors if necessary
  4369. *
  4370. * @private
  4371. */
  4372. }, {
  4373. key: 'segmentRequestFinished_',
  4374. value: function segmentRequestFinished_(error, simpleSegment) {
  4375. // every request counts as a media request even if it has been aborted
  4376. // or canceled due to a timeout
  4377. this.mediaRequests += 1;
  4378. if (simpleSegment.stats) {
  4379. this.mediaBytesTransferred += simpleSegment.stats.bytesReceived;
  4380. this.mediaTransferDuration += simpleSegment.stats.roundTripTime;
  4381. }
  4382. // The request was aborted and the SegmentLoader has already been reset
  4383. if (!this.pendingSegment_) {
  4384. this.mediaRequestsAborted += 1;
  4385. return;
  4386. }
  4387. // the request was aborted and the SegmentLoader has already started
  4388. // another request. this can happen when the timeout for an aborted
  4389. // request triggers due to a limitation in the XHR library
  4390. // do not count this as any sort of request or we risk double-counting
  4391. if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
  4392. return;
  4393. }
  4394. // an error occurred from the active pendingSegment_ so reset everything
  4395. if (error) {
  4396. this.pendingSegment_ = null;
4397. // the requests were aborted, so just record the aborted stat and exit
  4398. // this is not a true error condition and nothing corrective needs
  4399. // to be done
  4400. if (error.code === _mediaSegmentRequest.REQUEST_ERRORS.ABORTED) {
  4401. this.mediaRequestsAborted += 1;
  4402. return;
  4403. }
  4404. this.state = 'READY';
  4405. this.pause();
4406. // the error is really just that at least one of the requests timed out
  4407. // set the bandwidth to a very low value and trigger an ABR switch to
  4408. // take emergency action
  4409. if (error.code === _mediaSegmentRequest.REQUEST_ERRORS.TIMEOUT) {
  4410. this.mediaRequestsTimedout += 1;
  4411. this.bandwidth = 1;
  4412. this.roundTrip = NaN;
  4413. this.trigger('bandwidthupdate');
  4414. return;
  4415. }
  4416. // if control-flow has arrived here, then the error is real
  4417. // emit an error event to blacklist the current playlist
  4418. this.mediaRequestsErrored += 1;
  4419. this.error(error);
  4420. this.trigger('error');
  4421. return;
  4422. }
  4423. // the response was a success so set any bandwidth stats the request
  4424. // generated for ABR purposes
  4425. this.bandwidth = simpleSegment.stats.bandwidth;
  4426. this.roundTrip = simpleSegment.stats.roundTripTime;
  4427. // if this request included an initialization segment, save that data
  4428. // to the initSegment cache
  4429. if (simpleSegment.map) {
  4430. simpleSegment.map = this.initSegment(simpleSegment.map, true);
  4431. }
  4432. this.processSegmentResponse_(simpleSegment);
  4433. }
  4434. /**
  4435. * Move any important data from the simplified segment object
  4436. * back to the real segment object for future phases
  4437. *
  4438. * @private
  4439. */
  4440. }, {
  4441. key: 'processSegmentResponse_',
  4442. value: function processSegmentResponse_(simpleSegment) {
  4443. var segmentInfo = this.pendingSegment_;
  4444. segmentInfo.bytes = simpleSegment.bytes;
  4445. if (simpleSegment.map) {
  4446. segmentInfo.segment.map.bytes = simpleSegment.map.bytes;
  4447. }
  4448. segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
  4449. this.handleSegment_();
  4450. }
  4451. /**
4452. * append a decrypted segment to the SourceBuffer through a SourceUpdater
  4453. *
  4454. * @private
  4455. */
  4456. }, {
  4457. key: 'handleSegment_',
  4458. value: function handleSegment_() {
  4459. var _this4 = this;
  4460. if (!this.pendingSegment_) {
  4461. this.state = 'READY';
  4462. return;
  4463. }
  4464. this.state = 'APPENDING';
  4465. var segmentInfo = this.pendingSegment_;
  4466. var segment = segmentInfo.segment;
  4467. this.syncController_.probeSegmentInfo(segmentInfo);
  4468. if (segmentInfo.isSyncRequest) {
  4469. this.trigger('syncinfoupdate');
  4470. this.pendingSegment_ = null;
  4471. this.state = 'READY';
  4472. return;
  4473. }
  4474. if (segmentInfo.timestampOffset !== null && segmentInfo.timestampOffset !== this.sourceUpdater_.timestampOffset()) {
  4475. this.sourceUpdater_.timestampOffset(segmentInfo.timestampOffset);
  4476. }
  4477. // if the media initialization segment is changing, append it
  4478. // before the content segment
  4479. if (segment.map) {
  4480. (function () {
  4481. var initId = (0, _binUtils.initSegmentId)(segment.map);
  4482. if (!_this4.activeInitSegmentId_ || _this4.activeInitSegmentId_ !== initId) {
  4483. var initSegment = _this4.initSegment(segment.map);
  4484. _this4.sourceUpdater_.appendBuffer(initSegment.bytes, function () {
  4485. _this4.activeInitSegmentId_ = initId;
  4486. });
  4487. }
  4488. })();
  4489. }
  4490. segmentInfo.byteLength = segmentInfo.bytes.byteLength;
  4491. if (typeof segment.start === 'number' && typeof segment.end === 'number') {
  4492. this.mediaSecondsLoaded += segment.end - segment.start;
  4493. } else {
  4494. this.mediaSecondsLoaded += segment.duration;
  4495. }
  4496. this.sourceUpdater_.appendBuffer(segmentInfo.bytes, this.handleUpdateEnd_.bind(this));
  4497. }
  4498. /**
  4499. * callback to run when appendBuffer is finished. detects if we are
  4500. * in a good state to do things with the data we got, or if we need
  4501. * to wait for more
  4502. *
  4503. * @private
  4504. */
  4505. }, {
  4506. key: 'handleUpdateEnd_',
  4507. value: function handleUpdateEnd_() {
  4508. this.logger_('handleUpdateEnd_', 'segmentInfo:', this.pendingSegment_);
  4509. if (!this.pendingSegment_) {
  4510. this.state = 'READY';
  4511. if (!this.paused()) {
  4512. this.monitorBuffer_();
  4513. }
  4514. return;
  4515. }
  4516. var segmentInfo = this.pendingSegment_;
  4517. var segment = segmentInfo.segment;
  4518. var isWalkingForward = this.mediaIndex !== null;
  4519. this.pendingSegment_ = null;
  4520. this.recordThroughput_(segmentInfo);
  4521. this.addSegmentMetadataCue_(segmentInfo);
  4522. this.state = 'READY';
  4523. this.mediaIndex = segmentInfo.mediaIndex;
  4524. this.fetchAtBuffer_ = true;
  4525. this.currentTimeline_ = segmentInfo.timeline;
  4526. // We must update the syncinfo to recalculate the seekable range before
4527. // the following conditional, otherwise it may consider this a bad "guess"
  4528. // and attempt to resync when the post-update seekable window and live
  4529. // point would mean that this was the perfect segment to fetch
  4530. this.trigger('syncinfoupdate');
  4531. // If we previously appended a segment that ends more than 3 targetDurations before
  4532. // the currentTime_ that means that our conservative guess was too conservative.
  4533. // In that case, reset the loader state so that we try to use any information gained
  4534. // from the previous request to create a new, more accurate, sync-point.
  4535. if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
  4536. this.resetEverything();
  4537. return;
  4538. }
  4539. // Don't do a rendition switch unless we have enough time to get a sync segment
  4540. // and conservatively guess
  4541. if (isWalkingForward) {
  4542. this.trigger('bandwidthupdate');
  4543. }
  4544. this.trigger('progress');
  4545. // any time an update finishes and the last segment is in the
  4546. // buffer, end the stream. this ensures the "ended" event will
  4547. // fire if playback reaches that point.
  4548. var isEndOfStream = detectEndOfStream(segmentInfo.playlist, this.mediaSource_, segmentInfo.mediaIndex + 1);
  4549. if (isEndOfStream) {
  4550. this.mediaSource_.endOfStream();
  4551. }
  4552. if (!this.paused()) {
  4553. this.monitorBuffer_();
  4554. }
  4555. }
  4556. /**
  4557. * Records the current throughput of the decrypt, transmux, and append
4558. * portion of the segment pipeline. `throughput.rate` is the cumulative
  4559. * moving average of the throughput. `throughput.count` is the number of
  4560. * data points in the average.
  4561. *
  4562. * @private
  4563. * @param {Object} segmentInfo the object returned by loadSegment
  4564. */
  4565. }, {
  4566. key: 'recordThroughput_',
  4567. value: function recordThroughput_(segmentInfo) {
  4568. var rate = this.throughput.rate;
  4569. // Add one to the time to ensure that we don't accidentally attempt to divide
  4570. // by zero in the case where the throughput is ridiculously high
  4571. var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1;
  4572. // Multiply by 8000 to convert from bytes/millisecond to bits/second
  4573. var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000);
  4574. // This is just a cumulative moving average calculation:
  4575. // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
  4576. this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
  4577. }
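/*
 * Worked example (not part of the library): the update above is a standard
 * cumulative moving average. With an existing rate of 4e6 bits/s over 3
 * samples and a new sample of 8e6 bits/s:
 *
 *   // newAvg = oldAvg + (sample - oldAvg) / (count + 1)
 *   var newAvg = 4e6 + (8e6 - 4e6) / 4;  // 5e6 bits/s, count becomes 4
 *
 * The sample itself is byteLength / segmentProcessingTime * 8 * 1000,
 * i.e. bytes per millisecond converted to bits per second, and the +1 ms
 * added to the processing time guards against dividing by zero.
 */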
  4578. /**
  4579. * A debugging logger noop that is set to console.log only if debugging
  4580. * is enabled globally
  4581. *
  4582. * @private
  4583. */
  4584. }, {
  4585. key: 'logger_',
  4586. value: function logger_() {}
  4587. /**
  4588. * Adds a cue to the segment-metadata track with some metadata information about the
  4589. * segment
  4590. *
  4591. * @private
  4592. * @param {Object} segmentInfo
  4593. * the object returned by loadSegment
  4594. * @method addSegmentMetadataCue_
  4595. */
  4596. }, {
  4597. key: 'addSegmentMetadataCue_',
  4598. value: function addSegmentMetadataCue_(segmentInfo) {
  4599. if (!this.segmentMetadataTrack_) {
  4600. return;
  4601. }
  4602. var segment = segmentInfo.segment;
  4603. var start = segment.start;
  4604. var end = segment.end;
  4605. (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.segmentMetadataTrack_);
  4606. var Cue = _globalWindow2['default'].WebKitDataCue || _globalWindow2['default'].VTTCue;
  4607. var value = {
  4608. uri: segmentInfo.uri,
  4609. timeline: segmentInfo.timeline,
  4610. playlist: segmentInfo.playlist.uri,
  4611. start: start,
  4612. end: end
  4613. };
  4614. var data = JSON.stringify(value);
  4615. var cue = new Cue(start, end, data);
  4616. // Attach the metadata to the value property of the cue to keep consistency between
  4617. // the differences of WebKitDataCue in safari and VTTCue in other browsers
  4618. cue.value = value;
  4619. this.segmentMetadataTrack_.addCue(cue);
  4620. }
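/*
 * Illustrative sketch (not part of the library): a cue added to the
 * segment-metadata track might carry a value such as (URIs hypothetical):
 *
 *   {
 *     uri: 'segment-103.ts',
 *     timeline: 0,
 *     playlist: 'media-720p.m3u8',
 *     start: 30.03,
 *     end: 36.04
 *   }
 *
 * The same object is serialized into the cue text and also attached as
 * cue.value, so consumers see one shape whether the browser provides
 * WebKitDataCue (Safari) or VTTCue.
 */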
  4621. }]);
  4622. return SegmentLoader;
  4623. })(_videoJs2['default'].EventTarget);
  4624. exports['default'] = SegmentLoader;
  4625. module.exports = exports['default'];
  4626. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  4627. },{"./bin-utils":2,"./config":3,"./media-segment-request":6,"./playlist":9,"./source-updater":15,"global/window":30,"videojs-contrib-media-sources/es5/remove-cues-from-track.js":71}],15:[function(require,module,exports){
  4628. (function (global){
  4629. /**
  4630. * @file source-updater.js
  4631. */
  4632. 'use strict';
  4633. Object.defineProperty(exports, '__esModule', {
  4634. value: true
  4635. });
  4636. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  4637. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  4638. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  4639. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  4640. var _videoJs2 = _interopRequireDefault(_videoJs);
  4641. var noop = function noop() {};
  4642. /**
  4643. * A queue of callbacks to be serialized and applied when a
  4644. * MediaSource and its associated SourceBuffers are not in the
  4645. * updating state. It is used by the segment loader to update the
  4646. * underlying SourceBuffers when new data is loaded, for instance.
  4647. *
  4648. * @class SourceUpdater
  4649. * @param {MediaSource} mediaSource the MediaSource to create the
  4650. * SourceBuffer from
  4651. * @param {String} mimeType the desired MIME type of the underlying
  4652. * SourceBuffer
  4653. */
  4654. var SourceUpdater = (function () {
  4655. function SourceUpdater(mediaSource, mimeType) {
  4656. var _this = this;
  4657. _classCallCheck(this, SourceUpdater);
  4658. var createSourceBuffer = function createSourceBuffer() {
  4659. _this.sourceBuffer_ = mediaSource.addSourceBuffer(mimeType);
  4660. // run completion handlers and process callbacks as updateend
  4661. // events fire
  4662. _this.onUpdateendCallback_ = function () {
  4663. var pendingCallback = _this.pendingCallback_;
  4664. _this.pendingCallback_ = null;
  4665. if (pendingCallback) {
  4666. pendingCallback();
  4667. }
  4668. _this.runCallback_();
  4669. };
  4670. _this.sourceBuffer_.addEventListener('updateend', _this.onUpdateendCallback_);
  4671. _this.runCallback_();
  4672. };
  4673. this.callbacks_ = [];
  4674. this.pendingCallback_ = null;
  4675. this.timestampOffset_ = 0;
  4676. this.mediaSource = mediaSource;
  4677. this.processedAppend_ = false;
  4678. if (mediaSource.readyState === 'closed') {
  4679. mediaSource.addEventListener('sourceopen', createSourceBuffer);
  4680. } else {
  4681. createSourceBuffer();
  4682. }
  4683. }
  4684. /**
  4685. * Aborts the current segment and resets the segment parser.
  4686. *
  4687. * @param {Function} done function to call when done
  4688. * @see http://w3c.github.io/media-source/#widl-SourceBuffer-abort-void
  4689. */
  4690. _createClass(SourceUpdater, [{
  4691. key: 'abort',
  4692. value: function abort(done) {
  4693. var _this2 = this;
  4694. if (this.processedAppend_) {
  4695. this.queueCallback_(function () {
  4696. _this2.sourceBuffer_.abort();
  4697. }, done);
  4698. }
  4699. }
  4700. /**
  4701. * Queue an update to append an ArrayBuffer.
  4702. *
  4703. * @param {ArrayBuffer} bytes
  4704. * @param {Function} done the function to call when done
  4705. * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
  4706. */
  4707. }, {
  4708. key: 'appendBuffer',
  4709. value: function appendBuffer(bytes, done) {
  4710. var _this3 = this;
  4711. this.processedAppend_ = true;
  4712. this.queueCallback_(function () {
  4713. _this3.sourceBuffer_.appendBuffer(bytes);
  4714. }, done);
  4715. }
  4716. /**
  4717. * Indicates what TimeRanges are buffered in the managed SourceBuffer.
  4718. *
  4719. * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-buffered
  4720. */
  4721. }, {
  4722. key: 'buffered',
  4723. value: function buffered() {
  4724. if (!this.sourceBuffer_) {
  4725. return _videoJs2['default'].createTimeRanges();
  4726. }
  4727. return this.sourceBuffer_.buffered;
  4728. }
  4729. /**
  4730. * Queue an update to remove a time range from the buffer.
  4731. *
  4732. * @param {Number} start where to start the removal
  4733. * @param {Number} end where to end the removal
  4734. * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
  4735. */
  4736. }, {
  4737. key: 'remove',
  4738. value: function remove(start, end) {
  4739. var _this4 = this;
  4740. if (this.processedAppend_) {
  4741. this.queueCallback_(function () {
  4742. _this4.sourceBuffer_.remove(start, end);
  4743. }, noop);
  4744. }
  4745. }
  4746. /**
  4747. * Whether the underlying sourceBuffer is updating or not
  4748. *
  4749. * @return {Boolean} the updating status of the SourceBuffer
  4750. */
  4751. }, {
  4752. key: 'updating',
  4753. value: function updating() {
  4754. return !this.sourceBuffer_ || this.sourceBuffer_.updating || this.pendingCallback_;
  4755. }
  4756. /**
  4757. * Set/get the timestampoffset on the SourceBuffer
  4758. *
  4759. * @return {Number} the timestamp offset
  4760. */
  4761. }, {
  4762. key: 'timestampOffset',
  4763. value: function timestampOffset(offset) {
  4764. var _this5 = this;
  4765. if (typeof offset !== 'undefined') {
  4766. this.queueCallback_(function () {
  4767. _this5.sourceBuffer_.timestampOffset = offset;
  4768. });
  4769. this.timestampOffset_ = offset;
  4770. }
  4771. return this.timestampOffset_;
  4772. }
  4773. /**
  4774. * Queue a callback to run
  4775. */
  4776. }, {
  4777. key: 'queueCallback_',
  4778. value: function queueCallback_(callback, done) {
  4779. this.callbacks_.push([callback.bind(this), done]);
  4780. this.runCallback_();
  4781. }
  4782. /**
  4783. * Run a queued callback
  4784. */
  4785. }, {
  4786. key: 'runCallback_',
  4787. value: function runCallback_() {
  4788. var callbacks = undefined;
  4789. if (!this.updating() && this.callbacks_.length) {
  4790. callbacks = this.callbacks_.shift();
  4791. this.pendingCallback_ = callbacks[1];
  4792. callbacks[0]();
  4793. }
  4794. }
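/*
 * Illustrative sketch (not part of the library): queueCallback_ and
 * runCallback_ serialize all work against the single SourceBuffer.
 * Assuming a constructed SourceUpdater `updater`:
 *
 *   updater.appendBuffer(bytesA, onDoneA); // starts immediately
 *   updater.appendBuffer(bytesB, onDoneB); // queued: updating() is true
 *   updater.remove(0, 10);                 // queued behind bytesB
 *
 * When 'updateend' fires for bytesA, onDoneA runs as pendingCallback_ and
 * runCallback_ dequeues the next operation, so the SourceBuffer never
 * sees overlapping append/remove calls.
 */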
  4795. /**
  4796. * dispose of the source updater and the underlying sourceBuffer
  4797. */
  4798. }, {
  4799. key: 'dispose',
  4800. value: function dispose() {
4801. if (this.sourceBuffer_) { this.sourceBuffer_.removeEventListener('updateend', this.onUpdateendCallback_); } // guard: the SourceBuffer may never have been created
  4802. if (this.sourceBuffer_ && this.mediaSource.readyState === 'open') {
  4803. this.sourceBuffer_.abort();
  4804. }
  4805. }
  4806. }]);
  4807. return SourceUpdater;
  4808. })();
  4809. exports['default'] = SourceUpdater;
  4810. module.exports = exports['default'];
  4811. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  4812. },{}],16:[function(require,module,exports){
  4813. /**
  4814. * @file stream.js
  4815. */
  4816. /**
4817. * A lightweight readable stream implementation that handles event dispatching.
  4818. *
  4819. * @class Stream
  4820. */
  4821. 'use strict';
  4822. Object.defineProperty(exports, '__esModule', {
  4823. value: true
  4824. });
  4825. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  4826. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  4827. var Stream = (function () {
  4828. function Stream() {
  4829. _classCallCheck(this, Stream);
  4830. this.listeners = {};
  4831. }
  4832. /**
  4833. * Add a listener for a specified event type.
  4834. *
  4835. * @param {String} type the event name
  4836. * @param {Function} listener the callback to be invoked when an event of
  4837. * the specified type occurs
  4838. */
  4839. _createClass(Stream, [{
  4840. key: 'on',
  4841. value: function on(type, listener) {
  4842. if (!this.listeners[type]) {
  4843. this.listeners[type] = [];
  4844. }
  4845. this.listeners[type].push(listener);
  4846. }
  4847. /**
  4848. * Remove a listener for a specified event type.
  4849. *
  4850. * @param {String} type the event name
  4851. * @param {Function} listener a function previously registered for this
  4852. * type of event through `on`
4853. * @return {Boolean} whether the listener was found and removed
  4854. */
  4855. }, {
  4856. key: 'off',
  4857. value: function off(type, listener) {
  4858. var index = undefined;
  4859. if (!this.listeners[type]) {
  4860. return false;
  4861. }
  4862. index = this.listeners[type].indexOf(listener);
4863. if (index > -1) { this.listeners[type].splice(index, 1); } // only splice when the listener was actually registered
  4864. return index > -1;
  4865. }
  4866. /**
  4867. * Trigger an event of the specified type on this stream. Any additional
  4868. * arguments to this function are passed as parameters to event listeners.
  4869. *
  4870. * @param {String} type the event name
  4871. */
  4872. }, {
  4873. key: 'trigger',
  4874. value: function trigger(type) {
  4875. var callbacks = undefined;
  4876. var i = undefined;
  4877. var length = undefined;
  4878. var args = undefined;
  4879. callbacks = this.listeners[type];
  4880. if (!callbacks) {
  4881. return;
  4882. }
  4883. // Slicing the arguments on every invocation of this method
  4884. // can add a significant amount of overhead. Avoid the
  4885. // intermediate object creation for the common case of a
  4886. // single callback argument
  4887. if (arguments.length === 2) {
  4888. length = callbacks.length;
  4889. for (i = 0; i < length; ++i) {
  4890. callbacks[i].call(this, arguments[1]);
  4891. }
  4892. } else {
  4893. args = Array.prototype.slice.call(arguments, 1);
  4894. length = callbacks.length;
  4895. for (i = 0; i < length; ++i) {
  4896. callbacks[i].apply(this, args);
  4897. }
  4898. }
  4899. }
  4900. /**
  4901. * Destroys the stream and cleans up.
  4902. */
  4903. }, {
  4904. key: 'dispose',
  4905. value: function dispose() {
  4906. this.listeners = {};
  4907. }
  4908. /**
  4909. * Forwards all `data` events on this stream to the destination stream. The
  4910. * destination stream should provide a method `push` to receive the data
  4911. * events as they arrive.
  4912. *
  4913. * @param {Stream} destination the stream that will receive all `data` events
  4914. * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
  4915. */
  4916. }, {
  4917. key: 'pipe',
  4918. value: function pipe(destination) {
  4919. this.on('data', function (data) {
  4920. destination.push(data);
  4921. });
  4922. }
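/*
 * Illustrative sketch (not part of the library): basic Stream usage.
 *
 *   var source = new Stream();
 *   var sink = { push: function (data) { console.log('got', data); } };
 *
 *   source.pipe(sink);                        // forward every 'data' event
 *   source.on('ended', function () { console.log('done'); });
 *   source.trigger('data', 42);               // sink logs: got 42
 *   source.trigger('ended');                  // logs: done
 *
 * trigger() hands any extra arguments straight to the listeners, with a
 * fast path for the common single-argument case.
 */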
  4923. }]);
  4924. return Stream;
  4925. })();
  4926. exports['default'] = Stream;
  4927. module.exports = exports['default'];
  4928. },{}],17:[function(require,module,exports){
  4929. (function (global){
  4930. /**
  4931. * @file sync-controller.js
  4932. */
  4933. 'use strict';
  4934. Object.defineProperty(exports, '__esModule', {
  4935. value: true
  4936. });
  4937. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  4938. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  4939. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  4940. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  4941. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  4942. var _muxJsLibMp4Probe = require('mux.js/lib/mp4/probe');
  4943. var _muxJsLibMp4Probe2 = _interopRequireDefault(_muxJsLibMp4Probe);
  4944. var _muxJsLibToolsTsInspectorJs = require('mux.js/lib/tools/ts-inspector.js');
  4945. var _playlist = require('./playlist');
  4946. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  4947. var _videoJs2 = _interopRequireDefault(_videoJs);
  4948. var syncPointStrategies = [
4949. // Strategy "VOD": Handle the VOD case where the sync-point is *always*
  4950. // the equivalence display-time 0 === segment-index 0
  4951. {
  4952. name: 'VOD',
  4953. run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
  4954. if (duration !== Infinity) {
  4955. var syncPoint = {
  4956. time: 0,
  4957. segmentIndex: 0
  4958. };
  4959. return syncPoint;
  4960. }
  4961. return null;
  4962. }
  4963. },
4964. // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
  4965. {
  4966. name: 'ProgramDateTime',
  4967. run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
  4968. if (syncController.datetimeToDisplayTime && playlist.dateTimeObject) {
  4969. var playlistTime = playlist.dateTimeObject.getTime() / 1000;
  4970. var playlistStart = playlistTime + syncController.datetimeToDisplayTime;
  4971. var syncPoint = {
  4972. time: playlistStart,
  4973. segmentIndex: 0
  4974. };
  4975. return syncPoint;
  4976. }
  4977. return null;
  4978. }
  4979. },
4980. // Strategy "Segment": We have a known time mapping for a timeline and a
  4981. // segment in the current timeline with timing data
  4982. {
  4983. name: 'Segment',
  4984. run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
  4985. var segments = playlist.segments;
  4986. var syncPoint = null;
  4987. var lastDistance = null;
  4988. currentTime = currentTime || 0;
  4989. for (var i = 0; i < segments.length; i++) {
  4990. var segment = segments[i];
  4991. if (segment.timeline === currentTimeline && typeof segment.start !== 'undefined') {
  4992. var distance = Math.abs(currentTime - segment.start);
  4993. // Once the distance begins to increase, we have passed
  4994. // currentTime and can stop looking for better candidates
  4995. if (lastDistance !== null && lastDistance < distance) {
  4996. break;
  4997. }
  4998. if (!syncPoint || lastDistance === null || lastDistance >= distance) {
  4999. lastDistance = distance;
  5000. syncPoint = {
  5001. time: segment.start,
  5002. segmentIndex: i
  5003. };
  5004. }
  5005. }
  5006. }
  5007. return syncPoint;
  5008. }
  5009. },
5010. // Strategy "Discontinuity": We have a discontinuity with a known
  5011. // display-time
  5012. {
  5013. name: 'Discontinuity',
  5014. run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
  5015. var syncPoint = null;
  5016. currentTime = currentTime || 0;
  5017. if (playlist.discontinuityStarts.length) {
  5018. var lastDistance = null;
  5019. for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
  5020. var segmentIndex = playlist.discontinuityStarts[i];
  5021. var discontinuity = playlist.discontinuitySequence + i + 1;
  5022. var discontinuitySync = syncController.discontinuities[discontinuity];
  5023. if (discontinuitySync) {
  5024. var distance = Math.abs(currentTime - discontinuitySync.time);
  5025. // Once the distance begins to increase, we have passed
  5026. // currentTime and can stop looking for better candidates
  5027. if (lastDistance !== null && lastDistance < distance) {
  5028. break;
  5029. }
  5030. if (!syncPoint || lastDistance === null || lastDistance >= distance) {
  5031. lastDistance = distance;
  5032. syncPoint = {
  5033. time: discontinuitySync.time,
  5034. segmentIndex: segmentIndex
  5035. };
  5036. }
  5037. }
  5038. }
  5039. }
  5040. return syncPoint;
  5041. }
  5042. },
5043. // Strategy "Playlist": We have a playlist with a known mapping of
  5044. // segment index to display time
  5045. {
  5046. name: 'Playlist',
  5047. run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
  5048. if (playlist.syncInfo) {
  5049. var syncPoint = {
  5050. time: playlist.syncInfo.time,
  5051. segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
  5052. };
  5053. return syncPoint;
  5054. }
  5055. return null;
  5056. }
  5057. }];
  5058. exports.syncPointStrategies = syncPointStrategies;
  5059. var SyncController = (function (_videojs$EventTarget) {
  5060. _inherits(SyncController, _videojs$EventTarget);
  5061. function SyncController() {
  5062. _classCallCheck(this, SyncController);
  5063. _get(Object.getPrototypeOf(SyncController.prototype), 'constructor', this).call(this);
  5064. // Segment Loader state variables...
  5065. // ...for synching across variants
  5066. this.inspectCache_ = undefined;
  5067. // ...for synching across variants
  5068. this.timelines = [];
  5069. this.discontinuities = [];
  5070. this.datetimeToDisplayTime = null;
  5071. if (_videoJs2['default'].options.hls && _videoJs2['default'].options.hls.debug) {
  5072. this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'sync-controller ->');
  5073. }
  5074. }
  5075. /**
  5076. * Find a sync-point for the playlist specified
  5077. *
  5078. * A sync-point is defined as a known mapping from display-time to
  5079. * a segment-index in the current playlist.
  5080. *
  5081. * @param {Playlist} media - The playlist that needs a sync-point
5082. * @param {Number} duration - Duration of the MediaSource (Infinity if playing a live source)
  5083. * @param {Number} currentTimeline - The last timeline from which a segment was loaded
  5084. * @returns {Object} - A sync-point object
  5085. */
  5086. _createClass(SyncController, [{
  5087. key: 'getSyncPoint',
  5088. value: function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
  5089. var syncPoints = [];
5090. // Try to find a sync-point by utilizing various strategies...
  5091. for (var i = 0; i < syncPointStrategies.length; i++) {
  5092. var strategy = syncPointStrategies[i];
  5093. var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
  5094. if (syncPoint) {
  5095. syncPoint.strategy = strategy.name;
  5096. syncPoints.push({
  5097. strategy: strategy.name,
  5098. syncPoint: syncPoint
  5099. });
  5100. this.logger_('syncPoint found via <' + strategy.name + '>:', syncPoint);
  5101. }
  5102. }
  5103. if (!syncPoints.length) {
  5104. // Signal that we need to attempt to get a sync-point manually
  5105. // by fetching a segment in the playlist and constructing
  5106. // a sync-point from that information
  5107. return null;
  5108. }
  5109. // Now find the sync-point that is closest to the currentTime because
  5110. // that should result in the most accurate guess about which segment
  5111. // to fetch
  5112. var bestSyncPoint = syncPoints[0].syncPoint;
  5113. var bestDistance = Math.abs(syncPoints[0].syncPoint.time - currentTime);
  5114. var bestStrategy = syncPoints[0].strategy;
  5115. for (var i = 1; i < syncPoints.length; i++) {
  5116. var newDistance = Math.abs(syncPoints[i].syncPoint.time - currentTime);
  5117. if (newDistance < bestDistance) {
  5118. bestDistance = newDistance;
  5119. bestSyncPoint = syncPoints[i].syncPoint;
  5120. bestStrategy = syncPoints[i].strategy;
  5121. }
  5122. }
  5123. this.logger_('syncPoint with strategy <' + bestStrategy + '> chosen: ', bestSyncPoint);
  5124. return bestSyncPoint;
  5125. }
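/*
 * Worked example (not part of the library): suppose the VOD strategy
 * yields { time: 0, segmentIndex: 0 } and the Segment strategy yields
 * { time: 95, segmentIndex: 9 } while currentTime is 100. The distances
 * are |0 - 100| = 100 and |95 - 100| = 5, so the Segment sync-point is
 * chosen and fetching resumes near the playhead instead of from the
 * start of the playlist.
 */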
  5126. /**
  5127. * Save any meta-data present on the segments when segments leave
  5128. * the live window to the playlist to allow for synchronization at the
  5129. * playlist level later.
  5130. *
  5131. * @param {Playlist} oldPlaylist - The previous active playlist
  5132. * @param {Playlist} newPlaylist - The updated and most current playlist
  5133. */
  5134. }, {
  5135. key: 'saveExpiredSegmentInfo',
  5136. value: function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
  5137. var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
  5138. // When a segment expires from the playlist and it has a start time
  5139. // save that information as a possible sync-point reference in future
  5140. for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
  5141. var lastRemovedSegment = oldPlaylist.segments[i];
  5142. if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
  5143. newPlaylist.syncInfo = {
  5144. mediaSequence: oldPlaylist.mediaSequence + i,
  5145. time: lastRemovedSegment.start
  5146. };
  5147. this.logger_('playlist sync:', newPlaylist.syncInfo);
  5148. this.trigger('syncinfoupdate');
  5149. break;
  5150. }
  5151. }
  5152. }
  5153. /**
  5154. * Save the mapping from playlist's ProgramDateTime to display. This should
  5155. * only ever happen once at the start of playback.
  5156. *
  5157. * @param {Playlist} playlist - The currently active playlist
  5158. */
  5159. }, {
  5160. key: 'setDateTimeMapping',
  5161. value: function setDateTimeMapping(playlist) {
  5162. if (!this.datetimeToDisplayTime && playlist.dateTimeObject) {
  5163. var playlistTimestamp = playlist.dateTimeObject.getTime() / 1000;
  5164. this.datetimeToDisplayTime = -playlistTimestamp;
  5165. }
  5166. }
  5167. /**
  5168. * Reset the state of the inspection cache when we do a rendition
  5169. * switch
  5170. */
  5171. }, {
  5172. key: 'reset',
  5173. value: function reset() {
  5174. this.inspectCache_ = undefined;
  5175. }
  5176. /**
5177. * Probe or inspect an fmp4 or an mpeg2-ts segment to determine the start
5178. * and end of the segment in its internal "media time". Used to generate
  5179. * mappings from that internal "media time" to the display time that is
  5180. * shown on the player.
  5181. *
  5182. * @param {SegmentInfo} segmentInfo - The current active request information
  5183. */
  5184. }, {
  5185. key: 'probeSegmentInfo',
  5186. value: function probeSegmentInfo(segmentInfo) {
  5187. var segment = segmentInfo.segment;
  5188. var timingInfo = undefined;
  5189. if (segment.map) {
  5190. timingInfo = this.probeMp4Segment_(segmentInfo);
  5191. } else {
  5192. timingInfo = this.probeTsSegment_(segmentInfo);
  5193. }
  5194. if (timingInfo) {
  5195. if (this.calculateSegmentTimeMapping_(segmentInfo, timingInfo)) {
  5196. this.saveDiscontinuitySyncInfo_(segmentInfo);
  5197. }
  5198. }
  5199. }
  5200. /**
  5201. * Probe an fmp4 or an mpeg2-ts segment to determine the start of the segment
5202. * in its internal "media time".
  5203. *
  5204. * @private
  5205. * @param {SegmentInfo} segmentInfo - The current active request information
  5206. * @return {object} The start and end time of the current segment in "media time"
  5207. */
  5208. }, {
  5209. key: 'probeMp4Segment_',
  5210. value: function probeMp4Segment_(segmentInfo) {
  5211. var segment = segmentInfo.segment;
  5212. var timescales = _muxJsLibMp4Probe2['default'].timescale(segment.map.bytes);
  5213. var startTime = _muxJsLibMp4Probe2['default'].startTime(timescales, segmentInfo.bytes);
  5214. if (segmentInfo.timestampOffset !== null) {
  5215. segmentInfo.timestampOffset -= startTime;
  5216. }
  5217. return {
  5218. start: startTime,
  5219. end: startTime + segment.duration
  5220. };
  5221. }
  5222. /**
  5223. * Probe an mpeg2-ts segment to determine the start and end of the segment
5224. * in its internal "media time".
  5225. *
  5226. * @private
  5227. * @param {SegmentInfo} segmentInfo - The current active request information
  5228. * @return {object} The start and end time of the current segment in "media time"
  5229. */
  5230. }, {
  5231. key: 'probeTsSegment_',
  5232. value: function probeTsSegment_(segmentInfo) {
  5233. var timeInfo = (0, _muxJsLibToolsTsInspectorJs.inspect)(segmentInfo.bytes, this.inspectCache_);
  5234. var segmentStartTime = undefined;
  5235. var segmentEndTime = undefined;
  5236. if (!timeInfo) {
  5237. return null;
  5238. }
  5239. if (timeInfo.video && timeInfo.video.length === 2) {
  5240. this.inspectCache_ = timeInfo.video[1].dts;
  5241. segmentStartTime = timeInfo.video[0].dtsTime;
  5242. segmentEndTime = timeInfo.video[1].dtsTime;
  5243. } else if (timeInfo.audio && timeInfo.audio.length === 2) {
  5244. this.inspectCache_ = timeInfo.audio[1].dts;
  5245. segmentStartTime = timeInfo.audio[0].dtsTime;
  5246. segmentEndTime = timeInfo.audio[1].dtsTime;
  5247. }
  5248. return {
  5249. start: segmentStartTime,
  5250. end: segmentEndTime
  5251. };
  5252. }
  5253. }, {
  5254. key: 'timestampOffsetForTimeline',
  5255. value: function timestampOffsetForTimeline(timeline) {
  5256. if (typeof this.timelines[timeline] === 'undefined') {
  5257. return null;
  5258. }
  5259. return this.timelines[timeline].time;
  5260. }
  5261. /**
  5262. * Use the "media time" for a segment to generate a mapping to "display time" and
  5263. * save that display time to the segment.
  5264. *
  5265. * @private
  5266. * @param {SegmentInfo} segmentInfo - The current active request information
  5267. * @param {object} timingInfo - The start and end time of the current segment in "media time"
  5268. */
  5269. }, {
  5270. key: 'calculateSegmentTimeMapping_',
  5271. value: function calculateSegmentTimeMapping_(segmentInfo, timingInfo) {
  5272. var segment = segmentInfo.segment;
  5273. var mappingObj = this.timelines[segmentInfo.timeline];
  5274. if (segmentInfo.timestampOffset !== null) {
  5275. this.logger_('tsO:', segmentInfo.timestampOffset);
  5276. mappingObj = {
  5277. time: segmentInfo.startOfSegment,
  5278. mapping: segmentInfo.startOfSegment - timingInfo.start
  5279. };
  5280. this.timelines[segmentInfo.timeline] = mappingObj;
  5281. this.trigger('timestampoffset');
  5282. segment.start = segmentInfo.startOfSegment;
  5283. segment.end = timingInfo.end + mappingObj.mapping;
  5284. } else if (mappingObj) {
  5285. segment.start = timingInfo.start + mappingObj.mapping;
  5286. segment.end = timingInfo.end + mappingObj.mapping;
  5287. } else {
  5288. return false;
  5289. }
  5290. return true;
  5291. }
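/*
 * Worked example (not part of the library): say a segment is expected to
 * start at display time 10 (startOfSegment) while probing reports an
 * internal media time of start = 1000.5 and end = 1006.5. Then:
 *
 *   // mapping = startOfSegment - timingInfo.start
 *   var mapping = 10 - 1000.5;       // -990.5
 *   // segment.start = 10, segment.end = 1006.5 + (-990.5) = 16
 *
 * Later segments on the same timeline reuse the saved mapping directly:
 * segment.start = timingInfo.start + mapping.
 */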
  5292. /**
5293. * Each time we have a discontinuity in the playlist, attempt to calculate the location
5294. * in display time of the start of the discontinuity and save it. We also save an accuracy
5295. * value so that we keep the values with the most accuracy (closest to 0).
  5296. *
  5297. * @private
  5298. * @param {SegmentInfo} segmentInfo - The current active request information
  5299. */
  5300. }, {
  5301. key: 'saveDiscontinuitySyncInfo_',
  5302. value: function saveDiscontinuitySyncInfo_(segmentInfo) {
  5303. var playlist = segmentInfo.playlist;
  5304. var segment = segmentInfo.segment;
  5305. // If the current segment is a discontinuity then we know exactly where
5306. // the range starts and its accuracy is 0 (greater accuracy values
  5307. // mean more approximation)
  5308. if (segment.discontinuity) {
  5309. this.discontinuities[segment.timeline] = {
  5310. time: segment.start,
  5311. accuracy: 0
  5312. };
  5313. } else if (playlist.discontinuityStarts.length) {
  5314. // Search for future discontinuities that we can provide better timing
  5315. // information for and save that information for sync purposes
  5316. for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
  5317. var segmentIndex = playlist.discontinuityStarts[i];
  5318. var discontinuity = playlist.discontinuitySequence + i + 1;
  5319. var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
  5320. var accuracy = Math.abs(mediaIndexDiff);
  5321. if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
  5322. if (mediaIndexDiff < 0) {
  5323. this.discontinuities[discontinuity] = {
  5324. time: segment.start - (0, _playlist.sumDurations)(playlist, segmentInfo.mediaIndex, segmentIndex),
  5325. accuracy: accuracy
  5326. };
  5327. } else {
  5328. this.discontinuities[discontinuity] = {
  5329. time: segment.end + (0, _playlist.sumDurations)(playlist, segmentInfo.mediaIndex + 1, segmentIndex),
  5330. accuracy: accuracy
  5331. };
  5332. }
  5333. }
  5334. }
  5335. }
  5336. }
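// Sketch of the bookkeeping above (numbers are hypothetical): while processing
// segment 2 (mediaIndex = 2) with playlist.discontinuityStarts = [5],
// mediaIndexDiff = 5 - 2 = 3, so the discontinuity's start is estimated as
// segment.end + sumDurations(playlist, 3, 5) with accuracy 3. A later pass from
// a closer segment (smaller |mediaIndexDiff|) overwrites the estimate, and a
// segment that is itself the discontinuity records the exact time, accuracy 0.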
  5337. /**
  5338. * A debugging logger noop that is set to console.log only if debugging
  5339. * is enabled globally
  5340. *
  5341. * @private
  5342. */
  5343. }, {
  5344. key: 'logger_',
  5345. value: function logger_() {}
  5346. }]);
  5347. return SyncController;
  5348. })(_videoJs2['default'].EventTarget);
  5349. exports['default'] = SyncController;
  5350. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  5351. },{"./playlist":9,"mux.js/lib/mp4/probe":55,"mux.js/lib/tools/ts-inspector.js":57}],18:[function(require,module,exports){
  5352. (function (global){
  5353. /**
  5354. * @file vtt-segment-loader.js
  5355. */
  5356. 'use strict';
  5357. Object.defineProperty(exports, '__esModule', {
  5358. value: true
  5359. });
  5360. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  5361. var _get = function get(_x2, _x3, _x4) { var _again = true; _function: while (_again) { var object = _x2, property = _x3, receiver = _x4; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x2 = parent; _x3 = property; _x4 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  5362. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  5363. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  5364. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  5365. var _segmentLoader = require('./segment-loader');
  5366. var _segmentLoader2 = _interopRequireDefault(_segmentLoader);
  5367. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  5368. var _videoJs2 = _interopRequireDefault(_videoJs);
  5369. var _globalWindow = require('global/window');
  5370. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  5371. var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs = require('videojs-contrib-media-sources/es5/remove-cues-from-track.js');
  5372. var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2 = _interopRequireDefault(_videojsContribMediaSourcesEs5RemoveCuesFromTrackJs);
  5373. var _binUtils = require('./bin-utils');
  5374. var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
  5375. return char.charCodeAt(0);
  5376. }));
  5377. var uintToString = function uintToString(uintArray) {
  5378. return String.fromCharCode.apply(null, uintArray);
  5379. };
  5380. /**
  5381. * An object that manages segment loading and appending.
  5382. *
  5383. * @class VTTSegmentLoader
  5384. * @param {Object} options required and optional options
  5385. * @extends videojs.EventTarget
  5386. */
  5387. var VTTSegmentLoader = (function (_SegmentLoader) {
  5388. _inherits(VTTSegmentLoader, _SegmentLoader);
  5389. function VTTSegmentLoader(options) {
  5390. _classCallCheck(this, VTTSegmentLoader);
  5391. _get(Object.getPrototypeOf(VTTSegmentLoader.prototype), 'constructor', this).call(this, options);
  5392. // SegmentLoader requires a MediaSource be specified or it will throw an error;
  5393. // however, VTTSegmentLoader has no need of a media source, so delete the reference
  5394. this.mediaSource_ = null;
  5395. this.subtitlesTrack_ = null;
  5396. }
  5397. /**
  5398. * Indicates which time ranges are buffered
  5399. *
  5400. * @return {TimeRange}
  5401. * TimeRange object representing the current buffered ranges
  5402. */
  5403. _createClass(VTTSegmentLoader, [{
  5404. key: 'buffered_',
  5405. value: function buffered_() {
  5406. if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues.length) {
  5407. return _videoJs2['default'].createTimeRanges();
  5408. }
  5409. var cues = this.subtitlesTrack_.cues;
  5410. var start = cues[0].startTime;
  5411. var end = cues[cues.length - 1].startTime;
  5412. return _videoJs2['default'].createTimeRanges([[start, end]]);
  5413. }
  5414. /**
5415. * Gets and sets the init segment for the provided map
  5416. *
  5417. * @param {Object} map
  5418. * The map object representing the init segment to get or set
  5419. * @param {Boolean=} set
  5420. * If true, the init segment for the provided map should be saved
  5421. * @return {Object}
  5422. * map object for desired init segment
  5423. */
  5424. }, {
  5425. key: 'initSegment',
  5426. value: function initSegment(map) {
  5427. var set = arguments.length <= 1 || arguments[1] === undefined ? false : arguments[1];
  5428. if (!map) {
  5429. return null;
  5430. }
  5431. var id = (0, _binUtils.initSegmentId)(map);
  5432. var storedMap = this.initSegments_[id];
  5433. if (set && !storedMap && map.bytes) {
  5434. // append WebVTT line terminators to the media initialization segment if it exists
  5435. // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
  5436. // requires two or more WebVTT line terminators between the WebVTT header and the rest
  5437. // of the file
  5438. var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
  5439. var combinedSegment = new Uint8Array(combinedByteLength);
  5440. combinedSegment.set(map.bytes);
  5441. combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
  5442. this.initSegments_[id] = storedMap = {
  5443. resolvedUri: map.resolvedUri,
  5444. byterange: map.byterange,
  5445. bytes: combinedSegment
  5446. };
  5447. }
  5448. return storedMap || map;
  5449. }
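// Sketch of the concatenation above (bytes are hypothetical): an init segment
// containing just the header "WEBVTT" becomes "WEBVTT\n\n", so a blank line
// separates the header from whatever is appended after it:
//
//   map.bytes = new Uint8Array([87, 69, 66, 86, 84, 84]); // "WEBVTT"
//   // combinedSegment -> [87, 69, 66, 86, 84, 84, 10, 10] // "WEBVTT\n\n"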
  5450. /**
  5451. * Returns true if all configuration required for loading is present, otherwise false.
  5452. *
  5453. * @return {Boolean} True if the all configuration is ready for loading
  5454. * @private
  5455. */
  5456. }, {
  5457. key: 'couldBeginLoading_',
  5458. value: function couldBeginLoading_() {
  5459. return this.playlist_ && this.subtitlesTrack_ && !this.paused();
  5460. }
  5461. /**
  5462. * Once all the starting parameters have been specified, begin
  5463. * operation. This method should only be invoked from the INIT
  5464. * state.
  5465. *
  5466. * @private
  5467. */
  5468. }, {
  5469. key: 'init_',
  5470. value: function init_() {
  5471. this.state = 'READY';
  5472. this.resetEverything();
  5473. return this.monitorBuffer_();
  5474. }
  5475. /**
  5476. * Set a subtitle track on the segment loader to add subtitles to
  5477. *
  5478. * @param {TextTrack} track
  5479. * The text track to add loaded subtitles to
  5480. */
  5481. }, {
  5482. key: 'track',
  5483. value: function track(_track) {
  5484. this.subtitlesTrack_ = _track;
5485. // if we were unpaused but waiting for a subtitles track, start
5486. // buffering now
  5487. if (this.state === 'INIT' && this.couldBeginLoading_()) {
  5488. this.init_();
  5489. }
  5490. }
  5491. /**
  5492. * Remove any data in the source buffer between start and end times
  5493. * @param {Number} start - the start time of the region to remove from the buffer
  5494. * @param {Number} end - the end time of the region to remove from the buffer
  5495. */
  5496. }, {
  5497. key: 'remove',
  5498. value: function remove(start, end) {
  5499. (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.subtitlesTrack_);
  5500. }
  5501. /**
5502. * fill the buffer with segments unless the sourceBuffers are
  5503. * currently updating
  5504. *
  5505. * Note: this function should only ever be called by monitorBuffer_
  5506. * and never directly
  5507. *
  5508. * @private
  5509. */
  5510. }, {
  5511. key: 'fillBuffer_',
  5512. value: function fillBuffer_() {
  5513. var _this = this;
  5514. if (!this.syncPoint_) {
  5515. this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
  5516. }
  5517. // see if we need to begin loading immediately
  5518. var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
  5519. segmentInfo = this.skipEmptySegments_(segmentInfo);
  5520. if (!segmentInfo) {
  5521. return;
  5522. }
  5523. if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
  5524. // We don't have the timestamp offset that we need to sync subtitles.
  5525. // Rerun on a timestamp offset or user interaction.
  5526. var checkTimestampOffset = function checkTimestampOffset() {
  5527. _this.state = 'READY';
  5528. if (!_this.paused()) {
  5529. // if not paused, queue a buffer check as soon as possible
  5530. _this.monitorBuffer_();
  5531. }
  5532. };
  5533. this.syncController_.one('timestampoffset', checkTimestampOffset);
  5534. this.state = 'WAITING_ON_TIMELINE';
  5535. return;
  5536. }
  5537. this.loadSegment_(segmentInfo);
  5538. }
  5539. /**
  5540. * Prevents the segment loader from requesting segments we know contain no subtitles
5541. * by walking forward until we find the next segment that is not already known
5542. * to be empty.
  5543. *
  5544. * @param {Object} segmentInfo
  5545. * a segment info object that describes the current segment
  5546. * @return {Object}
  5547. * a segment info object that describes the current segment
  5548. */
  5549. }, {
  5550. key: 'skipEmptySegments_',
  5551. value: function skipEmptySegments_(segmentInfo) {
  5552. while (segmentInfo && segmentInfo.segment.empty) {
  5553. segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
  5554. }
  5555. return segmentInfo;
  5556. }
  5557. /**
5558. * append a decrypted segment to the SourceBuffer through a SourceUpdater
  5559. *
  5560. * @private
  5561. */
  5562. }, {
  5563. key: 'handleSegment_',
  5564. value: function handleSegment_() {
  5565. var _this2 = this;
  5566. if (!this.pendingSegment_) {
  5567. this.state = 'READY';
  5568. return;
  5569. }
  5570. this.state = 'APPENDING';
  5571. var segmentInfo = this.pendingSegment_;
  5572. var segment = segmentInfo.segment;
5573. // Make sure that vtt.js has loaded; otherwise, wait until it finishes loading
  5574. if (typeof _globalWindow2['default'].WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
  5575. var _ret = (function () {
  5576. var loadHandler = function loadHandler() {
  5577. _this2.handleSegment_();
  5578. };
  5579. _this2.state = 'WAITING_ON_VTTJS';
  5580. _this2.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
  5581. _this2.subtitlesTrack_.tech_.one('vttjserror', function () {
  5582. _this2.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
  5583. _this2.error({
  5584. message: 'Error loading vtt.js'
  5585. });
  5586. _this2.state = 'READY';
  5587. _this2.pause();
  5588. _this2.trigger('error');
  5589. });
  5590. return {
  5591. v: undefined
  5592. };
  5593. })();
  5594. if (typeof _ret === 'object') return _ret.v;
  5595. }
  5596. segment.requested = true;
  5597. try {
  5598. this.parseVTTCues_(segmentInfo);
  5599. } catch (e) {
  5600. this.error({
  5601. message: e.message
  5602. });
  5603. this.state = 'READY';
  5604. this.pause();
  5605. return this.trigger('error');
  5606. }
  5607. this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
  5608. if (segmentInfo.isSyncRequest) {
  5609. this.trigger('syncinfoupdate');
  5610. this.pendingSegment_ = null;
  5611. this.state = 'READY';
  5612. return;
  5613. }
  5614. segmentInfo.byteLength = segmentInfo.bytes.byteLength;
  5615. this.mediaSecondsLoaded += segment.duration;
  5616. segmentInfo.cues.forEach(function (cue) {
  5617. _this2.subtitlesTrack_.addCue(cue);
  5618. });
  5619. this.handleUpdateEnd_();
  5620. }
  5621. /**
  5622. * Uses the WebVTT parser to parse the segment response
  5623. *
  5624. * @param {Object} segmentInfo
  5625. * a segment info object that describes the current segment
  5626. * @private
  5627. */
  5628. }, {
  5629. key: 'parseVTTCues_',
  5630. value: function parseVTTCues_(segmentInfo) {
  5631. var decoder = undefined;
  5632. var decodeBytesToString = false;
  5633. if (typeof _globalWindow2['default'].TextDecoder === 'function') {
  5634. decoder = new _globalWindow2['default'].TextDecoder('utf8');
  5635. } else {
  5636. decoder = _globalWindow2['default'].WebVTT.StringDecoder();
  5637. decodeBytesToString = true;
  5638. }
  5639. var parser = new _globalWindow2['default'].WebVTT.Parser(_globalWindow2['default'], _globalWindow2['default'].vttjs, decoder);
  5640. segmentInfo.cues = [];
  5641. segmentInfo.timestampmap = { MPEGTS: 0, LOCAL: 0 };
  5642. parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
  5643. parser.ontimestampmap = function (map) {
  5644. return segmentInfo.timestampmap = map;
  5645. };
  5646. parser.onparsingerror = function (error) {
  5647. _videoJs2['default'].log.warn('Error encountered when parsing cues: ' + error.message);
  5648. };
  5649. if (segmentInfo.segment.map) {
  5650. var mapData = segmentInfo.segment.map.bytes;
  5651. if (decodeBytesToString) {
  5652. mapData = uintToString(mapData);
  5653. }
  5654. parser.parse(mapData);
  5655. }
  5656. var segmentData = segmentInfo.bytes;
  5657. if (decodeBytesToString) {
  5658. segmentData = uintToString(segmentData);
  5659. }
  5660. parser.parse(segmentData);
  5661. parser.flush();
  5662. }
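// Minimal sketch of the parse flow above (assumes window.WebVTT has been
// loaded by vtt.js; the input string is hypothetical):
//
//   var parser = new window.WebVTT.Parser(window, window.vttjs,
//                                         window.WebVTT.StringDecoder());
//   var cues = [];
//   parser.oncue = cues.push.bind(cues);
//   parser.parse('WEBVTT\n\n00:00.000 --> 00:01.000\nhello\n');
//   parser.flush(); // cues now holds one cue spanning 0s to 1s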
  5663. /**
  5664. * Updates the start and end times of any cues parsed by the WebVTT parser using
  5665. * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
  5666. * from the SyncController
  5667. *
  5668. * @param {Object} segmentInfo
  5669. * a segment info object that describes the current segment
  5670. * @param {Object} mappingObj
  5671. * object containing a mapping from TS to media time
  5672. * @param {Object} playlist
  5673. * the playlist object containing the segment
  5674. * @private
  5675. */
  5676. }, {
  5677. key: 'updateTimeMapping_',
  5678. value: function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
  5679. var segment = segmentInfo.segment;
  5680. if (!mappingObj) {
  5681. // If the sync controller does not have a mapping of TS to Media Time for the
  5682. // timeline, then we don't have enough information to update the cue
  5683. // start/end times
  5684. return;
  5685. }
  5686. if (!segmentInfo.cues.length) {
  5687. // If there are no cues, we also do not have enough information to figure out
  5688. // segment timing. Mark that the segment contains no cues so we don't re-request
  5689. // an empty segment.
  5690. segment.empty = true;
  5691. return;
  5692. }
  5693. var timestampmap = segmentInfo.timestampmap;
  5694. var diff = timestampmap.MPEGTS / 90000 - timestampmap.LOCAL + mappingObj.mapping;
  5695. segmentInfo.cues.forEach(function (cue) {
  5696. // First convert cue time to TS time using the timestamp-map provided within the vtt
  5697. cue.startTime += diff;
  5698. cue.endTime += diff;
  5699. });
  5700. if (!playlist.syncInfo) {
  5701. var firstStart = segmentInfo.cues[0].startTime;
  5702. var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
  5703. playlist.syncInfo = {
  5704. mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
  5705. time: Math.min(firstStart, lastStart - segment.duration)
  5706. };
  5707. }
  5708. }
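// Worked example of `diff` above (values are hypothetical): with an
// X-TIMESTAMP-MAP of MPEGTS:900000,LOCAL:00:00:00.000 and a timeline mapping
// of -2,
//
//   diff = 900000 / 90000 - 0 + (-2) = 8
//
// so a cue authored at 00:00:01.000 is displayed at 9 seconds.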
  5709. }]);
  5710. return VTTSegmentLoader;
  5711. })(_segmentLoader2['default']);
  5712. exports['default'] = VTTSegmentLoader;
  5713. module.exports = exports['default'];
  5714. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  5715. },{"./bin-utils":2,"./segment-loader":14,"global/window":30,"videojs-contrib-media-sources/es5/remove-cues-from-track.js":71}],19:[function(require,module,exports){
  5716. (function (global){
  5717. /**
  5718. * @file xhr.js
  5719. */
  5720. /**
  5721. * A wrapper for videojs.xhr that tracks bandwidth.
  5722. *
  5723. * @param {Object} options options for the XHR
  5724. * @param {Function} callback the callback to call when done
  5725. * @return {Request} the xhr request that is going to be made
  5726. */
  5727. 'use strict';
  5728. Object.defineProperty(exports, '__esModule', {
  5729. value: true
  5730. });
  5731. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  5732. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  5733. var _videoJs2 = _interopRequireDefault(_videoJs);
  5734. var xhrFactory = function xhrFactory() {
  5735. var xhr = function XhrFunction(options, callback) {
  5736. // Add a default timeout for all hls requests
  5737. options = (0, _videoJs.mergeOptions)({
  5738. timeout: 45e3
  5739. }, options);
  5740. // Allow an optional user-specified function to modify the option
  5741. // object before we construct the xhr request
  5742. var beforeRequest = XhrFunction.beforeRequest || _videoJs2['default'].Hls.xhr.beforeRequest;
  5743. if (beforeRequest && typeof beforeRequest === 'function') {
  5744. var newOptions = beforeRequest(options);
  5745. if (newOptions) {
  5746. options = newOptions;
  5747. }
  5748. }
  5749. var request = (0, _videoJs.xhr)(options, function (error, response) {
  5750. var reqResponse = request.response;
  5751. if (!error && reqResponse) {
  5752. request.responseTime = Date.now();
  5753. request.roundTripTime = request.responseTime - request.requestTime;
  5754. request.bytesReceived = reqResponse.byteLength || reqResponse.length;
  5755. if (!request.bandwidth) {
  5756. request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
  5757. }
  5758. }
  5759. // videojs.xhr now uses a specific code on the error
  5760. // object to signal that a request has timed out instead
  5761. // of setting a boolean on the request object
  5762. if (error && error.code === 'ETIMEDOUT') {
  5763. request.timedout = true;
  5764. }
  5765. // videojs.xhr no longer considers status codes outside of 200 and 0
  5766. // (for file uris) to be errors, but the old XHR did, so emulate that
  5767. // behavior. Status 206 may be used in response to byterange requests.
  5768. if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
  5769. error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
  5770. }
  5771. callback(error, request);
  5772. });
  5773. var originalAbort = request.abort;
  5774. request.abort = function () {
  5775. request.aborted = true;
  5776. return originalAbort.apply(request, arguments);
  5777. };
  5778. request.uri = options.uri;
  5779. request.requestTime = Date.now();
  5780. return request;
  5781. };
  5782. return xhr;
  5783. };
  5784. exports['default'] = xhrFactory;
  5785. module.exports = exports['default'];
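// Worked example of the bandwidth estimate above (numbers are hypothetical):
// a 500000-byte response with a 250ms round trip yields
//
//   bandwidth = Math.floor(500000 / 250 * 8 * 1000); // 16000000 bits/sec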
  5786. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  5787. },{}],20:[function(require,module,exports){
  5788. /**
  5789. * @file aes.js
  5790. *
  5791. * This file contains an adaptation of the AES decryption algorithm
5792. * from the Stanford JavaScript Cryptography Library. That work is
  5793. * covered by the following copyright and permissions notice:
  5794. *
  5795. * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
  5796. * All rights reserved.
  5797. *
  5798. * Redistribution and use in source and binary forms, with or without
  5799. * modification, are permitted provided that the following conditions are
  5800. * met:
  5801. *
  5802. * 1. Redistributions of source code must retain the above copyright
  5803. * notice, this list of conditions and the following disclaimer.
  5804. *
  5805. * 2. Redistributions in binary form must reproduce the above
  5806. * copyright notice, this list of conditions and the following
  5807. * disclaimer in the documentation and/or other materials provided
  5808. * with the distribution.
  5809. *
  5810. * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
  5811. * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  5812. * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
  5813. * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
  5814. * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  5815. * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  5816. * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
  5817. * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  5818. * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
  5819. * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
  5820. * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  5821. *
  5822. * The views and conclusions contained in the software and documentation
  5823. * are those of the authors and should not be interpreted as representing
  5824. * official policies, either expressed or implied, of the authors.
  5825. */
  5826. /**
  5827. * Expand the S-box tables.
  5828. *
  5829. * @private
  5830. */
  5831. 'use strict';
  5832. Object.defineProperty(exports, '__esModule', {
  5833. value: true
  5834. });
  5835. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  5836. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  5837. var precompute = function precompute() {
  5838. var tables = [[[], [], [], [], []], [[], [], [], [], []]];
  5839. var encTable = tables[0];
  5840. var decTable = tables[1];
  5841. var sbox = encTable[4];
  5842. var sboxInv = decTable[4];
  5843. var i = undefined;
  5844. var x = undefined;
  5845. var xInv = undefined;
  5846. var d = [];
  5847. var th = [];
  5848. var x2 = undefined;
  5849. var x4 = undefined;
  5850. var x8 = undefined;
  5851. var s = undefined;
  5852. var tEnc = undefined;
  5853. var tDec = undefined;
  5854. // Compute double and third tables
  5855. for (i = 0; i < 256; i++) {
  5856. th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
  5857. }
  5858. for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
  5859. // Compute sbox
  5860. s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
  5861. s = s >> 8 ^ s & 255 ^ 99;
  5862. sbox[x] = s;
  5863. sboxInv[s] = x;
  5864. // Compute MixColumns
  5865. x8 = d[x4 = d[x2 = d[x]]];
  5866. tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
  5867. tEnc = d[s] * 0x101 ^ s * 0x1010100;
  5868. for (i = 0; i < 4; i++) {
  5869. encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
  5870. decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
  5871. }
  5872. }
  5873. // Compactify. Considerable speedup on Firefox.
  5874. for (i = 0; i < 5; i++) {
  5875. encTable[i] = encTable[i].slice(0);
  5876. decTable[i] = decTable[i].slice(0);
  5877. }
  5878. return tables;
  5879. };
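// Note on the double table above: d[i] = i << 1 ^ (i >> 7) * 283 is
// multiplication by 2 in the AES field GF(2^8), reducing by the polynomial
// x^8 + x^4 + x^3 + x + 1 (0x11B = 283). For example:
//
//   d[0x80] = 0x100 ^ 0x11B; // = 0x1B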
  5880. var aesTables = null;
  5881. /**
  5882. * Schedule out an AES key for both encryption and decryption. This
  5883. * is a low-level class. Use a cipher mode to do bulk encryption.
  5884. *
  5885. * @class AES
  5886. * @param key {Array} The key as an array of 4, 6 or 8 words.
  5887. */
  5888. var AES = (function () {
  5889. function AES(key) {
  5890. _classCallCheck(this, AES);
  5891. /**
  5892. * The expanded S-box and inverse S-box tables. These will be computed
  5893. * on the client so that we don't have to send them down the wire.
  5894. *
  5895. * There are two tables, _tables[0] is for encryption and
  5896. * _tables[1] is for decryption.
  5897. *
  5898. * The first 4 sub-tables are the expanded S-box with MixColumns. The
5899. * last sub-table (_tables[0][4] and _tables[1][4]) is the S-box itself.
  5900. *
  5901. * @private
  5902. */
  5903. // if we have yet to precompute the S-box tables
  5904. // do so now
  5905. if (!aesTables) {
  5906. aesTables = precompute();
  5907. }
  5908. // then make a copy of that object for use
  5909. this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
  5910. var i = undefined;
  5911. var j = undefined;
  5912. var tmp = undefined;
  5913. var encKey = undefined;
  5914. var decKey = undefined;
  5915. var sbox = this._tables[0][4];
  5916. var decTable = this._tables[1];
  5917. var keyLen = key.length;
  5918. var rcon = 1;
  5919. if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
  5920. throw new Error('Invalid aes key size');
  5921. }
  5922. encKey = key.slice(0);
  5923. decKey = [];
  5924. this._key = [encKey, decKey];
  5925. // schedule encryption keys
  5926. for (i = keyLen; i < 4 * keyLen + 28; i++) {
  5927. tmp = encKey[i - 1];
  5928. // apply sbox
  5929. if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
  5930. tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255];
  5931. // shift rows and add rcon
  5932. if (i % keyLen === 0) {
  5933. tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
  5934. rcon = rcon << 1 ^ (rcon >> 7) * 283;
  5935. }
  5936. }
  5937. encKey[i] = encKey[i - keyLen] ^ tmp;
  5938. }
  5939. // schedule decryption keys
  5940. for (j = 0; i; j++, i--) {
  5941. tmp = encKey[j & 3 ? i : i - 4];
  5942. if (i <= 4 || j < 4) {
  5943. decKey[j] = tmp;
  5944. } else {
  5945. decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
  5946. }
  5947. }
  5948. }
  5949. /**
  5950. * Decrypt 16 bytes, specified as four 32-bit words.
  5951. *
  5952. * @param {Number} encrypted0 the first word to decrypt
  5953. * @param {Number} encrypted1 the second word to decrypt
  5954. * @param {Number} encrypted2 the third word to decrypt
  5955. * @param {Number} encrypted3 the fourth word to decrypt
  5956. * @param {Int32Array} out the array to write the decrypted words
  5957. * into
  5958. * @param {Number} offset the offset into the output array to start
  5959. * writing results
  5960. * @return {Array} The plaintext.
  5961. */
  5962. _createClass(AES, [{
  5963. key: 'decrypt',
  5964. value: function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
  5965. var key = this._key[1];
  5966. // state variables a,b,c,d are loaded with pre-whitened data
  5967. var a = encrypted0 ^ key[0];
  5968. var b = encrypted3 ^ key[1];
  5969. var c = encrypted2 ^ key[2];
  5970. var d = encrypted1 ^ key[3];
  5971. var a2 = undefined;
  5972. var b2 = undefined;
  5973. var c2 = undefined;
  5974. // key.length === 2 ?
  5975. var nInnerRounds = key.length / 4 - 2;
  5976. var i = undefined;
  5977. var kIndex = 4;
  5978. var table = this._tables[1];
  5979. // load up the tables
  5980. var table0 = table[0];
  5981. var table1 = table[1];
  5982. var table2 = table[2];
  5983. var table3 = table[3];
  5984. var sbox = table[4];
  5985. // Inner rounds. Cribbed from OpenSSL.
  5986. for (i = 0; i < nInnerRounds; i++) {
  5987. a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
  5988. b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
  5989. c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
  5990. d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
  5991. kIndex += 4;
  5992. a = a2;b = b2;c = c2;
  5993. }
  5994. // Last round.
  5995. for (i = 0; i < 4; i++) {
  5996. out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
  5997. a2 = a;a = b;b = c;c = d;d = a2;
  5998. }
  5999. }
  6000. }]);
  6001. return AES;
  6002. })();
  6003. exports['default'] = AES;
  6004. module.exports = exports['default'];
  6005. },{}],21:[function(require,module,exports){
  6006. /**
  6007. * @file async-stream.js
  6008. */
  6009. 'use strict';
  6010. Object.defineProperty(exports, '__esModule', {
  6011. value: true
  6012. });
  6013. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  6014. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  6015. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  6016. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  6017. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  6018. var _stream = require('./stream');
  6019. var _stream2 = _interopRequireDefault(_stream);
  6020. /**
6021. * A wrapper around the Stream class that uses setTimeout
6022. * to run stream "jobs" asynchronously
  6023. *
  6024. * @class AsyncStream
  6025. * @extends Stream
  6026. */
  6027. var AsyncStream = (function (_Stream) {
  6028. _inherits(AsyncStream, _Stream);
  6029. function AsyncStream() {
  6030. _classCallCheck(this, AsyncStream);
  6031. _get(Object.getPrototypeOf(AsyncStream.prototype), 'constructor', this).call(this, _stream2['default']);
  6032. this.jobs = [];
  6033. this.delay = 1;
  6034. this.timeout_ = null;
  6035. }
  6036. /**
  6037. * process an async job
  6038. *
  6039. * @private
  6040. */
  6041. _createClass(AsyncStream, [{
  6042. key: 'processJob_',
  6043. value: function processJob_() {
  6044. this.jobs.shift()();
  6045. if (this.jobs.length) {
  6046. this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
  6047. } else {
  6048. this.timeout_ = null;
  6049. }
  6050. }
  6051. /**
  6052. * push a job into the stream
  6053. *
  6054. * @param {Function} job the job to push into the stream
  6055. */
  6056. }, {
  6057. key: 'push',
  6058. value: function push(job) {
  6059. this.jobs.push(job);
  6060. if (!this.timeout_) {
  6061. this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
  6062. }
  6063. }
  6064. }]);
  6065. return AsyncStream;
  6066. })(_stream2['default']);
  6067. exports['default'] = AsyncStream;
  6068. module.exports = exports['default'];
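// Usage sketch (illustrative; the jobs are hypothetical):
//
//   var stream = new AsyncStream();
//   stream.push(function() { /* decrypt chunk 1 */ });
//   stream.push(function() { /* decrypt chunk 2 */ });
//   // each job runs in its own setTimeout(fn, 1) turn, yielding to the
//   // event loop between chunks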
  6069. },{"./stream":24}],22:[function(require,module,exports){
  6070. /**
  6071. * @file decrypter.js
  6072. *
  6073. * An asynchronous implementation of AES-128 CBC decryption with
  6074. * PKCS#7 padding.
  6075. */
  6076. 'use strict';
  6077. Object.defineProperty(exports, '__esModule', {
  6078. value: true
  6079. });
  6080. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  6081. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  6082. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  6083. var _aes = require('./aes');
  6084. var _aes2 = _interopRequireDefault(_aes);
  6085. var _asyncStream = require('./async-stream');
  6086. var _asyncStream2 = _interopRequireDefault(_asyncStream);
  6087. var _pkcs7 = require('pkcs7');
  6088. /**
  6089. * Convert network-order (big-endian) bytes into their little-endian
  6090. * representation.
  6091. */
  6092. var ntoh = function ntoh(word) {
  6093. return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
  6094. };
  6095. /**
  6096. * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
  6097. *
  6098. * @param {Uint8Array} encrypted the encrypted bytes
  6099. * @param {Uint32Array} key the bytes of the decryption key
  6100. * @param {Uint32Array} initVector the initialization vector (IV) to
  6101. * use for the first round of CBC.
  6102. * @return {Uint8Array} the decrypted bytes
  6103. *
  6104. * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
  6105. * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
  6106. * @see https://tools.ietf.org/html/rfc2315
  6107. */
  6108. var decrypt = function decrypt(encrypted, key, initVector) {
  6109. // word-level access to the encrypted bytes
  6110. var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
  6111. var decipher = new _aes2['default'](Array.prototype.slice.call(key));
  6112. // byte and word-level access for the decrypted output
  6113. var decrypted = new Uint8Array(encrypted.byteLength);
  6114. var decrypted32 = new Int32Array(decrypted.buffer);
  6115. // temporary variables for working with the IV, encrypted, and
  6116. // decrypted data
  6117. var init0 = undefined;
  6118. var init1 = undefined;
  6119. var init2 = undefined;
  6120. var init3 = undefined;
  6121. var encrypted0 = undefined;
  6122. var encrypted1 = undefined;
  6123. var encrypted2 = undefined;
  6124. var encrypted3 = undefined;
  6125. // iteration variable
  6126. var wordIx = undefined;
6127. // pull out the words of the IV to ensure we don't modify the
6128. // passed-in reference and to allow easier access
  6129. init0 = initVector[0];
  6130. init1 = initVector[1];
  6131. init2 = initVector[2];
  6132. init3 = initVector[3];
  6133. // decrypt four word sequences, applying cipher-block chaining (CBC)
  6134. // to each decrypted block
  6135. for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
  6136. // convert big-endian (network order) words into little-endian
  6137. // (javascript order)
  6138. encrypted0 = ntoh(encrypted32[wordIx]);
  6139. encrypted1 = ntoh(encrypted32[wordIx + 1]);
  6140. encrypted2 = ntoh(encrypted32[wordIx + 2]);
  6141. encrypted3 = ntoh(encrypted32[wordIx + 3]);
  6142. // decrypt the block
  6143. decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx);
  6144. // XOR with the IV, and restore network byte-order to obtain the
  6145. // plaintext
  6146. decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
  6147. decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
  6148. decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
  6149. decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);
  6150. // setup the IV for the next round
  6151. init0 = encrypted0;
  6152. init1 = encrypted1;
  6153. init2 = encrypted2;
  6154. init3 = encrypted3;
  6155. }
  6156. return decrypted;
  6157. };
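// Usage sketch for the function above (all values hypothetical; the
// ciphertext length must be a multiple of 16 bytes):
//
//   var padded = decrypt(
//     encryptedBytes, // Uint8Array of ciphertext
//     new Uint32Array([k0, k1, k2, k3]), // 128-bit key as four words
//     new Uint32Array([iv0, iv1, iv2, iv3]) // CBC IV as four words
//   );
//   // PKCS#7 padding is still attached; the Decrypter below strips it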
  6158. exports.decrypt = decrypt;
  6159. /**
  6160. * The `Decrypter` class that manages decryption of AES
  6161. * data through `AsyncStream` objects and the `decrypt`
  6162. * function
  6163. *
  6164. * @param {Uint8Array} encrypted the encrypted bytes
  6165. * @param {Uint32Array} key the bytes of the decryption key
  6166. * @param {Uint32Array} initVector the initialization vector (IV) to
  6167. * @param {Function} done the function to run when done
  6168. * @class Decrypter
  6169. */
  6170. var Decrypter = (function () {
  6171. function Decrypter(encrypted, key, initVector, done) {
  6172. _classCallCheck(this, Decrypter);
  6173. var step = Decrypter.STEP;
  6174. var encrypted32 = new Int32Array(encrypted.buffer);
  6175. var decrypted = new Uint8Array(encrypted.byteLength);
  6176. var i = 0;
  6177. this.asyncStream_ = new _asyncStream2['default']();
  6178. // split up the encryption job and do the individual chunks asynchronously
  6179. this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
  6180. for (i = step; i < encrypted32.length; i += step) {
  6181. initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
  6182. this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
  6183. }
  6184. // invoke the done() callback when everything is finished
  6185. this.asyncStream_.push(function () {
  6186. // remove pkcs#7 padding from the decrypted bytes
  6187. done(null, (0, _pkcs7.unpad)(decrypted));
  6188. });
  6189. }
  6190. /**
6191. * a getter for STEP, the maximum number of 32-bit words to process at one time
6192. *
6193. * @return {Number} the value of STEP, 32000
  6194. */
  6195. _createClass(Decrypter, [{
  6196. key: 'decryptChunk_',
  6197. /**
  6198. * @private
  6199. */
  6200. value: function decryptChunk_(encrypted, key, initVector, decrypted) {
  6201. return function () {
  6202. var bytes = decrypt(encrypted, key, initVector);
  6203. decrypted.set(bytes, encrypted.byteOffset);
  6204. };
  6205. }
  6206. }], [{
  6207. key: 'STEP',
  6208. get: function get() {
  6209. // 4 * 8000;
  6210. return 32000;
  6211. }
  6212. }]);
  6213. return Decrypter;
  6214. })();
  6215. exports.Decrypter = Decrypter;
  6216. exports['default'] = {
  6217. Decrypter: Decrypter,
  6218. decrypt: decrypt
  6219. };
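// Usage sketch (illustrative; the input arrays are hypothetical):
//
//   new Decrypter(encryptedBytes, keyWords, ivWords, function(err, bytes) {
//     // `bytes` is the plaintext with PKCS#7 padding already removed;
//     // decryption ran in fixed-size chunks on an AsyncStream so the
//     // main thread was never blocked for long
//   });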
  6220. },{"./aes":20,"./async-stream":21,"pkcs7":26}],23:[function(require,module,exports){
  6221. /**
  6222. * @file index.js
  6223. *
  6224. * Index module to easily import the primary components of AES-128
  6225. * decryption. Like this:
  6226. *
  6227. * ```js
  6228. * import {Decrypter, decrypt, AsyncStream} from 'aes-decrypter';
  6229. * ```
  6230. */
  6231. 'use strict';
  6232. Object.defineProperty(exports, '__esModule', {
  6233. value: true
  6234. });
  6235. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  6236. var _decrypter = require('./decrypter');
  6237. var _asyncStream = require('./async-stream');
  6238. var _asyncStream2 = _interopRequireDefault(_asyncStream);
  6239. exports['default'] = {
  6240. decrypt: _decrypter.decrypt,
  6241. Decrypter: _decrypter.Decrypter,
  6242. AsyncStream: _asyncStream2['default']
  6243. };
  6244. module.exports = exports['default'];
  6245. },{"./async-stream":21,"./decrypter":22}],24:[function(require,module,exports){
  6246. arguments[4][16][0].apply(exports,arguments)
  6247. },{"dup":16}],25:[function(require,module,exports){
  6248. /*
  6249. * pkcs7.pad
  6250. * https://github.com/brightcove/pkcs7
  6251. *
  6252. * Copyright (c) 2014 Brightcove
  6253. * Licensed under the apache2 license.
  6254. */
  6255. 'use strict';
  6256. var PADDING;
  6257. /**
  6258. * Returns a new Uint8Array that is padded with PKCS#7 padding.
  6259. * @param plaintext {Uint8Array} the input bytes before encryption
  6260. * @return {Uint8Array} the padded bytes
  6261. * @see http://tools.ietf.org/html/rfc5652
  6262. */
  6263. module.exports = function pad(plaintext) {
  6264. var padding = PADDING[(plaintext.byteLength % 16) || 0],
  6265. result = new Uint8Array(plaintext.byteLength + padding.length);
  6266. result.set(plaintext);
  6267. result.set(padding, plaintext.byteLength);
  6268. return result;
  6269. };
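// Worked example of the padding above (input is hypothetical): a 12-byte
// plaintext needs 4 bytes of padding, so PADDING[12] = [4, 4, 4, 4] is
// appended; a 16-byte plaintext gets a full extra block of sixteen 16s.
// unpad() later reads the final byte to know how much to strip.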
  6270. // pre-define the padding values
  6271. PADDING = [
  6272. [16, 16, 16, 16,
  6273. 16, 16, 16, 16,
  6274. 16, 16, 16, 16,
  6275. 16, 16, 16, 16],
  6276. [15, 15, 15, 15,
  6277. 15, 15, 15, 15,
  6278. 15, 15, 15, 15,
  6279. 15, 15, 15],
  6280. [14, 14, 14, 14,
  6281. 14, 14, 14, 14,
  6282. 14, 14, 14, 14,
  6283. 14, 14],
  6284. [13, 13, 13, 13,
  6285. 13, 13, 13, 13,
  6286. 13, 13, 13, 13,
  6287. 13],
  6288. [12, 12, 12, 12,
  6289. 12, 12, 12, 12,
  6290. 12, 12, 12, 12],
  6291. [11, 11, 11, 11,
  6292. 11, 11, 11, 11,
  6293. 11, 11, 11],
  6294. [10, 10, 10, 10,
  6295. 10, 10, 10, 10,
  6296. 10, 10],
  6297. [9, 9, 9, 9,
  6298. 9, 9, 9, 9,
  6299. 9],
  6300. [8, 8, 8, 8,
  6301. 8, 8, 8, 8],
  6302. [7, 7, 7, 7,
  6303. 7, 7, 7],
  6304. [6, 6, 6, 6,
  6305. 6, 6],
  6306. [5, 5, 5, 5,
  6307. 5],
  6308. [4, 4, 4, 4],
  6309. [3, 3, 3],
  6310. [2, 2],
  6311. [1]
  6312. ];
  6313. },{}],26:[function(require,module,exports){
  6314. /*
  6315. * pkcs7
  6316. * https://github.com/brightcove/pkcs7
  6317. *
  6318. * Copyright (c) 2014 Brightcove
  6319. * Licensed under the apache2 license.
  6320. */
  6321. 'use strict';
  6322. exports.pad = require('./pad.js');
  6323. exports.unpad = require('./unpad.js');
  6324. },{"./pad.js":25,"./unpad.js":27}],27:[function(require,module,exports){
  6325. /*
  6326. * pkcs7.unpad
  6327. * https://github.com/brightcove/pkcs7
  6328. *
  6329. * Copyright (c) 2014 Brightcove
  6330. * Licensed under the apache2 license.
  6331. */
  6332. 'use strict';
  6333. /**
  6334. * Returns the subarray of a Uint8Array without PKCS#7 padding.
  6335. * @param padded {Uint8Array} unencrypted bytes that have been padded
  6336. * @return {Uint8Array} the unpadded bytes
  6337. * @see http://tools.ietf.org/html/rfc5652
  6338. */
  6339. module.exports = function unpad(padded) {
  6340. return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
  6341. };
  6342. },{}],28:[function(require,module,exports){
  6343. },{}],29:[function(require,module,exports){
  6344. (function (global){
  6345. var topLevel = typeof global !== 'undefined' ? global :
  6346. typeof window !== 'undefined' ? window : {}
  6347. var minDoc = require('min-document');
  6348. var doccy;
  6349. if (typeof document !== 'undefined') {
  6350. doccy = document;
  6351. } else {
  6352. doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];
  6353. if (!doccy) {
  6354. doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;
  6355. }
  6356. }
  6357. module.exports = doccy;
  6358. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  6359. },{"min-document":28}],30:[function(require,module,exports){
  6360. (function (global){
  6361. var win;
  6362. if (typeof window !== "undefined") {
  6363. win = window;
  6364. } else if (typeof global !== "undefined") {
  6365. win = global;
  6366. } else if (typeof self !== "undefined"){
  6367. win = self;
  6368. } else {
  6369. win = {};
  6370. }
  6371. module.exports = win;
  6372. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  6373. },{}],31:[function(require,module,exports){
  6374. 'use strict';
  6375. var _lineStream = require('./line-stream');
  6376. var _lineStream2 = _interopRequireDefault(_lineStream);
  6377. var _parseStream = require('./parse-stream');
  6378. var _parseStream2 = _interopRequireDefault(_parseStream);
  6379. var _parser = require('./parser');
  6380. var _parser2 = _interopRequireDefault(_parser);
  6381. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  6382. module.exports = {
  6383. LineStream: _lineStream2['default'],
  6384. ParseStream: _parseStream2['default'],
  6385. Parser: _parser2['default']
  6386. }; /**
  6387. * @file m3u8/index.js
  6388. *
  6389. * Utilities for parsing M3U8 files. If the entire manifest is available,
  6390. * `Parser` will create an object representation with enough detail for managing
  6391. * playback. `ParseStream` and `LineStream` are lower-level parsing primitives
  6392. * that do not assume the entirety of the manifest is ready and expose a
  6393. * ReadableStream-like interface.
  6394. */
  6395. },{"./line-stream":32,"./parse-stream":33,"./parser":34}],32:[function(require,module,exports){
  6396. 'use strict';
  6397. Object.defineProperty(exports, "__esModule", {
  6398. value: true
  6399. });
  6400. var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
  6401. var _stream = require('./stream');
  6402. var _stream2 = _interopRequireDefault(_stream);
  6403. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  6404. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
  6405. function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
  6406. function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
  6407. * @file m3u8/line-stream.js
  6408. */
  6409. /**
  6410. * A stream that buffers string input and generates a `data` event for each
  6411. * line.
  6412. *
  6413. * @class LineStream
  6414. * @extends Stream
  6415. */
  6416. var LineStream = function (_Stream) {
  6417. _inherits(LineStream, _Stream);
  6418. function LineStream() {
  6419. _classCallCheck(this, LineStream);
  6420. var _this = _possibleConstructorReturn(this, (LineStream.__proto__ || Object.getPrototypeOf(LineStream)).call(this));
  6421. _this.buffer = '';
  6422. return _this;
  6423. }
  6424. /**
  6425. * Add new data to be parsed.
  6426. *
  6427. * @param {String} data the text to process
  6428. */
  6429. _createClass(LineStream, [{
  6430. key: 'push',
  6431. value: function push(data) {
  6432. var nextNewline = void 0;
  6433. this.buffer += data;
  6434. nextNewline = this.buffer.indexOf('\n');
  6435. for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {
  6436. this.trigger('data', this.buffer.substring(0, nextNewline));
  6437. this.buffer = this.buffer.substring(nextNewline + 1);
  6438. }
  6439. }
  6440. }]);
  6441. return LineStream;
  6442. }(_stream2['default']);
  6443. exports['default'] = LineStream;
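// Usage sketch (illustrative input; `on` comes from the Stream base class):
//
//   var lines = new LineStream();
//   lines.on('data', function(line) { /* one line, newline stripped */ });
//   lines.push('#EXTM3U\n#EXT-X-VERSION:3\n#EXTI');
//   // fires twice; '#EXTI' stays buffered until the next newline arrives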
  6444. },{"./stream":35}],33:[function(require,module,exports){
  6445. 'use strict';
  6446. Object.defineProperty(exports, "__esModule", {
  6447. value: true
  6448. });
  6449. var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
  6450. var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
  6451. var _stream = require('./stream');
  6452. var _stream2 = _interopRequireDefault(_stream);
  6453. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  6454. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
  6455. function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
  6456. function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
  6457. * @file m3u8/parse-stream.js
  6458. */
  6459. /**
  6460. * "forgiving" attribute list psuedo-grammar:
  6461. * attributes -> keyvalue (',' keyvalue)*
  6462. * keyvalue -> key '=' value
  6463. * key -> [^=]*
  6464. * value -> '"' [^"]* '"' | [^,]*
  6465. */
  6466. var attributeSeparator = function attributeSeparator() {
  6467. var key = '[^=]*';
  6468. var value = '"[^"]*"|[^,]*';
  6469. var keyvalue = '(?:' + key + ')=(?:' + value + ')';
  6470. return new RegExp('(?:^|,)(' + keyvalue + ')');
  6471. };
  6472. /**
6473. * Parse attributes from a line given the separator
6474. *
6475. * @param {String} attributes the attribute line to parse
  6476. */
  6477. var parseAttributes = function parseAttributes(attributes) {
  6478. // split the string using attributes as the separator
  6479. var attrs = attributes.split(attributeSeparator());
  6480. var result = {};
  6481. var i = attrs.length;
  6482. var attr = void 0;
  6483. while (i--) {
  6484. // filter out unmatched portions of the string
  6485. if (attrs[i] === '') {
  6486. continue;
  6487. }
  6488. // split the key and value
  6489. attr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1);
  6490. // trim whitespace and remove optional quotes around the value
  6491. attr[0] = attr[0].replace(/^\s+|\s+$/g, '');
  6492. attr[1] = attr[1].replace(/^\s+|\s+$/g, '');
  6493. attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');
  6494. result[attr[0]] = attr[1];
  6495. }
  6496. return result;
  6497. };
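// Example of the forgiving parse above (the attribute line is hypothetical):
//
//   parseAttributes('BANDWIDTH=540242,CODECS="avc1.42001e,mp4a.40.2"');
//   // -> { BANDWIDTH: '540242', CODECS: 'avc1.42001e,mp4a.40.2' }
//
// quoted values may contain commas; quotes are stripped and all values
// remain strings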
  6498. /**
  6499. * A line-level M3U8 parser event stream. It expects to receive input one
  6500. * line at a time and performs a context-free parse of its contents. A stream
  6501. * interpretation of a manifest can be useful if the manifest is expected to
  6502. * be too large to fit comfortably into memory or the entirety of the input
  6503. * is not immediately available. Otherwise, it's probably much easier to work
  6504. * with a regular `Parser` object.
  6505. *
  6506. * Produces `data` events with an object that captures the parser's
6507. * interpretation of the input. That object has a property `type` that is one
6508. * of `uri`, `comment`, or `tag`. URIs only have a single additional
6509. * property, `uri`, which captures the entirety of the input without
  6510. * interpretation. Comments similarly have a single additional property
  6511. * `text` which is the input without the leading `#`.
  6512. *
  6513. * Tags always have a property `tagType` which is the lower-cased version of
  6514. * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
  6515. * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
  6516. * tags are given the tag type `unknown` and a single additional property
  6517. * `data` with the remainder of the input.
  6518. *
  6519. * @class ParseStream
  6520. * @extends Stream
  6521. */
  6522. var ParseStream = function (_Stream) {
  6523. _inherits(ParseStream, _Stream);
  6524. function ParseStream() {
  6525. _classCallCheck(this, ParseStream);
  6526. return _possibleConstructorReturn(this, (ParseStream.__proto__ || Object.getPrototypeOf(ParseStream)).call(this));
  6527. }
  6528. /**
  6529. * Parses an additional line of input.
  6530. *
  6531. * @param {String} line a single line of an M3U8 file to parse
  6532. */
  6533. _createClass(ParseStream, [{
  6534. key: 'push',
  6535. value: function push(line) {
  6536. var match = void 0;
  6537. var event = void 0;
  6538. // strip whitespace
  6539. line = line.replace(/^[\u0000\s]+|[\u0000\s]+$/g, '');
  6540. if (line.length === 0) {
  6541. // ignore empty lines
  6542. return;
  6543. }
  6544. // URIs
  6545. if (line[0] !== '#') {
  6546. this.trigger('data', {
  6547. type: 'uri',
  6548. uri: line
  6549. });
  6550. return;
  6551. }
  6552. // Comments
  6553. if (line.indexOf('#EXT') !== 0) {
  6554. this.trigger('data', {
  6555. type: 'comment',
  6556. text: line.slice(1)
  6557. });
  6558. return;
  6559. }
  6560. // strip off any carriage returns here so the regex matching
  6561. // doesn't have to account for them.
6562. line = line.replace(/\r/g, '');
  6563. // Tags
  6564. match = /^#EXTM3U/.exec(line);
  6565. if (match) {
  6566. this.trigger('data', {
  6567. type: 'tag',
  6568. tagType: 'm3u'
  6569. });
  6570. return;
  6571. }
  6572. match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(line);
  6573. if (match) {
  6574. event = {
  6575. type: 'tag',
  6576. tagType: 'inf'
  6577. };
  6578. if (match[1]) {
  6579. event.duration = parseFloat(match[1]);
  6580. }
  6581. if (match[2]) {
  6582. event.title = match[2];
  6583. }
  6584. this.trigger('data', event);
  6585. return;
  6586. }
  6587. match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(line);
  6588. if (match) {
  6589. event = {
  6590. type: 'tag',
  6591. tagType: 'targetduration'
  6592. };
  6593. if (match[1]) {
  6594. event.duration = parseInt(match[1], 10);
  6595. }
  6596. this.trigger('data', event);
  6597. return;
  6598. }
  6599. match = /^#ZEN-TOTAL-DURATION:?([0-9.]*)?/.exec(line);
  6600. if (match) {
  6601. event = {
  6602. type: 'tag',
  6603. tagType: 'totalduration'
  6604. };
  6605. if (match[1]) {
  6606. event.duration = parseInt(match[1], 10);
  6607. }
  6608. this.trigger('data', event);
  6609. return;
  6610. }
  6611. match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(line);
  6612. if (match) {
  6613. event = {
  6614. type: 'tag',
  6615. tagType: 'version'
  6616. };
  6617. if (match[1]) {
  6618. event.version = parseInt(match[1], 10);
  6619. }
  6620. this.trigger('data', event);
  6621. return;
  6622. }
  6623. match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(line);
  6624. if (match) {
  6625. event = {
  6626. type: 'tag',
  6627. tagType: 'media-sequence'
  6628. };
  6629. if (match[1]) {
  6630. event.number = parseInt(match[1], 10);
  6631. }
  6632. this.trigger('data', event);
  6633. return;
  6634. }
  6635. match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(line);
  6636. if (match) {
  6637. event = {
  6638. type: 'tag',
  6639. tagType: 'discontinuity-sequence'
  6640. };
  6641. if (match[1]) {
  6642. event.number = parseInt(match[1], 10);
  6643. }
  6644. this.trigger('data', event);
  6645. return;
  6646. }
  6647. match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(line);
  6648. if (match) {
  6649. event = {
  6650. type: 'tag',
  6651. tagType: 'playlist-type'
  6652. };
  6653. if (match[1]) {
  6654. event.playlistType = match[1];
  6655. }
  6656. this.trigger('data', event);
  6657. return;
  6658. }
  6659. match = /^#EXT-X-BYTERANGE:?([0-9.]*)?@?([0-9.]*)?/.exec(line);
  6660. if (match) {
  6661. event = {
  6662. type: 'tag',
  6663. tagType: 'byterange'
  6664. };
  6665. if (match[1]) {
  6666. event.length = parseInt(match[1], 10);
  6667. }
  6668. if (match[2]) {
  6669. event.offset = parseInt(match[2], 10);
  6670. }
  6671. this.trigger('data', event);
  6672. return;
  6673. }
  6674. match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(line);
  6675. if (match) {
  6676. event = {
  6677. type: 'tag',
  6678. tagType: 'allow-cache'
  6679. };
  6680. if (match[1]) {
  6681. event.allowed = !/NO/.test(match[1]);
  6682. }
  6683. this.trigger('data', event);
  6684. return;
  6685. }
  6686. match = /^#EXT-X-MAP:?(.*)$/.exec(line);
  6687. if (match) {
  6688. event = {
  6689. type: 'tag',
  6690. tagType: 'map'
  6691. };
  6692. if (match[1]) {
  6693. var attributes = parseAttributes(match[1]);
  6694. if (attributes.URI) {
  6695. event.uri = attributes.URI;
  6696. }
  6697. if (attributes.BYTERANGE) {
  6698. var _attributes$BYTERANGE = attributes.BYTERANGE.split('@'),
  6699. _attributes$BYTERANGE2 = _slicedToArray(_attributes$BYTERANGE, 2),
  6700. length = _attributes$BYTERANGE2[0],
  6701. offset = _attributes$BYTERANGE2[1];
  6702. event.byterange = {};
  6703. if (length) {
  6704. event.byterange.length = parseInt(length, 10);
  6705. }
  6706. if (offset) {
  6707. event.byterange.offset = parseInt(offset, 10);
  6708. }
  6709. }
  6710. }
  6711. this.trigger('data', event);
  6712. return;
  6713. }
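// For example (illustrative), `#EXT-X-MAP:URI="init.mp4",BYTERANGE="720@0"`
// produces { type: 'tag', tagType: 'map', uri: 'init.mp4',
// byterange: { length: 720, offset: 0 } }.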
  6714. match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(line);
  6715. if (match) {
  6716. event = {
  6717. type: 'tag',
  6718. tagType: 'stream-inf'
  6719. };
  6720. if (match[1]) {
  6721. event.attributes = parseAttributes(match[1]);
  6722. if (event.attributes.RESOLUTION) {
  6723. var split = event.attributes.RESOLUTION.split('x');
  6724. var resolution = {};
  6725. if (split[0]) {
  6726. resolution.width = parseInt(split[0], 10);
  6727. }
  6728. if (split[1]) {
  6729. resolution.height = parseInt(split[1], 10);
  6730. }
  6731. event.attributes.RESOLUTION = resolution;
  6732. }
  6733. if (event.attributes.BANDWIDTH) {
  6734. event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
  6735. }
  6736. if (event.attributes['PROGRAM-ID']) {
  6737. event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
  6738. }
  6739. }
  6740. this.trigger('data', event);
  6741. return;
  6742. }
  6743. match = /^#EXT-X-MEDIA:?(.*)$/.exec(line);
  6744. if (match) {
  6745. event = {
  6746. type: 'tag',
  6747. tagType: 'media'
  6748. };
  6749. if (match[1]) {
  6750. event.attributes = parseAttributes(match[1]);
  6751. }
  6752. this.trigger('data', event);
  6753. return;
  6754. }
  6755. match = /^#EXT-X-ENDLIST/.exec(line);
  6756. if (match) {
  6757. this.trigger('data', {
  6758. type: 'tag',
  6759. tagType: 'endlist'
  6760. });
  6761. return;
  6762. }
  6763. match = /^#EXT-X-DISCONTINUITY/.exec(line);
  6764. if (match) {
  6765. this.trigger('data', {
  6766. type: 'tag',
  6767. tagType: 'discontinuity'
  6768. });
  6769. return;
  6770. }
  6771. match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(line);
  6772. if (match) {
  6773. event = {
  6774. type: 'tag',
  6775. tagType: 'program-date-time'
  6776. };
  6777. if (match[1]) {
  6778. event.dateTimeString = match[1];
  6779. event.dateTimeObject = new Date(match[1]);
  6780. }
  6781. this.trigger('data', event);
  6782. return;
  6783. }
  6784. match = /^#EXT-X-KEY:?(.*)$/.exec(line);
  6785. if (match) {
  6786. event = {
  6787. type: 'tag',
  6788. tagType: 'key'
  6789. };
  6790. if (match[1]) {
  6791. event.attributes = parseAttributes(match[1]);
  6792. // parse the IV string into a Uint32Array
  6793. if (event.attributes.IV) {
  6794. if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
  6795. event.attributes.IV = event.attributes.IV.substring(2);
  6796. }
  6797. event.attributes.IV = event.attributes.IV.match(/.{8}/g);
  6798. event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
  6799. event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
  6800. event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
  6801. event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
  6802. event.attributes.IV = new Uint32Array(event.attributes.IV);
  6803. }
  6804. }
  6805. this.trigger('data', event);
  6806. return;
  6807. }
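// For example (illustrative), IV=0x00000000000000000000000000000001 is
// split into four 32-bit hex words and becomes Uint32Array [0, 0, 0, 1].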
  6808. match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(line);
  6809. if (match) {
  6810. event = {
  6811. type: 'tag',
  6812. tagType: 'cue-out-cont'
  6813. };
  6814. if (match[1]) {
  6815. event.data = match[1];
  6816. } else {
  6817. event.data = '';
  6818. }
  6819. this.trigger('data', event);
  6820. return;
  6821. }
  6822. match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(line);
  6823. if (match) {
  6824. event = {
  6825. type: 'tag',
  6826. tagType: 'cue-out'
  6827. };
  6828. if (match[1]) {
  6829. event.data = match[1];
  6830. } else {
  6831. event.data = '';
  6832. }
  6833. this.trigger('data', event);
  6834. return;
  6835. }
  6836. match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(line);
  6837. if (match) {
  6838. event = {
  6839. type: 'tag',
  6840. tagType: 'cue-in'
  6841. };
  6842. if (match[1]) {
  6843. event.data = match[1];
  6844. } else {
  6845. event.data = '';
  6846. }
  6847. this.trigger('data', event);
  6848. return;
  6849. }
  6850. // unknown tag type
  6851. this.trigger('data', {
  6852. type: 'tag',
  6853. data: line.slice(4)
  6854. });
  6855. }
  6856. }]);
  6857. return ParseStream;
  6858. }(_stream2['default']);
  6859. exports['default'] = ParseStream;
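// Minimal usage sketch (illustrative): push complete lines and listen for
// the parsed events described above.
//
//   var parseStream = new ParseStream();
//   parseStream.on('data', function(event) {
//     // pushing '#EXTINF:10,' yields { type: 'tag', tagType: 'inf', duration: 10 }
//   });
//   parseStream.push('#EXTINF:10,');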
  6860. },{"./stream":35}],34:[function(require,module,exports){
  6861. 'use strict';
  6862. Object.defineProperty(exports, "__esModule", {
  6863. value: true
  6864. });
  6865. var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
  6866. var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
  6867. var _stream = require('./stream');
  6868. var _stream2 = _interopRequireDefault(_stream);
  6869. var _lineStream = require('./line-stream');
  6870. var _lineStream2 = _interopRequireDefault(_lineStream);
  6871. var _parseStream = require('./parse-stream');
  6872. var _parseStream2 = _interopRequireDefault(_parseStream);
  6873. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  6874. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
  6875. function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
  6876. function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
  6877. * @file m3u8/parser.js
  6878. */
  6879. /**
  6880. * A parser for M3U8 files. The current interpretation of the input is
  6881. * exposed as a property `manifest` on parser objects. It's just two lines to
  6882. * create and parse a manifest once you have the contents available as a string:
  6883. *
  6884. * ```js
  6885. * var parser = new m3u8.Parser();
  6886. * parser.push(xhr.responseText);
  6887. * ```
  6888. *
  6889. * New input can later be applied to update the manifest object by calling
  6890. * `push` again.
  6891. *
  6892. * The parser attempts to create a usable manifest object even if the
  6893. * underlying input is somewhat nonsensical. It emits `info` and `warning`
  6894. * events during the parse if it encounters input that seems invalid or
  6895. * requires some property of the manifest object to be defaulted.
  6896. *
  6897. * @class Parser
  6898. * @extends Stream
  6899. */
  6900. var Parser = function (_Stream) {
  6901. _inherits(Parser, _Stream);
  6902. function Parser() {
  6903. _classCallCheck(this, Parser);
  6904. var _this = _possibleConstructorReturn(this, (Parser.__proto__ || Object.getPrototypeOf(Parser)).call(this));
  6905. _this.lineStream = new _lineStream2['default']();
  6906. _this.parseStream = new _parseStream2['default']();
  6907. _this.lineStream.pipe(_this.parseStream);
  6908. /* eslint-disable consistent-this */
  6909. var self = _this;
  6910. /* eslint-enable consistent-this */
  6911. var uris = [];
  6912. var currentUri = {};
  6913. // if specified, the active EXT-X-MAP definition
  6914. var currentMap = void 0;
  6915. // if specified, the active decryption key
  6916. var _key = void 0;
  6917. var noop = function noop() {};
  6918. var defaultMediaGroups = {
  6919. 'AUDIO': {},
  6920. 'VIDEO': {},
  6921. 'CLOSED-CAPTIONS': {},
  6922. 'SUBTITLES': {}
  6923. };
  6924. // group segments into numbered timelines delineated by discontinuities
  6925. var currentTimeline = 0;
  6926. // the manifest is empty until the parse stream begins delivering data
  6927. _this.manifest = {
  6928. allowCache: true,
  6929. discontinuityStarts: [],
  6930. segments: []
  6931. };
  6932. // update the manifest with the m3u8 entry from the parse stream
  6933. _this.parseStream.on('data', function (entry) {
  6934. var mediaGroup = void 0;
  6935. var rendition = void 0;
  6936. ({
  6937. tag: function tag() {
  6938. // switch based on the tag type
  6939. (({
  6940. 'allow-cache': function allowCache() {
  6941. this.manifest.allowCache = entry.allowed;
  6942. if (!('allowed' in entry)) {
  6943. this.trigger('info', {
  6944. message: 'defaulting allowCache to YES'
  6945. });
  6946. this.manifest.allowCache = true;
  6947. }
  6948. },
  6949. byterange: function byterange() {
  6950. var byterange = {};
  6951. if ('length' in entry) {
  6952. currentUri.byterange = byterange;
  6953. byterange.length = entry.length;
  6954. if (!('offset' in entry)) {
  6955. this.trigger('info', {
  6956. message: 'defaulting offset to zero'
  6957. });
  6958. entry.offset = 0;
  6959. }
  6960. }
  6961. if ('offset' in entry) {
  6962. currentUri.byterange = byterange;
  6963. byterange.offset = entry.offset;
  6964. }
  6965. },
  6966. endlist: function endlist() {
  6967. this.manifest.endList = true;
  6968. },
  6969. inf: function inf() {
  6970. if (!('mediaSequence' in this.manifest)) {
  6971. this.manifest.mediaSequence = 0;
  6972. this.trigger('info', {
  6973. message: 'defaulting media sequence to zero'
  6974. });
  6975. }
  6976. if (!('discontinuitySequence' in this.manifest)) {
  6977. this.manifest.discontinuitySequence = 0;
  6978. this.trigger('info', {
  6979. message: 'defaulting discontinuity sequence to zero'
  6980. });
  6981. }
  6982. if (entry.duration > 0) {
  6983. currentUri.duration = entry.duration;
  6984. }
  6985. if (entry.duration === 0) {
  6986. currentUri.duration = 0.01;
  6987. this.trigger('info', {
  6988. message: 'updating zero segment duration to a small value'
  6989. });
  6990. }
  6991. this.manifest.segments = uris;
  6992. },
  6993. key: function key() {
  6994. if (!entry.attributes) {
  6995. this.trigger('warn', {
  6996. message: 'ignoring key declaration without attribute list'
  6997. });
  6998. return;
  6999. }
  7000. // clear the active encryption key
  7001. if (entry.attributes.METHOD === 'NONE') {
  7002. _key = null;
  7003. return;
  7004. }
  7005. if (!entry.attributes.URI) {
  7006. this.trigger('warn', {
  7007. message: 'ignoring key declaration without URI'
  7008. });
  7009. return;
  7010. }
  7011. if (!entry.attributes.METHOD) {
  7012. this.trigger('warn', {
  7013. message: 'defaulting key method to AES-128'
  7014. });
  7015. }
  7016. // setup an encryption key for upcoming segments
  7017. _key = {
  7018. method: entry.attributes.METHOD || 'AES-128',
  7019. uri: entry.attributes.URI
  7020. };
  7021. if (typeof entry.attributes.IV !== 'undefined') {
  7022. _key.iv = entry.attributes.IV;
  7023. }
  7024. },
  7025. 'media-sequence': function mediaSequence() {
  7026. if (!isFinite(entry.number)) {
  7027. this.trigger('warn', {
  7028. message: 'ignoring invalid media sequence: ' + entry.number
  7029. });
  7030. return;
  7031. }
  7032. this.manifest.mediaSequence = entry.number;
  7033. },
  7034. 'discontinuity-sequence': function discontinuitySequence() {
  7035. if (!isFinite(entry.number)) {
  7036. this.trigger('warn', {
  7037. message: 'ignoring invalid discontinuity sequence: ' + entry.number
  7038. });
  7039. return;
  7040. }
  7041. this.manifest.discontinuitySequence = entry.number;
  7042. currentTimeline = entry.number;
  7043. },
  7044. 'playlist-type': function playlistType() {
  7045. if (!/VOD|EVENT/.test(entry.playlistType)) {
  7046. this.trigger('warn', {
7047. message: 'ignoring unknown playlist type: ' + entry.playlistType
  7048. });
  7049. return;
  7050. }
  7051. this.manifest.playlistType = entry.playlistType;
  7052. },
  7053. map: function map() {
  7054. currentMap = {};
  7055. if (entry.uri) {
  7056. currentMap.uri = entry.uri;
  7057. }
  7058. if (entry.byterange) {
  7059. currentMap.byterange = entry.byterange;
  7060. }
  7061. },
  7062. 'stream-inf': function streamInf() {
  7063. this.manifest.playlists = uris;
  7064. this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
  7065. if (!entry.attributes) {
  7066. this.trigger('warn', {
  7067. message: 'ignoring empty stream-inf attributes'
  7068. });
  7069. return;
  7070. }
  7071. if (!currentUri.attributes) {
  7072. currentUri.attributes = {};
  7073. }
  7074. _extends(currentUri.attributes, entry.attributes);
  7075. },
  7076. media: function media() {
  7077. this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
  7078. if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
  7079. this.trigger('warn', {
  7080. message: 'ignoring incomplete or missing media group'
  7081. });
  7082. return;
  7083. }
  7084. // find the media group, creating defaults as necessary
  7085. var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
  7086. mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
  7087. mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']];
  7088. // collect the rendition metadata
  7089. rendition = {
  7090. 'default': /yes/i.test(entry.attributes.DEFAULT)
  7091. };
  7092. if (rendition['default']) {
  7093. rendition.autoselect = true;
  7094. } else {
  7095. rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
  7096. }
  7097. if (entry.attributes.LANGUAGE) {
  7098. rendition.language = entry.attributes.LANGUAGE;
  7099. }
  7100. if (entry.attributes.URI) {
  7101. rendition.uri = entry.attributes.URI;
  7102. }
  7103. if (entry.attributes['INSTREAM-ID']) {
  7104. rendition.instreamId = entry.attributes['INSTREAM-ID'];
  7105. }
  7106. if (entry.attributes.CHARACTERISTICS) {
  7107. rendition.characteristics = entry.attributes.CHARACTERISTICS;
  7108. }
  7109. if (entry.attributes.FORCED) {
  7110. rendition.forced = /yes/i.test(entry.attributes.FORCED);
  7111. }
  7112. // insert the new rendition
  7113. mediaGroup[entry.attributes.NAME] = rendition;
  7114. },
  7115. discontinuity: function discontinuity() {
  7116. currentTimeline += 1;
  7117. currentUri.discontinuity = true;
  7118. this.manifest.discontinuityStarts.push(uris.length);
  7119. },
  7120. 'program-date-time': function programDateTime() {
  7121. this.manifest.dateTimeString = entry.dateTimeString;
  7122. this.manifest.dateTimeObject = entry.dateTimeObject;
  7123. },
  7124. targetduration: function targetduration() {
  7125. if (!isFinite(entry.duration) || entry.duration < 0) {
  7126. this.trigger('warn', {
  7127. message: 'ignoring invalid target duration: ' + entry.duration
  7128. });
  7129. return;
  7130. }
  7131. this.manifest.targetDuration = entry.duration;
  7132. },
  7133. totalduration: function totalduration() {
  7134. if (!isFinite(entry.duration) || entry.duration < 0) {
  7135. this.trigger('warn', {
  7136. message: 'ignoring invalid total duration: ' + entry.duration
  7137. });
  7138. return;
  7139. }
  7140. this.manifest.totalDuration = entry.duration;
  7141. },
  7142. 'cue-out': function cueOut() {
  7143. currentUri.cueOut = entry.data;
  7144. },
  7145. 'cue-out-cont': function cueOutCont() {
  7146. currentUri.cueOutCont = entry.data;
  7147. },
  7148. 'cue-in': function cueIn() {
  7149. currentUri.cueIn = entry.data;
  7150. }
  7151. })[entry.tagType] || noop).call(self);
  7152. },
  7153. uri: function uri() {
  7154. currentUri.uri = entry.uri;
  7155. uris.push(currentUri);
  7156. // if no explicit duration was declared, use the target duration
  7157. if (this.manifest.targetDuration && !('duration' in currentUri)) {
  7158. this.trigger('warn', {
  7159. message: 'defaulting segment duration to the target duration'
  7160. });
  7161. currentUri.duration = this.manifest.targetDuration;
  7162. }
  7163. // annotate with encryption information, if necessary
  7164. if (_key) {
  7165. currentUri.key = _key;
  7166. }
  7167. currentUri.timeline = currentTimeline;
  7168. // annotate with initialization segment information, if necessary
  7169. if (currentMap) {
  7170. currentUri.map = currentMap;
  7171. }
  7172. // prepare for the next URI
  7173. currentUri = {};
  7174. },
  7175. comment: function comment() {
  7176. // comments are not important for playback
  7177. }
  7178. })[entry.type].call(self);
  7179. });
  7180. return _this;
  7181. }
  7182. /**
  7183. * Parse the input string and update the manifest object.
  7184. *
  7185. * @param {String} chunk a potentially incomplete portion of the manifest
  7186. */
  7187. _createClass(Parser, [{
  7188. key: 'push',
  7189. value: function push(chunk) {
  7190. this.lineStream.push(chunk);
  7191. }
  7192. /**
  7193. * Flush any remaining input. This can be handy if the last line of an M3U8
  7194. * manifest did not contain a trailing newline but the file has been
  7195. * completely received.
  7196. */
  7197. }, {
  7198. key: 'end',
  7199. value: function end() {
  7200. // flush any buffered input
  7201. this.lineStream.push('\n');
  7202. }
  7203. }]);
  7204. return Parser;
  7205. }(_stream2['default']);
  7206. exports['default'] = Parser;
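// Illustrative sketch; `manifestText` is an assumed variable holding the
// full M3U8 text. Calling end() flushes a final line that lacks a trailing
// newline, after which the manifest object can be read directly.
//
//   var parser = new Parser();
//   parser.push(manifestText);
//   parser.end();
//   parser.manifest.segments; // parsed segment objects
//   parser.manifest.targetDuration; // e.g. 10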
  7207. },{"./line-stream":32,"./parse-stream":33,"./stream":35}],35:[function(require,module,exports){
  7208. 'use strict';
  7209. Object.defineProperty(exports, "__esModule", {
  7210. value: true
  7211. });
  7212. var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
  7213. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
  7214. /**
  7215. * @file stream.js
  7216. */
  7217. /**
7218. * A lightweight readable stream implementation that handles event dispatching.
  7219. *
  7220. * @class Stream
  7221. */
  7222. var Stream = function () {
  7223. function Stream() {
  7224. _classCallCheck(this, Stream);
  7225. this.listeners = {};
  7226. }
  7227. /**
  7228. * Add a listener for a specified event type.
  7229. *
  7230. * @param {String} type the event name
  7231. * @param {Function} listener the callback to be invoked when an event of
  7232. * the specified type occurs
  7233. */
  7234. _createClass(Stream, [{
  7235. key: 'on',
  7236. value: function on(type, listener) {
  7237. if (!this.listeners[type]) {
  7238. this.listeners[type] = [];
  7239. }
  7240. this.listeners[type].push(listener);
  7241. }
  7242. /**
  7243. * Remove a listener for a specified event type.
  7244. *
  7245. * @param {String} type the event name
  7246. * @param {Function} listener a function previously registered for this
  7247. * type of event through `on`
7248. * @return {Boolean} whether the listener was found and removed
  7249. */
  7250. }, {
  7251. key: 'off',
  7252. value: function off(type, listener) {
  7253. if (!this.listeners[type]) {
  7254. return false;
  7255. }
  7256. var index = this.listeners[type].indexOf(listener);
7257. if (index > -1) { this.listeners[type].splice(index, 1); } // guard: splice(-1, 1) would wrongly remove the last listener
  7258. return index > -1;
  7259. }
  7260. /**
  7261. * Trigger an event of the specified type on this stream. Any additional
  7262. * arguments to this function are passed as parameters to event listeners.
  7263. *
  7264. * @param {String} type the event name
  7265. */
  7266. }, {
  7267. key: 'trigger',
  7268. value: function trigger(type) {
  7269. var callbacks = this.listeners[type];
  7270. var i = void 0;
  7271. var length = void 0;
  7272. var args = void 0;
  7273. if (!callbacks) {
  7274. return;
  7275. }
  7276. // Slicing the arguments on every invocation of this method
  7277. // can add a significant amount of overhead. Avoid the
  7278. // intermediate object creation for the common case of a
  7279. // single callback argument
  7280. if (arguments.length === 2) {
  7281. length = callbacks.length;
  7282. for (i = 0; i < length; ++i) {
  7283. callbacks[i].call(this, arguments[1]);
  7284. }
  7285. } else {
  7286. args = Array.prototype.slice.call(arguments, 1);
  7287. length = callbacks.length;
  7288. for (i = 0; i < length; ++i) {
  7289. callbacks[i].apply(this, args);
  7290. }
  7291. }
  7292. }
  7293. /**
  7294. * Destroys the stream and cleans up.
  7295. */
  7296. }, {
  7297. key: 'dispose',
  7298. value: function dispose() {
  7299. this.listeners = {};
  7300. }
  7301. /**
  7302. * Forwards all `data` events on this stream to the destination stream. The
  7303. * destination stream should provide a method `push` to receive the data
  7304. * events as they arrive.
  7305. *
  7306. * @param {Stream} destination the stream that will receive all `data` events
  7307. * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
  7308. */
  7309. }, {
  7310. key: 'pipe',
  7311. value: function pipe(destination) {
  7312. this.on('data', function (data) {
  7313. destination.push(data);
  7314. });
  7315. }
  7316. }]);
  7317. return Stream;
  7318. }();
  7319. exports['default'] = Stream;
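// Illustrative sketch of the event flow: pipe() simply forwards every
// 'data' payload to the destination's push() method, synchronously.
//
//   var source = new Stream();
//   var sink = { push: function(data) { /* handle each payload */ } };
//   source.pipe(sink);
//   source.trigger('data', { type: 'tag' }); // calls sink.push immediately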
  7320. },{}],36:[function(require,module,exports){
  7321. /**
  7322. * mux.js
  7323. *
  7324. * Copyright (c) 2016 Brightcove
  7325. * All rights reserved.
  7326. *
  7327. * A stream-based aac to mp4 converter. This utility can be used to
  7328. * deliver mp4s to a SourceBuffer on platforms that support native
  7329. * Media Source Extensions.
  7330. */
  7331. 'use strict';
  7332. var Stream = require('../utils/stream.js');
  7333. // Constants
  7334. var AacStream;
  7335. /**
  7336. * Splits an incoming stream of binary data into ADTS and ID3 Frames.
  7337. */
  7338. AacStream = function() {
  7339. var
  7340. everything = new Uint8Array(),
  7341. timeStamp = 0;
  7342. AacStream.prototype.init.call(this);
  7343. this.setTimestamp = function(timestamp) {
  7344. timeStamp = timestamp;
  7345. };
  7346. this.parseId3TagSize = function(header, byteIndex) {
  7347. var
  7348. returnSize = (header[byteIndex + 6] << 21) |
  7349. (header[byteIndex + 7] << 14) |
  7350. (header[byteIndex + 8] << 7) |
  7351. (header[byteIndex + 9]),
  7352. flags = header[byteIndex + 5],
  7353. footerPresent = (flags & 16) >> 4;
  7354. if (footerPresent) {
  7355. return returnSize + 20;
  7356. }
  7357. return returnSize + 10;
  7358. };
  7359. this.parseAdtsSize = function(header, byteIndex) {
  7360. var
  7361. lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
  7362. middle = header[byteIndex + 4] << 3,
7363. highTwo = (header[byteIndex + 3] & 0x3) << 11; // parenthesized: << binds tighter than &
  7364. return (highTwo | middle) | lowThree;
  7365. };
  7366. this.push = function(bytes) {
  7367. var
  7368. frameSize = 0,
  7369. byteIndex = 0,
  7370. bytesLeft,
  7371. chunk,
  7372. packet,
7373. tempLength, oldBuffer;
  7374. // If there are bytes remaining from the last segment, prepend them to the
  7375. // bytes that were pushed in
  7376. if (everything.length) {
7377. tempLength = everything.length;
7378. oldBuffer = everything; // keep a reference so the buffered bytes survive the reallocation
7379. everything = new Uint8Array(bytes.byteLength + tempLength);
7380. everything.set(oldBuffer); everything.set(bytes, tempLength);
  7381. } else {
  7382. everything = bytes;
  7383. }
  7384. while (everything.length - byteIndex >= 3) {
  7385. if ((everything[byteIndex] === 'I'.charCodeAt(0)) &&
  7386. (everything[byteIndex + 1] === 'D'.charCodeAt(0)) &&
  7387. (everything[byteIndex + 2] === '3'.charCodeAt(0))) {
  7388. // Exit early because we don't have enough to parse
  7389. // the ID3 tag header
  7390. if (everything.length - byteIndex < 10) {
  7391. break;
  7392. }
  7393. // check framesize
  7394. frameSize = this.parseId3TagSize(everything, byteIndex);
  7395. // Exit early if we don't have enough in the buffer
  7396. // to emit a full packet
  7397. if (frameSize > everything.length) {
  7398. break;
  7399. }
  7400. chunk = {
  7401. type: 'timed-metadata',
  7402. data: everything.subarray(byteIndex, byteIndex + frameSize)
  7403. };
  7404. this.trigger('data', chunk);
  7405. byteIndex += frameSize;
  7406. continue;
7407. } else if (((everything[byteIndex] & 0xff) === 0xff) &&
  7408. ((everything[byteIndex + 1] & 0xf0) === 0xf0)) {
  7409. // Exit early because we don't have enough to parse
  7410. // the ADTS frame header
  7411. if (everything.length - byteIndex < 7) {
  7412. break;
  7413. }
  7414. frameSize = this.parseAdtsSize(everything, byteIndex);
  7415. // Exit early if we don't have enough in the buffer
  7416. // to emit a full packet
  7417. if (frameSize > everything.length) {
  7418. break;
  7419. }
  7420. packet = {
  7421. type: 'audio',
  7422. data: everything.subarray(byteIndex, byteIndex + frameSize),
  7423. pts: timeStamp,
  7424. dts: timeStamp
  7425. };
  7426. this.trigger('data', packet);
  7427. byteIndex += frameSize;
  7428. continue;
  7429. }
  7430. byteIndex++;
  7431. }
  7432. bytesLeft = everything.length - byteIndex;
  7433. if (bytesLeft > 0) {
  7434. everything = everything.subarray(byteIndex);
  7435. } else {
  7436. everything = new Uint8Array();
  7437. }
  7438. };
  7439. };
  7440. AacStream.prototype = new Stream();
  7441. module.exports = AacStream;
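// Illustrative usage sketch; `segmentBytes` is an assumed Uint8Array
// holding a raw AAC segment:
//
//   var aacStream = new AacStream();
//   aacStream.on('data', function(chunk) {
//     // chunk.type is 'timed-metadata' (an ID3 tag) or 'audio' (one ADTS frame)
//   });
//   aacStream.push(segmentBytes);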
  7442. },{"../utils/stream.js":60}],37:[function(require,module,exports){
  7443. /**
  7444. * mux.js
  7445. *
  7446. * Copyright (c) 2016 Brightcove
  7447. * All rights reserved.
  7448. *
7449. * Utilities to detect basic properties and metadata about AAC data.
  7450. */
  7451. 'use strict';
  7452. var ADTS_SAMPLING_FREQUENCIES = [
  7453. 96000,
  7454. 88200,
  7455. 64000,
  7456. 48000,
  7457. 44100,
  7458. 32000,
  7459. 24000,
  7460. 22050,
  7461. 16000,
  7462. 12000,
  7463. 11025,
  7464. 8000,
  7465. 7350
  7466. ];
  7467. var parseSyncSafeInteger = function(data) {
  7468. return (data[0] << 21) |
  7469. (data[1] << 14) |
  7470. (data[2] << 7) |
  7471. (data[3]);
  7472. };
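// Syncsafe integers carry 7 bits per byte (the high bit of each byte is
// always 0), so, for example, the bytes [0x00, 0x00, 0x02, 0x01] decode to
// (0x02 << 7) | 0x01 === 257.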
  7473. // return a percent-encoded representation of the specified byte range
  7474. // @see http://en.wikipedia.org/wiki/Percent-encoding
  7475. var percentEncode = function(bytes, start, end) {
  7476. var i, result = '';
  7477. for (i = start; i < end; i++) {
  7478. result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  7479. }
  7480. return result;
  7481. };
  7482. // return the string representation of the specified byte range,
  7483. // interpreted as ISO-8859-1.
  7484. var parseIso88591 = function(bytes, start, end) {
  7485. return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  7486. };
  7487. var parseId3TagSize = function(header, byteIndex) {
  7488. var
  7489. returnSize = (header[byteIndex + 6] << 21) |
  7490. (header[byteIndex + 7] << 14) |
  7491. (header[byteIndex + 8] << 7) |
  7492. (header[byteIndex + 9]),
  7493. flags = header[byteIndex + 5],
  7494. footerPresent = (flags & 16) >> 4;
  7495. if (footerPresent) {
  7496. return returnSize + 20;
  7497. }
  7498. return returnSize + 10;
  7499. };
  7500. var parseAdtsSize = function(header, byteIndex) {
  7501. var
  7502. lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
  7503. middle = header[byteIndex + 4] << 3,
7504. highTwo = (header[byteIndex + 3] & 0x3) << 11; // parenthesized: << binds tighter than &
  7505. return (highTwo | middle) | lowThree;
  7506. };
  7507. var parseType = function(header, byteIndex) {
  7508. if ((header[byteIndex] === 'I'.charCodeAt(0)) &&
  7509. (header[byteIndex + 1] === 'D'.charCodeAt(0)) &&
  7510. (header[byteIndex + 2] === '3'.charCodeAt(0))) {
  7511. return 'timed-metadata';
7512. } else if (((header[byteIndex] & 0xff) === 0xff) &&
  7513. ((header[byteIndex + 1] & 0xf0) === 0xf0)) {
  7514. return 'audio';
  7515. }
  7516. return null;
  7517. };
  7518. var parseSampleRate = function(packet) {
  7519. var i = 0;
  7520. while (i + 5 < packet.length) {
  7521. if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
  7522. // If a valid header was not found, jump one forward and attempt to
  7523. // find a valid ADTS header starting at the next byte
  7524. i++;
  7525. continue;
  7526. }
  7527. return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
  7528. }
  7529. return null;
  7530. };
  7531. var parseAacTimestamp = function(packet) {
  7532. var frameStart, frameSize, frame, frameHeader;
  7533. // find the start of the first frame and the end of the tag
  7534. frameStart = 10;
  7535. if (packet[5] & 0x40) {
  7536. // advance the frame start past the extended header
  7537. frameStart += 4; // header size field
  7538. frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
  7539. }
  7540. // parse one or more ID3 frames
  7541. // http://id3.org/id3v2.3.0#ID3v2_frame_overview
  7542. do {
  7543. // determine the number of bytes in this frame
  7544. frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
  7545. if (frameSize < 1) {
  7546. return null;
  7547. }
  7548. frameHeader = String.fromCharCode(packet[frameStart],
  7549. packet[frameStart + 1],
  7550. packet[frameStart + 2],
  7551. packet[frameStart + 3]);
  7552. if (frameHeader === 'PRIV') {
  7553. frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
  7554. for (var i = 0; i < frame.byteLength; i++) {
  7555. if (frame[i] === 0) {
  7556. var owner = parseIso88591(frame, 0, i);
  7557. if (owner === 'com.apple.streaming.transportStreamTimestamp') {
  7558. var d = frame.subarray(i + 1);
  7559. var size = ((d[3] & 0x01) << 30) |
  7560. (d[4] << 22) |
  7561. (d[5] << 14) |
  7562. (d[6] << 6) |
  7563. (d[7] >>> 2);
  7564. size *= 4;
  7565. size += d[7] & 0x03;
  7566. return size;
  7567. }
  7568. break;
  7569. }
  7570. }
  7571. }
  7572. frameStart += 10; // advance past the frame header
  7573. frameStart += frameSize; // advance past the frame body
  7574. } while (frameStart < packet.byteLength);
  7575. return null;
  7576. };
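// The value returned above is the 33-bit MPEG-2 PTS (90kHz clock) carried
// in the com.apple.streaming.transportStreamTimestamp PRIV frame; dividing
// by 90000 yields seconds, e.g. 900000 -> 10s.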
  7577. module.exports = {
  7578. parseId3TagSize: parseId3TagSize,
  7579. parseAdtsSize: parseAdtsSize,
  7580. parseType: parseType,
  7581. parseSampleRate: parseSampleRate,
  7582. parseAacTimestamp: parseAacTimestamp
  7583. };
  7584. },{}],38:[function(require,module,exports){
  7585. 'use strict';
  7586. var Stream = require('../utils/stream.js');
  7587. var AdtsStream;
  7588. var
  7589. ADTS_SAMPLING_FREQUENCIES = [
  7590. 96000,
  7591. 88200,
  7592. 64000,
  7593. 48000,
  7594. 44100,
  7595. 32000,
  7596. 24000,
  7597. 22050,
  7598. 16000,
  7599. 12000,
  7600. 11025,
  7601. 8000,
  7602. 7350
  7603. ];
  7604. /*
7605. * Accepts an ElementaryStream and emits data events with parsed
7606. * AAC audio frames from the individual packets. Input audio in ADTS
  7607. * format is unpacked and re-emitted as AAC frames.
  7608. *
  7609. * @see http://wiki.multimedia.cx/index.php?title=ADTS
  7610. * @see http://wiki.multimedia.cx/?title=Understanding_AAC
  7611. */
  7612. AdtsStream = function() {
  7613. var buffer;
  7614. AdtsStream.prototype.init.call(this);
  7615. this.push = function(packet) {
  7616. var
  7617. i = 0,
  7618. frameNum = 0,
  7619. frameLength,
  7620. protectionSkipBytes,
  7621. frameEnd,
  7622. oldBuffer,
  7623. sampleCount,
  7624. adtsFrameDuration;
  7625. if (packet.type !== 'audio') {
  7626. // ignore non-audio data
  7627. return;
  7628. }
  7629. // Prepend any data in the buffer to the input data so that we can parse
7630. // AAC frames that cross a PES packet boundary
  7631. if (buffer) {
  7632. oldBuffer = buffer;
  7633. buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
  7634. buffer.set(oldBuffer);
  7635. buffer.set(packet.data, oldBuffer.byteLength);
  7636. } else {
  7637. buffer = packet.data;
  7638. }
  7639. // unpack any ADTS frames which have been fully received
  7640. // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
  7641. while (i + 5 < buffer.length) {
7642. // Look for the start of an ADTS header.
  7643. if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
  7644. // If a valid header was not found, jump one forward and attempt to
  7645. // find a valid ADTS header starting at the next byte
  7646. i++;
  7647. continue;
  7648. }
  7649. // The protection skip bit tells us if we have 2 bytes of CRC data at the
  7650. // end of the ADTS header
  7651. protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;
  7652. // Frame length is a 13 bit integer starting 16 bits from the
  7653. // end of the sync sequence
  7654. frameLength = ((buffer[i + 3] & 0x03) << 11) |
  7655. (buffer[i + 4] << 3) |
  7656. ((buffer[i + 5] & 0xe0) >> 5);
  7657. sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
  7658. adtsFrameDuration = (sampleCount * 90000) /
  7659. ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];
  7660. frameEnd = i + frameLength;
  7661. // If we don't have enough data to actually finish this ADTS frame, return
  7662. // and wait for more data
  7663. if (buffer.byteLength < frameEnd) {
  7664. return;
  7665. }
  7666. // Otherwise, deliver the complete AAC frame
  7667. this.trigger('data', {
  7668. pts: packet.pts + (frameNum * adtsFrameDuration),
  7669. dts: packet.dts + (frameNum * adtsFrameDuration),
  7670. sampleCount: sampleCount,
  7671. audioobjecttype: ((buffer[i + 2] >>> 6) & 0x03) + 1,
  7672. channelcount: ((buffer[i + 2] & 1) << 2) |
  7673. ((buffer[i + 3] & 0xc0) >>> 6),
  7674. samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
  7675. samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
  7676. // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
  7677. samplesize: 16,
  7678. data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
  7679. });
  7680. // If the buffer is empty, clear it and return
  7681. if (buffer.byteLength === frameEnd) {
  7682. buffer = undefined;
  7683. return;
  7684. }
  7685. frameNum++;
  7686. // Remove the finished frame from the buffer and start the process again
  7687. buffer = buffer.subarray(frameEnd);
  7688. }
  7689. };
  7690. this.flush = function() {
  7691. this.trigger('done');
  7692. };
  7693. };
  7694. AdtsStream.prototype = new Stream();
  7695. module.exports = AdtsStream;
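// Worked example of the duration math above (illustrative): an AAC frame
// holds 1024 samples, so at a 48000 Hz sample rate
// adtsFrameDuration = (1024 * 90000) / 48000 = 1920 ticks of the 90kHz
// clock, roughly 21.3ms per frame.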
  7696. },{"../utils/stream.js":60}],39:[function(require,module,exports){
  7697. 'use strict';
  7698. var Stream = require('../utils/stream.js');
  7699. var ExpGolomb = require('../utils/exp-golomb.js');
  7700. var H264Stream, NalByteStream;
  7701. var PROFILES_WITH_OPTIONAL_SPS_DATA;
  7702. /**
  7703. * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
  7704. */
  7705. NalByteStream = function() {
  7706. var
  7707. syncPoint = 0,
  7708. i,
  7709. buffer;
  7710. NalByteStream.prototype.init.call(this);
  7711. this.push = function(data) {
  7712. var swapBuffer;
  7713. if (!buffer) {
  7714. buffer = data.data;
  7715. } else {
  7716. swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
  7717. swapBuffer.set(buffer);
  7718. swapBuffer.set(data.data, buffer.byteLength);
  7719. buffer = swapBuffer;
  7720. }
  7721. // Rec. ITU-T H.264, Annex B
  7722. // scan for NAL unit boundaries
  7723. // a match looks like this:
  7724. // 0 0 1 .. NAL .. 0 0 1
  7725. // ^ sync point ^ i
  7726. // or this:
  7727. // 0 0 1 .. NAL .. 0 0 0
  7728. // ^ sync point ^ i
  7729. // advance the sync point to a NAL start, if necessary
  7730. for (; syncPoint < buffer.byteLength - 3; syncPoint++) {
  7731. if (buffer[syncPoint + 2] === 1) {
  7732. // the sync point is properly aligned
  7733. i = syncPoint + 5;
  7734. break;
  7735. }
  7736. }
  7737. while (i < buffer.byteLength) {
  7738. // look at the current byte to determine if we've hit the end of
  7739. // a NAL unit boundary
  7740. switch (buffer[i]) {
  7741. case 0:
  7742. // skip past non-sync sequences
  7743. if (buffer[i - 1] !== 0) {
  7744. i += 2;
  7745. break;
  7746. } else if (buffer[i - 2] !== 0) {
  7747. i++;
  7748. break;
  7749. }
  7750. // deliver the NAL unit if it isn't empty
  7751. if (syncPoint + 3 !== i - 2) {
  7752. this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
  7753. }
  7754. // drop trailing zeroes
  7755. do {
  7756. i++;
  7757. } while (buffer[i] !== 1 && i < buffer.length);
  7758. syncPoint = i - 2;
  7759. i += 3;
  7760. break;
  7761. case 1:
  7762. // skip past non-sync sequences
  7763. if (buffer[i - 1] !== 0 ||
  7764. buffer[i - 2] !== 0) {
  7765. i += 3;
  7766. break;
  7767. }
  7768. // deliver the NAL unit
  7769. this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
  7770. syncPoint = i - 2;
  7771. i += 3;
  7772. break;
  7773. default:
  7774. // the current byte isn't a one or zero, so it cannot be part
  7775. // of a sync sequence
  7776. i += 3;
  7777. break;
  7778. }
  7779. }
  7780. // filter out the NAL units that were delivered
  7781. buffer = buffer.subarray(syncPoint);
  7782. i -= syncPoint;
  7783. syncPoint = 0;
  7784. };
  7785. this.flush = function() {
  7786. // deliver the last buffered NAL unit
  7787. if (buffer && buffer.byteLength > 3) {
  7788. this.trigger('data', buffer.subarray(syncPoint + 3));
  7789. }
  7790. // reset the stream state
  7791. buffer = null;
  7792. syncPoint = 0;
  7793. this.trigger('done');
  7794. };
  7795. };
  7796. NalByteStream.prototype = new Stream();
  7797. // values of profile_idc that indicate additional fields are included in the SPS
  7798. // see Recommendation ITU-T H.264 (4/2013),
  7799. // 7.3.2.1.1 Sequence parameter set data syntax
  7800. PROFILES_WITH_OPTIONAL_SPS_DATA = {
  7801. 100: true,
  7802. 110: true,
  7803. 122: true,
  7804. 244: true,
  7805. 44: true,
  7806. 83: true,
  7807. 86: true,
  7808. 118: true,
  7809. 128: true,
  7810. 138: true,
  7811. 139: true,
  7812. 134: true
  7813. };
  7814. /**
7815. * Accepts input from an ElementaryStream and produces H.264 NAL unit data
  7816. * events.
  7817. */
  7818. H264Stream = function() {
  7819. var
  7820. nalByteStream = new NalByteStream(),
  7821. self,
  7822. trackId,
  7823. currentPts,
  7824. currentDts,
  7825. discardEmulationPreventionBytes,
  7826. readSequenceParameterSet,
  7827. skipScalingList;
  7828. H264Stream.prototype.init.call(this);
  7829. self = this;
  7830. this.push = function(packet) {
  7831. if (packet.type !== 'video') {
  7832. return;
  7833. }
  7834. trackId = packet.trackId;
  7835. currentPts = packet.pts;
  7836. currentDts = packet.dts;
  7837. nalByteStream.push(packet);
  7838. };
  7839. nalByteStream.on('data', function(data) {
  7840. var
  7841. event = {
  7842. trackId: trackId,
  7843. pts: currentPts,
  7844. dts: currentDts,
  7845. data: data
  7846. };
  7847. switch (data[0] & 0x1f) {
  7848. case 0x05:
  7849. event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
  7850. break;
  7851. case 0x06:
  7852. event.nalUnitType = 'sei_rbsp';
  7853. event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
  7854. break;
  7855. case 0x07:
  7856. event.nalUnitType = 'seq_parameter_set_rbsp';
  7857. event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
  7858. event.config = readSequenceParameterSet(event.escapedRBSP);
  7859. break;
  7860. case 0x08:
  7861. event.nalUnitType = 'pic_parameter_set_rbsp';
  7862. break;
  7863. case 0x09:
  7864. event.nalUnitType = 'access_unit_delimiter_rbsp';
  7865. break;
  7866. default:
  7867. break;
  7868. }
  7869. self.trigger('data', event);
  7870. });
  7871. nalByteStream.on('done', function() {
  7872. self.trigger('done');
  7873. });
  7874. this.flush = function() {
  7875. nalByteStream.flush();
  7876. };
  7877. /**
  7878. * Advance the ExpGolomb decoder past a scaling list. The scaling
  7879. * list is optionally transmitted as part of a sequence parameter
  7880. * set and is not relevant to transmuxing.
  7881. * @param count {number} the number of entries in this scaling list
  7882. * @param expGolombDecoder {object} an ExpGolomb pointed to the
  7883. * start of a scaling list
  7884. * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
  7885. */
  7886. skipScalingList = function(count, expGolombDecoder) {
  7887. var
  7888. lastScale = 8,
  7889. nextScale = 8,
  7890. j,
  7891. deltaScale;
  7892. for (j = 0; j < count; j++) {
  7893. if (nextScale !== 0) {
  7894. deltaScale = expGolombDecoder.readExpGolomb();
  7895. nextScale = (lastScale + deltaScale + 256) % 256;
  7896. }
  7897. lastScale = (nextScale === 0) ? lastScale : nextScale;
  7898. }
  7899. };
  7900. /**
  7901. * Expunge any "Emulation Prevention" bytes from a "Raw Byte
  7902. * Sequence Payload"
  7903. * @param data {Uint8Array} the bytes of a RBSP from a NAL
  7904. * unit
  7905. * @return {Uint8Array} the RBSP without any Emulation
  7906. * Prevention Bytes
  7907. */
  7908. discardEmulationPreventionBytes = function(data) {
  7909. var
  7910. length = data.byteLength,
  7911. emulationPreventionBytesPositions = [],
  7912. i = 1,
  7913. newLength, newData;
  7914. // Find all `Emulation Prevention Bytes`
  7915. while (i < length - 2) {
  7916. if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
  7917. emulationPreventionBytesPositions.push(i + 2);
  7918. i += 2;
  7919. } else {
  7920. i++;
  7921. }
  7922. }
  7923. // If no Emulation Prevention Bytes were found just return the original
  7924. // array
  7925. if (emulationPreventionBytesPositions.length === 0) {
  7926. return data;
  7927. }
  7928. // Create a new array to hold the NAL unit data
  7929. newLength = length - emulationPreventionBytesPositions.length;
  7930. newData = new Uint8Array(newLength);
  7931. var sourceIndex = 0;
  7932. for (i = 0; i < newLength; sourceIndex++, i++) {
  7933. if (sourceIndex === emulationPreventionBytesPositions[0]) {
  7934. // Skip this byte
  7935. sourceIndex++;
  7936. // Remove this position index
  7937. emulationPreventionBytesPositions.shift();
  7938. }
  7939. newData[i] = data[sourceIndex];
  7940. }
  7941. return newData;
  7942. };
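// For example (illustrative), [0x10, 0x00, 0x00, 0x03, 0x01] comes back as
// [0x10, 0x00, 0x00, 0x01]: the 0x03 emulation prevention byte is dropped
// so downstream parsers see the raw payload.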
  7943. /**
  7944. * Read a sequence parameter set and return some interesting video
  7945. * properties. A sequence parameter set is the H264 metadata that
  7946. * describes the properties of upcoming video frames.
  7947. * @param data {Uint8Array} the bytes of a sequence parameter set
  7948. * @return {object} an object with configuration parsed from the
  7949. * sequence parameter set, including the dimensions of the
  7950. * associated video frames.
  7951. */
  7952. readSequenceParameterSet = function(data) {
  7953. var
  7954. frameCropLeftOffset = 0,
  7955. frameCropRightOffset = 0,
  7956. frameCropTopOffset = 0,
  7957. frameCropBottomOffset = 0,
  7958. sarScale = 1,
  7959. expGolombDecoder, profileIdc, levelIdc, profileCompatibility,
  7960. chromaFormatIdc, picOrderCntType,
  7961. numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
  7962. picHeightInMapUnitsMinus1,
  7963. frameMbsOnlyFlag,
  7964. scalingListCount,
  7965. sarRatio,
  7966. aspectRatioIdc,
  7967. i;
  7968. expGolombDecoder = new ExpGolomb(data);
  7969. profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
  7970. profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
  7971. levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
  7972. expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
  7973. // some profiles have more optional data we don't need
  7974. if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
  7975. chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
  7976. if (chromaFormatIdc === 3) {
  7977. expGolombDecoder.skipBits(1); // separate_colour_plane_flag
  7978. }
  7979. expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
  7980. expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
  7981. expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
  7982. if (expGolombDecoder.readBoolean()) { // seq_scaling_matrix_present_flag
  7983. scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
  7984. for (i = 0; i < scalingListCount; i++) {
  7985. if (expGolombDecoder.readBoolean()) { // seq_scaling_list_present_flag[ i ]
  7986. if (i < 6) {
  7987. skipScalingList(16, expGolombDecoder);
  7988. } else {
  7989. skipScalingList(64, expGolombDecoder);
  7990. }
  7991. }
  7992. }
  7993. }
  7994. }
  7995. expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
  7996. picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
  7997. if (picOrderCntType === 0) {
  7998. expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
  7999. } else if (picOrderCntType === 1) {
  8000. expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
  8001. expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
  8002. expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
  8003. numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
  8004. for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
  8005. expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
  8006. }
  8007. }
  8008. expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
  8009. expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
  8010. picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
  8011. picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
  8012. frameMbsOnlyFlag = expGolombDecoder.readBits(1);
  8013. if (frameMbsOnlyFlag === 0) {
  8014. expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
  8015. }
  8016. expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
  8017. if (expGolombDecoder.readBoolean()) { // frame_cropping_flag
  8018. frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
  8019. frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
  8020. frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
  8021. frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
  8022. }
  8023. if (expGolombDecoder.readBoolean()) {
  8024. // vui_parameters_present_flag
  8025. if (expGolombDecoder.readBoolean()) {
  8026. // aspect_ratio_info_present_flag
  8027. aspectRatioIdc = expGolombDecoder.readUnsignedByte();
  8028. switch (aspectRatioIdc) {
  8029. case 1: sarRatio = [1, 1]; break;
  8030. case 2: sarRatio = [12, 11]; break;
  8031. case 3: sarRatio = [10, 11]; break;
  8032. case 4: sarRatio = [16, 11]; break;
  8033. case 5: sarRatio = [40, 33]; break;
  8034. case 6: sarRatio = [24, 11]; break;
  8035. case 7: sarRatio = [20, 11]; break;
  8036. case 8: sarRatio = [32, 11]; break;
  8037. case 9: sarRatio = [80, 33]; break;
  8038. case 10: sarRatio = [18, 11]; break;
  8039. case 11: sarRatio = [15, 11]; break;
  8040. case 12: sarRatio = [64, 33]; break;
  8041. case 13: sarRatio = [160, 99]; break;
  8042. case 14: sarRatio = [4, 3]; break;
  8043. case 15: sarRatio = [3, 2]; break;
  8044. case 16: sarRatio = [2, 1]; break;
  8045. case 255: {
  8046. sarRatio = [expGolombDecoder.readUnsignedByte() << 8 |
  8047. expGolombDecoder.readUnsignedByte(),
  8048. expGolombDecoder.readUnsignedByte() << 8 |
  8049. expGolombDecoder.readUnsignedByte() ];
  8050. break;
  8051. }
  8052. }
  8053. if (sarRatio) {
  8054. sarScale = sarRatio[0] / sarRatio[1];
  8055. }
  8056. }
  8057. }
  8058. return {
  8059. profileIdc: profileIdc,
  8060. levelIdc: levelIdc,
  8061. profileCompatibility: profileCompatibility,
  8062. width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
  8063. height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - (frameCropTopOffset * 2) - (frameCropBottomOffset * 2)
  8064. };
  8065. };
  8066. };
  8067. H264Stream.prototype = new Stream();
  8068. module.exports = {
  8069. H264Stream: H264Stream,
  8070. NalByteStream: NalByteStream
  8071. };
  8072. },{"../utils/exp-golomb.js":59,"../utils/stream.js":60}],40:[function(require,module,exports){
  8073. var highPrefix = [33, 16, 5, 32, 164, 27];
  8074. var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
  8075. var zeroFill = function(count) {
  8076. var a = [];
  8077. while (count--) {
  8078. a.push(0);
  8079. }
  8080. return a;
  8081. };
  8082. var makeTable = function(metaTable) {
  8083. return Object.keys(metaTable).reduce(function(obj, key) {
  8084. obj[key] = new Uint8Array(metaTable[key].reduce(function(arr, part) {
  8085. return arr.concat(part);
  8086. }, []));
  8087. return obj;
  8088. }, {});
  8089. };
  8090. // Frames-of-silence to use for filling in missing AAC frames
  8091. var coneOfSilence = {
  8092. 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
  8093. 88200: [highPrefix, [231], zeroFill(170), [56]],
  8094. 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
  8095. 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
  8096. 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
  8097. 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
  8098. 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
  8099. 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
  8100. 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
  8101. 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
  8102. 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
  8103. };
  8104. module.exports = makeTable(coneOfSilence);
  8105. },{}],41:[function(require,module,exports){
  8106. 'use strict';
  8107. var Stream = require('../utils/stream.js');
  8108. /**
  8109. * The final stage of the transmuxer that emits the flv tags
8110. * for audio, video, and metadata. Also translates caption and ID3 tag
8111. * timestamps and outputs caption data and id3 cues.
  8112. */
  8113. var CoalesceStream = function(options) {
  8114. // Number of Tracks per output segment
  8115. // If greater than 1, we combine multiple
  8116. // tracks into a single segment
  8117. this.numberOfTracks = 0;
  8118. this.metadataStream = options.metadataStream;
  8119. this.videoTags = [];
  8120. this.audioTags = [];
  8121. this.videoTrack = null;
  8122. this.audioTrack = null;
  8123. this.pendingCaptions = [];
  8124. this.pendingMetadata = [];
  8125. this.pendingTracks = 0;
  8126. this.processedTracks = 0;
  8127. CoalesceStream.prototype.init.call(this);
8128. // Take output from multiple upstream tracks
  8129. this.push = function(output) {
  8130. // buffer incoming captions until the associated video segment
  8131. // finishes
  8132. if (output.text) {
  8133. return this.pendingCaptions.push(output);
  8134. }
  8135. // buffer incoming id3 tags until the final flush
  8136. if (output.frames) {
  8137. return this.pendingMetadata.push(output);
  8138. }
  8139. if (output.track.type === 'video') {
  8140. this.videoTrack = output.track;
  8141. this.videoTags = output.tags;
  8142. this.pendingTracks++;
  8143. }
  8144. if (output.track.type === 'audio') {
  8145. this.audioTrack = output.track;
  8146. this.audioTags = output.tags;
  8147. this.pendingTracks++;
  8148. }
  8149. };
  8150. };
  8151. CoalesceStream.prototype = new Stream();
  8152. CoalesceStream.prototype.flush = function(flushSource) {
  8153. var
  8154. id3,
  8155. caption,
  8156. i,
  8157. timelineStartPts,
  8158. event = {
  8159. tags: {},
  8160. captions: [],
  8161. metadata: []
  8162. };
  8163. if (this.pendingTracks < this.numberOfTracks) {
  8164. if (flushSource !== 'VideoSegmentStream' &&
  8165. flushSource !== 'AudioSegmentStream') {
  8166. // Return because we haven't received a flush from a data-generating
8167. // portion of the segment (meaning that we have only received meta-data
  8168. // or captions.)
  8169. return;
  8170. } else if (this.pendingTracks === 0) {
  8171. // In the case where we receive a flush without any data having been
  8172. // received we consider it an emitted track for the purposes of coalescing
  8173. // `done` events.
  8174. // We do this for the case where there is an audio and video track in the
  8175. // segment but no audio data. (seen in several playlists with alternate
  8176. // audio tracks and no audio present in the main TS segments.)
  8177. this.processedTracks++;
  8178. if (this.processedTracks < this.numberOfTracks) {
  8179. return;
  8180. }
  8181. }
  8182. }
  8183. this.processedTracks += this.pendingTracks;
  8184. this.pendingTracks = 0;
  8185. if (this.processedTracks < this.numberOfTracks) {
  8186. return;
  8187. }
  8188. if (this.videoTrack) {
  8189. timelineStartPts = this.videoTrack.timelineStartInfo.pts;
  8190. } else if (this.audioTrack) {
  8191. timelineStartPts = this.audioTrack.timelineStartInfo.pts;
  8192. }
  8193. event.tags.videoTags = this.videoTags;
  8194. event.tags.audioTags = this.audioTags;
  8195. // Translate caption PTS times into second offsets into the
  8196. // video timeline for the segment
  8197. for (i = 0; i < this.pendingCaptions.length; i++) {
  8198. caption = this.pendingCaptions[i];
  8199. caption.startTime = caption.startPts - timelineStartPts;
  8200. caption.startTime /= 90e3;
  8201. caption.endTime = caption.endPts - timelineStartPts;
  8202. caption.endTime /= 90e3;
  8203. event.captions.push(caption);
  8204. }
  8205. // Translate ID3 frame PTS times into second offsets into the
  8206. // video timeline for the segment
  8207. for (i = 0; i < this.pendingMetadata.length; i++) {
  8208. id3 = this.pendingMetadata[i];
  8209. id3.cueTime = id3.pts - timelineStartPts;
  8210. id3.cueTime /= 90e3;
  8211. event.metadata.push(id3);
  8212. }
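// Worked example (illustrative): with timelineStartPts = 900000 and a
// caption startPts of 1080000, startTime = (1080000 - 900000) / 90e3 = 2,
// i.e. the cue begins two seconds into the segment's timeline (PTS values
// are ticks of the 90kHz MPEG-2 clock).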
  8213. // We add this to every single emitted segment even though we only need
  8214. // it for the first
  8215. event.metadata.dispatchType = this.metadataStream.dispatchType;
  8216. // Reset stream state
  8217. this.videoTrack = null;
  8218. this.audioTrack = null;
  8219. this.videoTags = [];
  8220. this.audioTags = [];
  8221. this.pendingCaptions.length = 0;
  8222. this.pendingMetadata.length = 0;
  8223. this.pendingTracks = 0;
  8224. this.processedTracks = 0;
  8225. // Emit the final segment
  8226. this.trigger('data', event);
  8227. this.trigger('done');
  8228. };
  8229. module.exports = CoalesceStream;
  8230. },{"../utils/stream.js":60}],42:[function(require,module,exports){
  8231. 'use strict';
  8232. var FlvTag = require('./flv-tag.js');
  8233. // For information on the FLV format, see
  8234. // http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf.
  8235. // Technically, this function returns the header and a metadata FLV tag
  8236. // if duration is greater than zero
  8237. // duration in seconds
  8238. // @return {object} the bytes of the FLV header as a Uint8Array
  8239. var getFlvHeader = function(duration, audio, video) { // :ByteArray {
  8240. var
  8241. headBytes = new Uint8Array(3 + 1 + 1 + 4),
  8242. head = new DataView(headBytes.buffer),
  8243. metadata,
  8244. result,
  8245. metadataLength;
  8246. // default arguments
  8247. duration = duration || 0;
  8248. audio = audio === undefined ? true : audio;
  8249. video = video === undefined ? true : video;
  8250. // signature
  8251. head.setUint8(0, 0x46); // 'F'
  8252. head.setUint8(1, 0x4c); // 'L'
  8253. head.setUint8(2, 0x56); // 'V'
  8254. // version
  8255. head.setUint8(3, 0x01);
  8256. // flags
  8257. head.setUint8(4, (audio ? 0x04 : 0x00) | (video ? 0x01 : 0x00));
  8258. // data offset, should be 9 for FLV v1
  8259. head.setUint32(5, headBytes.byteLength);
  8260. // init the first FLV tag
  8261. if (duration <= 0) {
  8262. // no duration available so just write the first field of the first
  8263. // FLV tag
  8264. result = new Uint8Array(headBytes.byteLength + 4);
  8265. result.set(headBytes);
  8266. result.set([0, 0, 0, 0], headBytes.byteLength);
  8267. return result;
  8268. }
  8269. // write out the duration metadata tag
  8270. metadata = new FlvTag(FlvTag.METADATA_TAG);
  8271. metadata.pts = metadata.dts = 0;
  8272. metadata.writeMetaDataDouble('duration', duration);
  8273. metadataLength = metadata.finalize().length;
  8274. result = new Uint8Array(headBytes.byteLength + metadataLength);
  8275. result.set(headBytes);
8276. result.set(metadata.bytes, headBytes.byteLength); // append the finalized metadata tag after the header
  8277. return result;
  8278. };
  8279. module.exports = getFlvHeader;
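// Worked example (illustrative): getFlvHeader(0, true, true) yields the
// 9-byte header plus a zero PreviousTagSize field:
//   0x46 0x4c 0x56       'F' 'L' 'V' signature
//   0x01                 version 1
//   0x05                 flags: audio (0x04) | video (0x01)
//   0x00 0x00 0x00 0x09  DataOffset (the header is 9 bytes)
//   0x00 0x00 0x00 0x00  PreviousTagSize0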
  8280. },{"./flv-tag.js":43}],43:[function(require,module,exports){
  8281. /**
  8282. * An object that stores the bytes of an FLV tag and methods for
  8283. * querying and manipulating that data.
  8284. * @see http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf
  8285. */
  8286. 'use strict';
  8287. var FlvTag;
  8288. // (type:uint, extraData:Boolean = false) extends ByteArray
  8289. FlvTag = function(type, extraData) {
  8290. var
  8291. // Counter if this is a metadata tag, nal start marker if this is a video
  8292. // tag. unused if this is an audio tag
  8293. adHoc = 0, // :uint
  8294. // The default size is 16kb but this is not enough to hold iframe
  8295. // data and the resizing algorithm costs a bit so we create a larger
  8296. // starting buffer for video tags
  8297. bufferStartSize = 16384,
  8298. // checks whether the FLV tag has enough capacity to accept the proposed
  8299. // write and re-allocates the internal buffers if necessary
  8300. prepareWrite = function(flv, count) {
  8301. var
  8302. bytes,
  8303. minLength = flv.position + count;
  8304. if (minLength < flv.bytes.byteLength) {
  8305. // there's enough capacity so do nothing
  8306. return;
  8307. }
  8308. // allocate a new buffer and copy over the data that will not be modified
  8309. bytes = new Uint8Array(minLength * 2);
  8310. bytes.set(flv.bytes.subarray(0, flv.position), 0);
  8311. flv.bytes = bytes;
  8312. flv.view = new DataView(flv.bytes.buffer);
  8313. },
  8314. // commonly used metadata properties
  8315. widthBytes = FlvTag.widthBytes || new Uint8Array('width'.length),
  8316. heightBytes = FlvTag.heightBytes || new Uint8Array('height'.length),
  8317. videocodecidBytes = FlvTag.videocodecidBytes || new Uint8Array('videocodecid'.length),
  8318. i;
  8319. if (!FlvTag.widthBytes) {
  8320. // calculating the bytes of common metadata names ahead of time makes the
  8321. // corresponding writes faster because we don't have to loop over the
  8322. // characters
  8323. // re-test with test/perf.html if you're planning on changing this
  8324. for (i = 0; i < 'width'.length; i++) {
  8325. widthBytes[i] = 'width'.charCodeAt(i);
  8326. }
  8327. for (i = 0; i < 'height'.length; i++) {
  8328. heightBytes[i] = 'height'.charCodeAt(i);
  8329. }
  8330. for (i = 0; i < 'videocodecid'.length; i++) {
  8331. videocodecidBytes[i] = 'videocodecid'.charCodeAt(i);
  8332. }
  8333. FlvTag.widthBytes = widthBytes;
  8334. FlvTag.heightBytes = heightBytes;
  8335. FlvTag.videocodecidBytes = videocodecidBytes;
  8336. }
  8337. this.keyFrame = false; // :Boolean
  8338. switch (type) {
  8339. case FlvTag.VIDEO_TAG:
  8340. this.length = 16;
  8341. // Start the buffer at 256k
  8342. bufferStartSize *= 6;
  8343. break;
  8344. case FlvTag.AUDIO_TAG:
  8345. this.length = 13;
  8346. this.keyFrame = true;
  8347. break;
  8348. case FlvTag.METADATA_TAG:
  8349. this.length = 29;
  8350. this.keyFrame = true;
  8351. break;
  8352. default:
  8353. throw new Error('Unknown FLV tag type');
  8354. }
  8355. this.bytes = new Uint8Array(bufferStartSize);
  8356. this.view = new DataView(this.bytes.buffer);
  8357. this.bytes[0] = type;
  8358. this.position = this.length;
8359. this.keyFrame = extraData; // overrides the per-type value set above; undefined is treated as false
  8360. // presentation timestamp
  8361. this.pts = 0;
  8362. // decoder timestamp
  8363. this.dts = 0;
  8364. // ByteArray#writeBytes(bytes:ByteArray, offset:uint = 0, length:uint = 0)
  8365. this.writeBytes = function(bytes, offset, length) {
  8366. var
  8367. start = offset || 0,
  8368. end;
  8369. length = length || bytes.byteLength;
  8370. end = start + length;
  8371. prepareWrite(this, length);
  8372. this.bytes.set(bytes.subarray(start, end), this.position);
  8373. this.position += length;
  8374. this.length = Math.max(this.length, this.position);
  8375. };
  8376. // ByteArray#writeByte(value:int):void
  8377. this.writeByte = function(byte) {
  8378. prepareWrite(this, 1);
  8379. this.bytes[this.position] = byte;
  8380. this.position++;
  8381. this.length = Math.max(this.length, this.position);
  8382. };
  8383. // ByteArray#writeShort(value:int):void
  8384. this.writeShort = function(short) {
  8385. prepareWrite(this, 2);
  8386. this.view.setUint16(this.position, short);
  8387. this.position += 2;
  8388. this.length = Math.max(this.length, this.position);
  8389. };
  8390. // Negative index into array
  8391. // (pos:uint):int
  8392. this.negIndex = function(pos) {
  8393. return this.bytes[this.length - pos];
  8394. };
  8395. // The functions below ONLY work when this[0] == VIDEO_TAG.
8396. // We are not going to check for that because we don't want the overhead
  8397. // (nal:ByteArray = null):int
  8398. this.nalUnitSize = function() {
  8399. if (adHoc === 0) {
  8400. return 0;
  8401. }
  8402. return this.length - (adHoc + 4);
  8403. };
  8404. this.startNalUnit = function() {
  8405. // remember position and add 4 bytes
  8406. if (adHoc > 0) {
8407. throw new Error('Attempted to create new NAL without closing the old one');
  8408. }
  8409. // reserve 4 bytes for nal unit size
  8410. adHoc = this.length;
  8411. this.length += 4;
  8412. this.position = this.length;
  8413. };
  8414. // (nal:ByteArray = null):void
  8415. this.endNalUnit = function(nalContainer) {
  8416. var
  8417. nalStart, // :uint
  8418. nalLength; // :uint
  8419. // Rewind to the marker and write the size
  8420. if (this.length === adHoc + 4) {
8421. // we started a nal unit, but didn't write one, so roll back the 4 byte size value
  8422. this.length -= 4;
  8423. } else if (adHoc > 0) {
  8424. nalStart = adHoc + 4;
  8425. nalLength = this.length - nalStart;
  8426. this.position = adHoc;
  8427. this.view.setUint32(this.position, nalLength);
  8428. this.position = this.length;
  8429. if (nalContainer) {
  8430. // Add the tag to the NAL unit
  8431. nalContainer.push(this.bytes.subarray(nalStart, nalStart + nalLength));
  8432. }
  8433. }
  8434. adHoc = 0;
  8435. };
  8436. /**
  8437. * Write out a 64-bit floating point valued metadata property. This method is
  8438. * called frequently during a typical parse and needs to be fast.
  8439. */
  8440. // (key:String, val:Number):void
  8441. this.writeMetaDataDouble = function(key, val) {
  8442. var i;
  8443. prepareWrite(this, 2 + key.length + 9);
  8444. // write size of property name
  8445. this.view.setUint16(this.position, key.length);
  8446. this.position += 2;
  8447. // this next part looks terrible but it improves parser throughput by
  8448. // 10kB/s in my testing
  8449. // write property name
  8450. if (key === 'width') {
  8451. this.bytes.set(widthBytes, this.position);
  8452. this.position += 5;
  8453. } else if (key === 'height') {
  8454. this.bytes.set(heightBytes, this.position);
  8455. this.position += 6;
  8456. } else if (key === 'videocodecid') {
  8457. this.bytes.set(videocodecidBytes, this.position);
  8458. this.position += 12;
  8459. } else {
  8460. for (i = 0; i < key.length; i++) {
  8461. this.bytes[this.position] = key.charCodeAt(i);
  8462. this.position++;
  8463. }
  8464. }
8465. // skip one byte: the buffer is zero-filled, so this leaves the AMF0 Number (0x00) type marker in place
  8466. this.position++;
  8467. // write property value
  8468. this.view.setFloat64(this.position, val);
  8469. this.position += 8;
  8470. // update flv tag length
  8471. this.length = Math.max(this.length, this.position);
  8472. ++adHoc;
  8473. };
  8474. // (key:String, val:Boolean):void
  8475. this.writeMetaDataBoolean = function(key, val) {
  8476. var i;
  8477. prepareWrite(this, 2);
  8478. this.view.setUint16(this.position, key.length);
  8479. this.position += 2;
  8480. for (i = 0; i < key.length; i++) {
  8481. // if key.charCodeAt(i) >= 255, handle error
  8482. prepareWrite(this, 1);
  8483. this.bytes[this.position] = key.charCodeAt(i);
  8484. this.position++;
  8485. }
  8486. prepareWrite(this, 2);
8487. this.view.setUint8(this.position, 0x01); // AMF0 Boolean type marker
  8488. this.position++;
  8489. this.view.setUint8(this.position, val ? 0x01 : 0x00);
  8490. this.position++;
  8491. this.length = Math.max(this.length, this.position);
  8492. ++adHoc;
  8493. };
  8494. // ():ByteArray
  8495. this.finalize = function() {
  8496. var
  8497. dtsDelta, // :int
  8498. len; // :int
  8499. switch (this.bytes[0]) {
  8500. // Video Data
  8501. case FlvTag.VIDEO_TAG:
  8502. // We only support AVC, 1 = key frame (for AVC, a seekable
  8503. // frame), 2 = inter frame (for AVC, a non-seekable frame)
  8504. this.bytes[11] = ((this.keyFrame || extraData) ? 0x10 : 0x20) | 0x07;
  8505. this.bytes[12] = extraData ? 0x00 : 0x01;
  8506. dtsDelta = this.pts - this.dts;
  8507. this.bytes[13] = (dtsDelta & 0x00FF0000) >>> 16;
  8508. this.bytes[14] = (dtsDelta & 0x0000FF00) >>> 8;
  8509. this.bytes[15] = (dtsDelta & 0x000000FF) >>> 0;
  8510. break;
  8511. case FlvTag.AUDIO_TAG:
  8512. this.bytes[11] = 0xAF; // 44 kHz, 16-bit stereo
  8513. this.bytes[12] = extraData ? 0x00 : 0x01;
  8514. break;
  8515. case FlvTag.METADATA_TAG:
  8516. this.position = 11;
  8517. this.view.setUint8(this.position, 0x02); // String type
  8518. this.position++;
  8519. this.view.setUint16(this.position, 0x0A); // 10 Bytes
  8520. this.position += 2;
  8521. // set "onMetaData"
  8522. this.bytes.set([0x6f, 0x6e, 0x4d, 0x65,
  8523. 0x74, 0x61, 0x44, 0x61,
  8524. 0x74, 0x61], this.position);
  8525. this.position += 10;
  8526. this.bytes[this.position] = 0x08; // Array type
  8527. this.position++;
  8528. this.view.setUint32(this.position, adHoc);
  8529. this.position = this.length;
  8530. this.bytes.set([0, 0, 9], this.position);
  8531. this.position += 3; // End Data Tag
  8532. this.length = this.position;
  8533. break;
  8534. }
  8535. len = this.length - 11;
  8536. // write the DataSize field
  8537. this.bytes[ 1] = (len & 0x00FF0000) >>> 16;
  8538. this.bytes[ 2] = (len & 0x0000FF00) >>> 8;
  8539. this.bytes[ 3] = (len & 0x000000FF) >>> 0;
  8540. // write the Timestamp
  8541. this.bytes[ 4] = (this.dts & 0x00FF0000) >>> 16;
  8542. this.bytes[ 5] = (this.dts & 0x0000FF00) >>> 8;
  8543. this.bytes[ 6] = (this.dts & 0x000000FF) >>> 0;
  8544. this.bytes[ 7] = (this.dts & 0xFF000000) >>> 24;
  8545. // write the StreamID
  8546. this.bytes[ 8] = 0;
  8547. this.bytes[ 9] = 0;
  8548. this.bytes[10] = 0;
8549. // We may be at the very end of the buffer with too little room left to
8550. // write a uint32, so grow it by four bytes (prepareWrite counts uint8 slots)
  8551. prepareWrite(this, 4);
  8552. this.view.setUint32(this.length, this.length);
  8553. this.length += 4;
  8554. this.position += 4;
  8555. // trim down the byte buffer to what is actually being used
  8556. this.bytes = this.bytes.subarray(0, this.length);
  8557. this.frameTime = FlvTag.frameTime(this.bytes);
  8558. // if bytes.bytelength isn't equal to this.length, handle error
  8559. return this;
  8560. };
  8561. };
  8562. FlvTag.AUDIO_TAG = 0x08; // == 8, :uint
  8563. FlvTag.VIDEO_TAG = 0x09; // == 9, :uint
  8564. FlvTag.METADATA_TAG = 0x12; // == 18, :uint
  8565. // (tag:ByteArray):Boolean {
  8566. FlvTag.isAudioFrame = function(tag) {
  8567. return FlvTag.AUDIO_TAG === tag[0];
  8568. };
  8569. // (tag:ByteArray):Boolean {
  8570. FlvTag.isVideoFrame = function(tag) {
  8571. return FlvTag.VIDEO_TAG === tag[0];
  8572. };
  8573. // (tag:ByteArray):Boolean {
  8574. FlvTag.isMetaData = function(tag) {
  8575. return FlvTag.METADATA_TAG === tag[0];
  8576. };
  8577. // (tag:ByteArray):Boolean {
  8578. FlvTag.isKeyFrame = function(tag) {
  8579. if (FlvTag.isVideoFrame(tag)) {
  8580. return tag[11] === 0x17;
  8581. }
  8582. if (FlvTag.isAudioFrame(tag)) {
  8583. return true;
  8584. }
  8585. if (FlvTag.isMetaData(tag)) {
  8586. return true;
  8587. }
  8588. return false;
  8589. };
  8590. // (tag:ByteArray):uint {
  8591. FlvTag.frameTime = function(tag) {
  8592. var pts = tag[ 4] << 16; // :uint
  8593. pts |= tag[ 5] << 8;
  8594. pts |= tag[ 6] << 0;
  8595. pts |= tag[ 7] << 24;
  8596. return pts;
  8597. };
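// Example (illustrative): a DTS of 0x0A1B2C3D is serialized by finalize()
// into tag bytes 4-7 as 0x1B 0x2C 0x3D 0x0A (the low 24 bits big-endian,
// then the high 8 bits in the TimestampExtended byte), and frameTime
// reassembles it as (0x1B << 16) | (0x2C << 8) | 0x3D | (0x0A << 24).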
  8598. module.exports = FlvTag;
  8599. },{}],44:[function(require,module,exports){
  8600. module.exports = {
  8601. tag: require('./flv-tag'),
  8602. Transmuxer: require('./transmuxer'),
  8603. getFlvHeader: require('./flv-header')
  8604. };
  8605. },{"./flv-header":42,"./flv-tag":43,"./transmuxer":46}],45:[function(require,module,exports){
  8606. 'use strict';
  8607. var TagList = function() {
  8608. var self = this;
  8609. this.list = [];
  8610. this.push = function(tag) {
  8611. this.list.push({
  8612. bytes: tag.bytes,
  8613. dts: tag.dts,
  8614. pts: tag.pts,
  8615. keyFrame: tag.keyFrame,
  8616. metaDataTag: tag.metaDataTag
  8617. });
  8618. };
  8619. Object.defineProperty(this, 'length', {
  8620. get: function() {
  8621. return self.list.length;
  8622. }
  8623. });
  8624. };
  8625. module.exports = TagList;
  8626. },{}],46:[function(require,module,exports){
  8627. 'use strict';
  8628. var Stream = require('../utils/stream.js');
  8629. var FlvTag = require('./flv-tag.js');
  8630. var m2ts = require('../m2ts/m2ts.js');
  8631. var AdtsStream = require('../codecs/adts.js');
  8632. var H264Stream = require('../codecs/h264').H264Stream;
  8633. var CoalesceStream = require('./coalesce-stream.js');
  8634. var TagList = require('./tag-list.js');
  8635. var
  8636. Transmuxer,
  8637. VideoSegmentStream,
  8638. AudioSegmentStream,
  8639. collectTimelineInfo,
  8640. metaDataTag,
  8641. extraDataTag;
  8642. /**
8643. * Store information about the start and end of the track and the
  8644. * duration for each frame/sample we process in order to calculate
  8645. * the baseMediaDecodeTime
  8646. */
  8647. collectTimelineInfo = function(track, data) {
  8648. if (typeof data.pts === 'number') {
  8649. if (track.timelineStartInfo.pts === undefined) {
  8650. track.timelineStartInfo.pts = data.pts;
  8651. } else {
  8652. track.timelineStartInfo.pts =
  8653. Math.min(track.timelineStartInfo.pts, data.pts);
  8654. }
  8655. }
  8656. if (typeof data.dts === 'number') {
  8657. if (track.timelineStartInfo.dts === undefined) {
  8658. track.timelineStartInfo.dts = data.dts;
  8659. } else {
  8660. track.timelineStartInfo.dts =
  8661. Math.min(track.timelineStartInfo.dts, data.dts);
  8662. }
  8663. }
  8664. };
  8665. metaDataTag = function(track, pts) {
  8666. var
  8667. tag = new FlvTag(FlvTag.METADATA_TAG); // :FlvTag
  8668. tag.dts = pts;
  8669. tag.pts = pts;
  8670. tag.writeMetaDataDouble('videocodecid', 7);
  8671. tag.writeMetaDataDouble('width', track.width);
  8672. tag.writeMetaDataDouble('height', track.height);
  8673. return tag;
  8674. };
  8675. extraDataTag = function(track, pts) {
  8676. var
  8677. i,
  8678. tag = new FlvTag(FlvTag.VIDEO_TAG, true);
  8679. tag.dts = pts;
  8680. tag.pts = pts;
  8681. tag.writeByte(0x01);// version
  8682. tag.writeByte(track.profileIdc);// profile
  8683. tag.writeByte(track.profileCompatibility);// compatibility
  8684. tag.writeByte(track.levelIdc);// level
8685. tag.writeByte(0xFC | 0x03); // reserved (6 bits), NALU length size - 1 (2 bits)
  8686. tag.writeByte(0xE0 | 0x01); // reserved (3 bits), num of SPS (5 bits)
  8687. tag.writeShort(track.sps[0].length); // data of SPS
  8688. tag.writeBytes(track.sps[0]); // SPS
8689. tag.writeByte(track.pps.length); // num of PPS (will there ever be more than 1 PPS?)
  8690. for (i = 0; i < track.pps.length; ++i) {
  8691. tag.writeShort(track.pps[i].length); // 2 bytes for length of PPS
  8692. tag.writeBytes(track.pps[i]); // data of PPS
  8693. }
  8694. return tag;
  8695. };
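// The body written above matches the AVCDecoderConfigurationRecord layout
// (configurationVersion, profile, compatibility, level, NALU length size,
// then length-prefixed SPS and PPS arrays) that FLV expects in an AVC
// "sequence header" packet; see ISO/IEC 14496-15.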
  8696. /**
  8697. * Constructs a single-track, media segment from AAC data
  8698. * events. The output of this stream can be fed to flash.
  8699. */
  8700. AudioSegmentStream = function(track) {
  8701. var
  8702. adtsFrames = [],
  8703. oldExtraData;
  8704. AudioSegmentStream.prototype.init.call(this);
  8705. this.push = function(data) {
  8706. collectTimelineInfo(track, data);
  8707. if (track && track.channelcount === undefined) {
  8708. track.audioobjecttype = data.audioobjecttype;
  8709. track.channelcount = data.channelcount;
  8710. track.samplerate = data.samplerate;
  8711. track.samplingfrequencyindex = data.samplingfrequencyindex;
  8712. track.samplesize = data.samplesize;
  8713. track.extraData = (track.audioobjecttype << 11) |
  8714. (track.samplingfrequencyindex << 7) |
  8715. (track.channelcount << 3);
  8716. }
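// Worked example (illustrative): AAC-LC (audioobjecttype 2) at 44.1kHz
// (samplingfrequencyindex 4), stereo (channelcount 2) packs to
// (2 << 11) | (4 << 7) | (2 << 3) = 0x1210, the two-byte
// AudioSpecificConfig written into the AAC sequence-header tag below.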
  8717. data.pts = Math.round(data.pts / 90);
  8718. data.dts = Math.round(data.dts / 90);
  8719. // buffer audio data until end() is called
  8720. adtsFrames.push(data);
  8721. };
  8722. this.flush = function() {
  8723. var currentFrame, adtsFrame, lastMetaPts, tags = new TagList();
  8724. // return early if no audio data has been observed
  8725. if (adtsFrames.length === 0) {
  8726. this.trigger('done', 'AudioSegmentStream');
  8727. return;
  8728. }
  8729. lastMetaPts = -Infinity;
  8730. while (adtsFrames.length) {
  8731. currentFrame = adtsFrames.shift();
  8732. // write out metadata tags every 1 second so that the decoder
  8733. // is re-initialized quickly after seeking into a different
  8734. // audio configuration
  8735. if (track.extraData !== oldExtraData || currentFrame.pts - lastMetaPts >= 1000) {
  8736. adtsFrame = new FlvTag(FlvTag.METADATA_TAG);
  8737. adtsFrame.pts = currentFrame.pts;
  8738. adtsFrame.dts = currentFrame.dts;
  8739. // AAC is always 10
  8740. adtsFrame.writeMetaDataDouble('audiocodecid', 10);
  8741. adtsFrame.writeMetaDataBoolean('stereo', track.channelcount === 2);
  8742. adtsFrame.writeMetaDataDouble('audiosamplerate', track.samplerate);
  8743. // Is AAC always 16 bit?
  8744. adtsFrame.writeMetaDataDouble('audiosamplesize', 16);
  8745. tags.push(adtsFrame.finalize());
  8746. oldExtraData = track.extraData;
  8747. adtsFrame = new FlvTag(FlvTag.AUDIO_TAG, true);
  8748. // For audio, DTS is always the same as PTS. We want to set the DTS
  8749. // however so we can compare with video DTS to determine approximate
  8750. // packet order
  8751. adtsFrame.pts = currentFrame.pts;
  8752. adtsFrame.dts = currentFrame.dts;
  8753. adtsFrame.view.setUint16(adtsFrame.position, track.extraData);
  8754. adtsFrame.position += 2;
  8755. adtsFrame.length = Math.max(adtsFrame.length, adtsFrame.position);
  8756. tags.push(adtsFrame.finalize());
  8757. lastMetaPts = currentFrame.pts;
  8758. }
  8759. adtsFrame = new FlvTag(FlvTag.AUDIO_TAG);
  8760. adtsFrame.pts = currentFrame.pts;
  8761. adtsFrame.dts = currentFrame.dts;
  8762. adtsFrame.writeBytes(currentFrame.data);
  8763. tags.push(adtsFrame.finalize());
  8764. }
  8765. oldExtraData = null;
  8766. this.trigger('data', {track: track, tags: tags.list});
  8767. this.trigger('done', 'AudioSegmentStream');
  8768. };
  8769. };
  8770. AudioSegmentStream.prototype = new Stream();
  8771. /**
  8772. * Store FlvTags for the h264 stream
  8773. * @param track {object} track metadata configuration
  8774. */
  8775. VideoSegmentStream = function(track) {
  8776. var
  8777. nalUnits = [],
  8778. config,
  8779. h264Frame;
  8780. VideoSegmentStream.prototype.init.call(this);
  8781. this.finishFrame = function(tags, frame) {
  8782. if (!frame) {
  8783. return;
  8784. }
  8785. // Check if keyframe and the length of tags.
  8786. // This makes sure we write metadata on the first frame of a segment.
  8787. if (config && track && track.newMetadata &&
  8788. (frame.keyFrame || tags.length === 0)) {
  8789. // Push extra data on every IDR frame in case we did a stream change + seek
  8790. var metaTag = metaDataTag(config, frame.dts).finalize();
  8791. var extraTag = extraDataTag(track, frame.dts).finalize();
  8792. metaTag.metaDataTag = extraTag.metaDataTag = true;
  8793. tags.push(metaTag);
  8794. tags.push(extraTag);
  8795. track.newMetadata = false;
  8796. }
  8797. frame.endNalUnit();
  8798. tags.push(frame.finalize());
  8799. h264Frame = null;
  8800. };
  8801. this.push = function(data) {
  8802. collectTimelineInfo(track, data);
  8803. data.pts = Math.round(data.pts / 90);
  8804. data.dts = Math.round(data.dts / 90);
  8805. // buffer video until flush() is called
  8806. nalUnits.push(data);
  8807. };
  8808. this.flush = function() {
  8809. var
  8810. currentNal,
  8811. tags = new TagList();
  8812. // Throw away nalUnits at the start of the byte stream until we find
  8813. // the first AUD
  8814. while (nalUnits.length) {
  8815. if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
  8816. break;
  8817. }
  8818. nalUnits.shift();
  8819. }
  8820. // return early if no video data has been observed
  8821. if (nalUnits.length === 0) {
  8822. this.trigger('done', 'VideoSegmentStream');
  8823. return;
  8824. }
  8825. while (nalUnits.length) {
  8826. currentNal = nalUnits.shift();
  8827. // record the track config
  8828. if (currentNal.nalUnitType === 'seq_parameter_set_rbsp') {
  8829. track.newMetadata = true;
  8830. config = currentNal.config;
  8831. track.width = config.width;
  8832. track.height = config.height;
  8833. track.sps = [currentNal.data];
  8834. track.profileIdc = config.profileIdc;
  8835. track.levelIdc = config.levelIdc;
  8836. track.profileCompatibility = config.profileCompatibility;
  8837. h264Frame.endNalUnit();
  8838. } else if (currentNal.nalUnitType === 'pic_parameter_set_rbsp') {
  8839. track.newMetadata = true;
  8840. track.pps = [currentNal.data];
  8841. h264Frame.endNalUnit();
  8842. } else if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
  8843. if (h264Frame) {
  8844. this.finishFrame(tags, h264Frame);
  8845. }
  8846. h264Frame = new FlvTag(FlvTag.VIDEO_TAG);
  8847. h264Frame.pts = currentNal.pts;
  8848. h264Frame.dts = currentNal.dts;
  8849. } else {
  8850. if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
  8851. // the current sample is a key frame
  8852. h264Frame.keyFrame = true;
  8853. }
  8854. h264Frame.endNalUnit();
  8855. }
  8856. h264Frame.startNalUnit();
  8857. h264Frame.writeBytes(currentNal.data);
  8858. }
  8859. if (h264Frame) {
  8860. this.finishFrame(tags, h264Frame);
  8861. }
  8862. this.trigger('data', {track: track, tags: tags.list});
  8863. // Continue with the flush process now
  8864. this.trigger('done', 'VideoSegmentStream');
  8865. };
  8866. };
  8867. VideoSegmentStream.prototype = new Stream();
  8868. /**
8869. * An object that incrementally transmuxes MPEG-2 Transport Stream
  8870. * chunks into an FLV.
  8871. */
  8872. Transmuxer = function(options) {
  8873. var
  8874. self = this,
  8875. packetStream, parseStream, elementaryStream,
  8876. videoTimestampRolloverStream, audioTimestampRolloverStream,
  8877. timedMetadataTimestampRolloverStream,
  8878. adtsStream, h264Stream,
  8879. videoSegmentStream, audioSegmentStream, captionStream,
  8880. coalesceStream;
  8881. Transmuxer.prototype.init.call(this);
  8882. options = options || {};
  8883. // expose the metadata stream
  8884. this.metadataStream = new m2ts.MetadataStream();
  8885. options.metadataStream = this.metadataStream;
  8886. // set up the parsing pipeline
  8887. packetStream = new m2ts.TransportPacketStream();
  8888. parseStream = new m2ts.TransportParseStream();
  8889. elementaryStream = new m2ts.ElementaryStream();
  8890. videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
  8891. audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
  8892. timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
  8893. adtsStream = new AdtsStream();
  8894. h264Stream = new H264Stream();
  8895. coalesceStream = new CoalesceStream(options);
  8896. // disassemble MPEG2-TS packets into elementary streams
  8897. packetStream
  8898. .pipe(parseStream)
  8899. .pipe(elementaryStream);
  8900. // !!THIS ORDER IS IMPORTANT!!
  8901. // demux the streams
  8902. elementaryStream
  8903. .pipe(videoTimestampRolloverStream)
  8904. .pipe(h264Stream);
  8905. elementaryStream
  8906. .pipe(audioTimestampRolloverStream)
  8907. .pipe(adtsStream);
  8908. elementaryStream
  8909. .pipe(timedMetadataTimestampRolloverStream)
  8910. .pipe(this.metadataStream)
  8911. .pipe(coalesceStream);
  8912. // if CEA-708 parsing is available, hook up a caption stream
  8913. captionStream = new m2ts.CaptionStream();
  8914. h264Stream.pipe(captionStream)
  8915. .pipe(coalesceStream);
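// The resulting pipeline (sketch):
//
//   packetStream -> parseStream -> elementaryStream
//     elementaryStream -> videoTimestampRolloverStream -> h264Stream
//     elementaryStream -> audioTimestampRolloverStream -> adtsStream
//     elementaryStream -> timedMetadataTimestampRolloverStream
//                      -> metadataStream -> coalesceStream
//     h264Stream -> captionStream -> coalesceStream
//
// videoSegmentStream and audioSegmentStream are spliced in below, once the
// PMT tells us which tracks exist.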
  8916. // hook up the segment streams once track metadata is delivered
  8917. elementaryStream.on('data', function(data) {
  8918. var i, videoTrack, audioTrack;
  8919. if (data.type === 'metadata') {
  8920. i = data.tracks.length;
  8921. // scan the tracks listed in the metadata
  8922. while (i--) {
  8923. if (data.tracks[i].type === 'video') {
  8924. videoTrack = data.tracks[i];
  8925. } else if (data.tracks[i].type === 'audio') {
  8926. audioTrack = data.tracks[i];
  8927. }
  8928. }
  8929. // hook up the video segment stream to the first track with h264 data
  8930. if (videoTrack && !videoSegmentStream) {
  8931. coalesceStream.numberOfTracks++;
  8932. videoSegmentStream = new VideoSegmentStream(videoTrack);
  8933. // Set up the final part of the video pipeline
  8934. h264Stream
  8935. .pipe(videoSegmentStream)
  8936. .pipe(coalesceStream);
  8937. }
  8938. if (audioTrack && !audioSegmentStream) {
  8939. // hook up the audio segment stream to the first track with aac data
  8940. coalesceStream.numberOfTracks++;
  8941. audioSegmentStream = new AudioSegmentStream(audioTrack);
  8942. // Set up the final part of the audio pipeline
  8943. adtsStream
  8944. .pipe(audioSegmentStream)
  8945. .pipe(coalesceStream);
  8946. }
  8947. }
  8948. });
  8949. // feed incoming data to the front of the parsing pipeline
  8950. this.push = function(data) {
  8951. packetStream.push(data);
  8952. };
  8953. // flush any buffered data
  8954. this.flush = function() {
  8955. // Start at the top of the pipeline and flush all pending work
  8956. packetStream.flush();
  8957. };
  8958. // Re-emit any data coming from the coalesce stream to the outside world
  8959. coalesceStream.on('data', function(event) {
  8960. self.trigger('data', event);
  8961. });
  8962. // Let the consumer know we have finished flushing the entire pipeline
  8963. coalesceStream.on('done', function() {
  8964. self.trigger('done');
  8965. });
  8966. };
  8967. Transmuxer.prototype = new Stream();
  8968. // forward compatibility
  8969. module.exports = Transmuxer;
  8970. },{"../codecs/adts.js":38,"../codecs/h264":39,"../m2ts/m2ts.js":48,"../utils/stream.js":60,"./coalesce-stream.js":41,"./flv-tag.js":43,"./tag-list.js":45}],47:[function(require,module,exports){
  8971. /**
  8972. * mux.js
  8973. *
  8974. * Copyright (c) 2015 Brightcove
  8975. * All rights reserved.
  8976. *
  8977. * Reads in-band caption information from a video elementary
  8978. * stream. Captions must follow the CEA-708 standard for injection
8979. * into an MPEG-2 transport stream.
  8980. * @see https://en.wikipedia.org/wiki/CEA-708
  8981. */
  8982. 'use strict';
  8983. // -----------------
  8984. // Link To Transport
  8985. // -----------------
  8986. // Supplemental enhancement information (SEI) NAL units have a
  8987. // payload type field to indicate how they are to be
8988. // interpreted. CEA-708 caption content is always transmitted with
  8989. // payload type 0x04.
  8990. var USER_DATA_REGISTERED_ITU_T_T35 = 4,
  8991. RBSP_TRAILING_BITS = 128,
  8992. Stream = require('../utils/stream');
  8993. /**
  8994. * Parse a supplemental enhancement information (SEI) NAL unit.
  8995. * Stops parsing once a message of type ITU T T35 has been found.
  8996. *
  8997. * @param bytes {Uint8Array} the bytes of a SEI NAL unit
  8998. * @return {object} the parsed SEI payload
  8999. * @see Rec. ITU-T H.264, 7.3.2.3.1
  9000. */
  9001. var parseSei = function(bytes) {
  9002. var
  9003. i = 0,
  9004. result = {
  9005. payloadType: -1,
  9006. payloadSize: 0
  9007. },
  9008. payloadType = 0,
  9009. payloadSize = 0;
9010. // go through the sei_rbsp parsing each individual sei_message
  9011. while (i < bytes.byteLength) {
  9012. // stop once we have hit the end of the sei_rbsp
  9013. if (bytes[i] === RBSP_TRAILING_BITS) {
  9014. break;
  9015. }
  9016. // Parse payload type
  9017. while (bytes[i] === 0xFF) {
  9018. payloadType += 255;
  9019. i++;
  9020. }
  9021. payloadType += bytes[i++];
  9022. // Parse payload size
  9023. while (bytes[i] === 0xFF) {
  9024. payloadSize += 255;
  9025. i++;
  9026. }
  9027. payloadSize += bytes[i++];
  9028. // this sei_message is a 608/708 caption so save it and break
  9029. // there can only ever be one caption message in a frame's sei
  9030. if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
  9031. result.payloadType = payloadType;
  9032. result.payloadSize = payloadSize;
  9033. result.payload = bytes.subarray(i, i + payloadSize);
  9034. break;
  9035. }
  9036. // skip the payload and parse the next message
  9037. i += payloadSize;
  9038. payloadType = 0;
  9039. payloadSize = 0;
  9040. }
  9041. return result;
  9042. };
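// Worked example (illustrative): SEI payload types and sizes use a
// base-255 prefix code, so the bytes 0xFF 0xFF 0x05 decode as
// 255 + 255 + 5 = 515. The loops above accumulate each 0xFF and then add
// the terminating byte.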
  9043. // see ANSI/SCTE 128-1 (2013), section 8.1
  9044. var parseUserData = function(sei) {
9045. // itu_t_t35_country_code must be 181 (United States) for
  9046. // captions
  9047. if (sei.payload[0] !== 181) {
  9048. return null;
  9049. }
  9050. // itu_t_t35_provider_code should be 49 (ATSC) for captions
  9051. if (((sei.payload[1] << 8) | sei.payload[2]) !== 49) {
  9052. return null;
  9053. }
  9054. // the user_identifier should be "GA94" to indicate ATSC1 data
  9055. if (String.fromCharCode(sei.payload[3],
  9056. sei.payload[4],
  9057. sei.payload[5],
  9058. sei.payload[6]) !== 'GA94') {
  9059. return null;
  9060. }
  9061. // finally, user_data_type_code should be 0x03 for caption data
  9062. if (sei.payload[7] !== 0x03) {
  9063. return null;
  9064. }
  9065. // return the user_data_type_structure and strip the trailing
  9066. // marker bits
  9067. return sei.payload.subarray(8, sei.payload.length - 1);
  9068. };
  9069. // see CEA-708-D, section 4.4
  9070. var parseCaptionPackets = function(pts, userData) {
  9071. var results = [], i, count, offset, data;
  9072. // if this is just filler, return immediately
  9073. if (!(userData[0] & 0x40)) {
  9074. return results;
  9075. }
  9076. // parse out the cc_data_1 and cc_data_2 fields
  9077. count = userData[0] & 0x1f;
  9078. for (i = 0; i < count; i++) {
  9079. offset = i * 3;
  9080. data = {
  9081. type: userData[offset + 2] & 0x03,
  9082. pts: pts
  9083. };
  9084. // capture cc data when cc_valid is 1
  9085. if (userData[offset + 2] & 0x04) {
  9086. data.ccData = (userData[offset + 3] << 8) | userData[offset + 4];
  9087. results.push(data);
  9088. }
  9089. }
  9090. return results;
  9091. };
  9092. var CaptionStream = function() {
  9093. CaptionStream.prototype.init.call(this);
  9094. this.captionPackets_ = [];
  9095. this.field1_ = new Cea608Stream(); // eslint-disable-line no-use-before-define
  9096. // forward data and done events from field1_ to this CaptionStream
  9097. this.field1_.on('data', this.trigger.bind(this, 'data'));
  9098. this.field1_.on('done', this.trigger.bind(this, 'done'));
  9099. };
  9100. CaptionStream.prototype = new Stream();
  9101. CaptionStream.prototype.push = function(event) {
  9102. var sei, userData;
  9103. // only examine SEI NALs
  9104. if (event.nalUnitType !== 'sei_rbsp') {
  9105. return;
  9106. }
  9107. // parse the sei
  9108. sei = parseSei(event.escapedRBSP);
  9109. // ignore everything but user_data_registered_itu_t_t35
  9110. if (sei.payloadType !== USER_DATA_REGISTERED_ITU_T_T35) {
  9111. return;
  9112. }
  9113. // parse out the user data payload
  9114. userData = parseUserData(sei);
  9115. // ignore unrecognized userData
  9116. if (!userData) {
  9117. return;
  9118. }
  9119. // parse out CC data packets and save them for later
  9120. this.captionPackets_ = this.captionPackets_.concat(parseCaptionPackets(event.pts, userData));
  9121. };
  9122. CaptionStream.prototype.flush = function() {
  9123. // make sure we actually parsed captions before proceeding
  9124. if (!this.captionPackets_.length) {
  9125. this.field1_.flush();
  9126. return;
  9127. }
  9128. // In Chrome, the Array#sort function is not stable so add a
  9129. // presortIndex that we can use to ensure we get a stable-sort
  9130. this.captionPackets_.forEach(function(elem, idx) {
  9131. elem.presortIndex = idx;
  9132. });
  9133. // sort caption byte-pairs based on their PTS values
  9134. this.captionPackets_.sort(function(a, b) {
  9135. if (a.pts === b.pts) {
  9136. return a.presortIndex - b.presortIndex;
  9137. }
  9138. return a.pts - b.pts;
  9139. });
  9140. // Push each caption into Cea608Stream
  9141. this.captionPackets_.forEach(this.field1_.push, this.field1_);
  9142. this.captionPackets_.length = 0;
  9143. this.field1_.flush();
  9144. return;
  9145. };
  9146. // ----------------------
  9147. // Session to Application
  9148. // ----------------------
  9149. var BASIC_CHARACTER_TRANSLATION = {
  9150. 0x2a: 0xe1,
  9151. 0x5c: 0xe9,
  9152. 0x5e: 0xed,
  9153. 0x5f: 0xf3,
  9154. 0x60: 0xfa,
  9155. 0x7b: 0xe7,
  9156. 0x7c: 0xf7,
  9157. 0x7d: 0xd1,
  9158. 0x7e: 0xf1,
  9159. 0x7f: 0x2588
  9160. };
  9161. var getCharFromCode = function(code) {
  9162. if (code === null) {
  9163. return '';
  9164. }
  9165. code = BASIC_CHARACTER_TRANSLATION[code] || code;
  9166. return String.fromCharCode(code);
  9167. };
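// e.g. getCharFromCode(0x7e) returns '\u00f1' ('ñ') via the translation
// table, while an untranslated code such as 0x41 passes straight through
// as 'A'.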
  9168. // Constants for the byte codes recognized by Cea608Stream. This
  9169. // list is not exhaustive. For a more comprehensive listing and
  9170. // semantics see
  9171. // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
  9172. var PADDING = 0x0000,
  9173. // Pop-on Mode
  9174. RESUME_CAPTION_LOADING = 0x1420,
  9175. END_OF_CAPTION = 0x142f,
  9176. // Roll-up Mode
  9177. ROLL_UP_2_ROWS = 0x1425,
  9178. ROLL_UP_3_ROWS = 0x1426,
  9179. ROLL_UP_4_ROWS = 0x1427,
  9180. CARRIAGE_RETURN = 0x142d,
  9181. // Erasure
  9182. BACKSPACE = 0x1421,
  9183. ERASE_DISPLAYED_MEMORY = 0x142c,
  9184. ERASE_NON_DISPLAYED_MEMORY = 0x142e;
  9185. // the index of the last row in a CEA-608 display buffer
  9186. var BOTTOM_ROW = 14;
  9187. // CEA-608 captions are rendered onto a 34x15 matrix of character
  9188. // cells. The "bottom" row is the last element in the outer array.
  9189. var createDisplayBuffer = function() {
  9190. var result = [], i = BOTTOM_ROW + 1;
  9191. while (i--) {
  9192. result.push('');
  9193. }
  9194. return result;
  9195. };
  9196. var Cea608Stream = function() {
  9197. Cea608Stream.prototype.init.call(this);
  9198. this.mode_ = 'popOn';
  9199. // When in roll-up mode, the index of the last row that will
  9200. // actually display captions. If a caption is shifted to a row
  9201. // with a lower index than this, it is cleared from the display
  9202. // buffer
  9203. this.topRow_ = 0;
  9204. this.startPts_ = 0;
  9205. this.displayed_ = createDisplayBuffer();
  9206. this.nonDisplayed_ = createDisplayBuffer();
  9207. this.lastControlCode_ = null;
  9208. this.push = function(packet) {
  9209. // Ignore other channels
  9210. if (packet.type !== 0) {
  9211. return;
  9212. }
  9213. var data, swap, char0, char1;
  9214. // remove the parity bits
  9215. data = packet.ccData & 0x7f7f;
  9216. // ignore duplicate control codes
  9217. if (data === this.lastControlCode_) {
  9218. this.lastControlCode_ = null;
  9219. return;
  9220. }
  9221. // Store control codes
  9222. if ((data & 0xf000) === 0x1000) {
  9223. this.lastControlCode_ = data;
  9224. } else {
  9225. this.lastControlCode_ = null;
  9226. }
  9227. switch (data) {
  9228. case PADDING:
  9229. break;
  9230. case RESUME_CAPTION_LOADING:
  9231. this.mode_ = 'popOn';
  9232. break;
  9233. case END_OF_CAPTION:
  9234. // if a caption was being displayed, it's gone now
  9235. this.flushDisplayed(packet.pts);
  9236. // flip memory
  9237. swap = this.displayed_;
  9238. this.displayed_ = this.nonDisplayed_;
  9239. this.nonDisplayed_ = swap;
  9240. // start measuring the time to display the caption
  9241. this.startPts_ = packet.pts;
  9242. break;
  9243. case ROLL_UP_2_ROWS:
  9244. this.topRow_ = BOTTOM_ROW - 1;
  9245. this.mode_ = 'rollUp';
  9246. break;
  9247. case ROLL_UP_3_ROWS:
  9248. this.topRow_ = BOTTOM_ROW - 2;
  9249. this.mode_ = 'rollUp';
  9250. break;
  9251. case ROLL_UP_4_ROWS:
  9252. this.topRow_ = BOTTOM_ROW - 3;
  9253. this.mode_ = 'rollUp';
  9254. break;
  9255. case CARRIAGE_RETURN:
  9256. this.flushDisplayed(packet.pts);
  9257. this.shiftRowsUp_();
  9258. this.startPts_ = packet.pts;
  9259. break;
  9260. case BACKSPACE:
  9261. if (this.mode_ === 'popOn') {
  9262. this.nonDisplayed_[BOTTOM_ROW] = this.nonDisplayed_[BOTTOM_ROW].slice(0, -1);
  9263. } else {
  9264. this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
  9265. }
  9266. break;
  9267. case ERASE_DISPLAYED_MEMORY:
  9268. this.flushDisplayed(packet.pts);
  9269. this.displayed_ = createDisplayBuffer();
  9270. break;
  9271. case ERASE_NON_DISPLAYED_MEMORY:
  9272. this.nonDisplayed_ = createDisplayBuffer();
  9273. break;
  9274. default:
  9275. char0 = data >>> 8;
  9276. char1 = data & 0xff;
  9277. // Look for a Channel 1 Preamble Address Code
  9278. if (char0 >= 0x10 && char0 <= 0x17 &&
  9279. char1 >= 0x40 && char1 <= 0x7F &&
  9280. (char0 !== 0x10 || char1 < 0x60)) {
  9281. // Follow Safari's lead and replace the PAC with a space
  9282. char0 = 0x20;
  9283. // we only want one space so make the second character null
9284. // which will become '' in getCharFromCode
  9285. char1 = null;
  9286. }
  9287. // Look for special character sets
  9288. if ((char0 === 0x11 || char0 === 0x19) &&
  9289. (char1 >= 0x30 && char1 <= 0x3F)) {
9290. // Put in an eighth note and a space
  9291. char0 = 0x266A;
  9292. char1 = '';
  9293. }
  9294. // ignore unsupported control codes
  9295. if ((char0 & 0xf0) === 0x10) {
  9296. return;
  9297. }
  9298. // remove null chars
  9299. if (char0 === 0x00) {
  9300. char0 = null;
  9301. }
  9302. if (char1 === 0x00) {
  9303. char1 = null;
  9304. }
  9305. // character handling is dependent on the current mode
  9306. this[this.mode_](packet.pts, char0, char1);
  9307. break;
  9308. }
  9309. };
  9310. };
  9311. Cea608Stream.prototype = new Stream();
  9312. // Trigger a cue point that captures the current state of the
  9313. // display buffer
  9314. Cea608Stream.prototype.flushDisplayed = function(pts) {
  9315. var content = this.displayed_
  9316. // remove spaces from the start and end of the string
  9317. .map(function(row) {
  9318. return row.trim();
  9319. })
  9320. // remove empty rows
  9321. .filter(function(row) {
  9322. return row.length;
  9323. })
  9324. // combine all text rows to display in one cue
  9325. .join('\n');
  9326. if (content.length) {
  9327. this.trigger('data', {
  9328. startPts: this.startPts_,
  9329. endPts: pts,
  9330. text: content
  9331. });
  9332. }
  9333. };
  9334. // Mode Implementations
  9335. Cea608Stream.prototype.popOn = function(pts, char0, char1) {
  9336. var baseRow = this.nonDisplayed_[BOTTOM_ROW];
  9337. // buffer characters
  9338. baseRow += getCharFromCode(char0);
  9339. baseRow += getCharFromCode(char1);
  9340. this.nonDisplayed_[BOTTOM_ROW] = baseRow;
  9341. };
  9342. Cea608Stream.prototype.rollUp = function(pts, char0, char1) {
  9343. var baseRow = this.displayed_[BOTTOM_ROW];
  9344. if (baseRow === '') {
  9345. // we're starting to buffer new display input, so flush out the
  9346. // current display
  9347. this.flushDisplayed(pts);
  9348. this.startPts_ = pts;
  9349. }
  9350. baseRow += getCharFromCode(char0);
  9351. baseRow += getCharFromCode(char1);
  9352. this.displayed_[BOTTOM_ROW] = baseRow;
  9353. };
  9354. Cea608Stream.prototype.shiftRowsUp_ = function() {
  9355. var i;
  9356. // clear out inactive rows
  9357. for (i = 0; i < this.topRow_; i++) {
  9358. this.displayed_[i] = '';
  9359. }
  9360. // shift displayed rows up
  9361. for (i = this.topRow_; i < BOTTOM_ROW; i++) {
  9362. this.displayed_[i] = this.displayed_[i + 1];
  9363. }
  9364. // clear out the bottom row
  9365. this.displayed_[BOTTOM_ROW] = '';
  9366. };
  9367. // exports
  9368. module.exports = {
  9369. CaptionStream: CaptionStream,
  9370. Cea608Stream: Cea608Stream
  9371. };
  9372. },{"../utils/stream":60}],48:[function(require,module,exports){
  9373. /**
  9374. * mux.js
  9375. *
  9376. * Copyright (c) 2015 Brightcove
  9377. * All rights reserved.
  9378. *
  9379. * A stream-based mp2t to mp4 converter. This utility can be used to
  9380. * deliver mp4s to a SourceBuffer on platforms that support native
  9381. * Media Source Extensions.
  9382. */
  9383. 'use strict';
  9384. var Stream = require('../utils/stream.js'),
  9385. CaptionStream = require('./caption-stream'),
  9386. StreamTypes = require('./stream-types'),
  9387. TimestampRolloverStream = require('./timestamp-rollover-stream').TimestampRolloverStream;
  9388. var m2tsStreamTypes = require('./stream-types.js');
  9389. // object types
  9390. var TransportPacketStream, TransportParseStream, ElementaryStream;
  9391. // constants
  9392. var
  9393. MP2T_PACKET_LENGTH = 188, // bytes
  9394. SYNC_BYTE = 0x47;
  9395. /**
  9396. * Splits an incoming stream of binary data into MPEG-2 Transport
  9397. * Stream packets.
  9398. */
  9399. TransportPacketStream = function() {
  9400. var
  9401. buffer = new Uint8Array(MP2T_PACKET_LENGTH),
  9402. bytesInBuffer = 0;
  9403. TransportPacketStream.prototype.init.call(this);
  9404. // Deliver new bytes to the stream.
  9405. this.push = function(bytes) {
  9406. var
  9407. startIndex = 0,
  9408. endIndex = MP2T_PACKET_LENGTH,
  9409. everything;
  9410. // If there are bytes remaining from the last segment, prepend them to the
  9411. // bytes that were pushed in
  9412. if (bytesInBuffer) {
  9413. everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
  9414. everything.set(buffer.subarray(0, bytesInBuffer));
  9415. everything.set(bytes, bytesInBuffer);
  9416. bytesInBuffer = 0;
  9417. } else {
  9418. everything = bytes;
  9419. }
  9420. // While we have enough data for a packet
  9421. while (endIndex < everything.byteLength) {
9422. // Look for a pair of start and end sync bytes in the data.
  9423. if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
  9424. // We found a packet so emit it and jump one whole packet forward in
  9425. // the stream
  9426. this.trigger('data', everything.subarray(startIndex, endIndex));
  9427. startIndex += MP2T_PACKET_LENGTH;
  9428. endIndex += MP2T_PACKET_LENGTH;
  9429. continue;
  9430. }
  9431. // If we get here, we have somehow become de-synchronized and we need to step
  9432. // forward one byte at a time until we find a pair of sync bytes that denote
  9433. // a packet
  9434. startIndex++;
  9435. endIndex++;
  9436. }
  9437. // If there was some data left over at the end of the segment that couldn't
  9438. // possibly be a whole packet, keep it because it might be the start of a packet
  9439. // that continues in the next segment
  9440. if (startIndex < everything.byteLength) {
  9441. buffer.set(everything.subarray(startIndex), 0);
  9442. bytesInBuffer = everything.byteLength - startIndex;
  9443. }
  9444. };
  9445. this.flush = function() {
  9446. // If the buffer contains a whole packet when we are being flushed, emit it
  9447. // and empty the buffer. Otherwise hold onto the data because it may be
  9448. // important for decoding the next segment
  9449. if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
  9450. this.trigger('data', buffer);
  9451. bytesInBuffer = 0;
  9452. }
  9453. this.trigger('done');
  9454. };
  9455. };
  9456. TransportPacketStream.prototype = new Stream();
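// Usage sketch (illustrative; relies only on the Stream base class's
// on/push/flush API):
//
//   var packetStream = new TransportPacketStream();
//   packetStream.on('data', function(packet) {
//     // `packet` is one 188-byte Uint8Array starting with the 0x47 sync byte
//   });
//   packetStream.push(tsBytes); // tsBytes: a Uint8Array of raw segment data
//   packetStream.flush();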
  9457. /**
  9458. * Accepts an MP2T TransportPacketStream and emits data events with parsed
  9459. * forms of the individual transport stream packets.
  9460. */
  9461. TransportParseStream = function() {
  9462. var parsePsi, parsePat, parsePmt, self;
  9463. TransportParseStream.prototype.init.call(this);
  9464. self = this;
  9465. this.packetsWaitingForPmt = [];
  9466. this.programMapTable = undefined;
  9467. parsePsi = function(payload, psi) {
  9468. var offset = 0;
  9469. // PSI packets may be split into multiple sections and those
  9470. // sections may be split into multiple packets. If a PSI
  9471. // section starts in this packet, the payload_unit_start_indicator
  9472. // will be true and the first byte of the payload will indicate
  9473. // the offset from the current position to the start of the
  9474. // section.
  9475. if (psi.payloadUnitStartIndicator) {
  9476. offset += payload[offset] + 1;
  9477. }
  9478. if (psi.type === 'pat') {
  9479. parsePat(payload.subarray(offset), psi);
  9480. } else {
  9481. parsePmt(payload.subarray(offset), psi);
  9482. }
  9483. };
  9484. parsePat = function(payload, pat) {
  9485. pat.section_number = payload[7]; // eslint-disable-line camelcase
  9486. pat.last_section_number = payload[8]; // eslint-disable-line camelcase
  9487. // skip the PSI header and parse the first PMT entry
  9488. self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
  9489. pat.pmtPid = self.pmtPid;
  9490. };
  9491. /**
  9492. * Parse out the relevant fields of a Program Map Table (PMT).
  9493. * @param payload {Uint8Array} the PMT-specific portion of an MP2T
  9494. * packet. The first byte in this array should be the table_id
  9495. * field.
  9496. * @param pmt {object} the object that should be decorated with
  9497. * fields parsed from the PMT.
  9498. */
  9499. parsePmt = function(payload, pmt) {
  9500. var sectionLength, tableEnd, programInfoLength, offset;
  9501. // PMTs can be sent ahead of the time when they should actually
  9502. // take effect. We don't believe this should ever be the case
  9503. // for HLS but we'll ignore "forward" PMT declarations if we see
  9504. // them. Future PMT declarations have the current_next_indicator
  9505. // set to zero.
  9506. if (!(payload[5] & 0x01)) {
  9507. return;
  9508. }
  9509. // overwrite any existing program map table
  9510. self.programMapTable = {};
  9511. // the mapping table ends at the end of the current section
  9512. sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
  9513. tableEnd = 3 + sectionLength - 4;
  9514. // to determine where the table is, we have to figure out how
  9515. // long the program info descriptors are
  9516. programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];
  9517. // advance the offset to the first entry in the mapping table
  9518. offset = 12 + programInfoLength;
  9519. while (offset < tableEnd) {
  9520. // add an entry that maps the elementary_pid to the stream_type
  9521. self.programMapTable[(payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]] = payload[offset];
  9522. // move to the next table entry
  9523. // skip past the elementary stream descriptors, if present
  9524. offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
  9525. }
  9526. // record the map on the packet as well
  9527. pmt.programMapTable = self.programMapTable;
  9528. // if there are any packets waiting for a PMT to be found, process them now
  9529. while (self.packetsWaitingForPmt.length) {
  9530. self.processPes_.apply(self, self.packetsWaitingForPmt.shift());
  9531. }
  9532. };
  9533. /**
  9534. * Deliver a new MP2T packet to the stream.
  9535. */
  9536. this.push = function(packet) {
  9537. var
  9538. result = {},
  9539. offset = 4;
  9540. result.payloadUnitStartIndicator = !!(packet[1] & 0x40);
  9541. // pid is a 13-bit field starting at the last bit of packet[1]
  9542. result.pid = packet[1] & 0x1f;
  9543. result.pid <<= 8;
  9544. result.pid |= packet[2];
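// e.g. header bytes packet[1] = 0x41, packet[2] = 0x00 give
// pid = ((0x41 & 0x1f) << 8) | 0x00 = 0x100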
  9545. // if an adaption field is present, its length is specified by the
  9546. // fifth byte of the TS packet header. The adaptation field is
  9547. // used to add stuffing to PES packets that don't fill a complete
  9548. // TS packet, and to specify some forms of timing and control data
  9549. // that we do not currently use.
  9550. if (((packet[3] & 0x30) >>> 4) > 0x01) {
  9551. offset += packet[offset] + 1;
  9552. }
  9553. // parse the rest of the packet based on the type
  9554. if (result.pid === 0) {
  9555. result.type = 'pat';
  9556. parsePsi(packet.subarray(offset), result);
  9557. this.trigger('data', result);
  9558. } else if (result.pid === this.pmtPid) {
  9559. result.type = 'pmt';
  9560. parsePsi(packet.subarray(offset), result);
  9561. this.trigger('data', result);
  9562. } else if (this.programMapTable === undefined) {
  9563. // When we have not seen a PMT yet, defer further processing of
  9564. // PES packets until one has been parsed
  9565. this.packetsWaitingForPmt.push([packet, offset, result]);
  9566. } else {
  9567. this.processPes_(packet, offset, result);
  9568. }
  9569. };
  9570. this.processPes_ = function(packet, offset, result) {
  9571. result.streamType = this.programMapTable[result.pid];
  9572. result.type = 'pes';
  9573. result.data = packet.subarray(offset);
  9574. this.trigger('data', result);
  9575. };
  9576. };
  9577. TransportParseStream.prototype = new Stream();
  9578. TransportParseStream.STREAM_TYPES = {
  9579. h264: 0x1b,
  9580. adts: 0x0f
  9581. };
  9582. /**
9583. * Reconstitutes program elementary stream (PES) packets from parsed
  9584. * transport stream packets. That is, if you pipe an
  9585. * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
  9586. * events will be events which capture the bytes for individual PES
  9587. * packets plus relevant metadata that has been extracted from the
  9588. * container.
  9589. */
  9590. ElementaryStream = function() {
  9591. var
  9592. self = this,
  9593. // PES packet fragments
  9594. video = {
  9595. data: [],
  9596. size: 0
  9597. },
  9598. audio = {
  9599. data: [],
  9600. size: 0
  9601. },
  9602. timedMetadata = {
  9603. data: [],
  9604. size: 0
  9605. },
  9606. parsePes = function(payload, pes) {
  9607. var ptsDtsFlags;
9608. // find out if this packet starts a new keyframe
  9609. pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
  9610. // PES packets may be annotated with a PTS value, or a PTS value
  9611. // and a DTS value. Determine what combination of values is
  9612. // available to work with.
  9613. ptsDtsFlags = payload[7];
9614. // PTS and DTS are normally stored as a 33-bit number. JavaScript
9615. // performs all bitwise operations on 32-bit integers, but supports
9616. // a much greater integer range (53 bits) through standard
9617. // mathematical operations.
  9618. // We construct a 31-bit value using bitwise operators over the 31
  9619. // most significant bits and then multiply by 4 (equal to a left-shift
  9620. // of 2) before we add the final 2 least significant bits of the
  9621. // timestamp (equal to an OR.)
  9622. if (ptsDtsFlags & 0xC0) {
  9623. // the PTS and DTS are not written out directly. For information
  9624. // on how they are encoded, see
  9625. // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
  9626. pes.pts = (payload[9] & 0x0E) << 27 |
  9627. (payload[10] & 0xFF) << 20 |
  9628. (payload[11] & 0xFE) << 12 |
  9629. (payload[12] & 0xFF) << 5 |
  9630. (payload[13] & 0xFE) >>> 3;
  9631. pes.pts *= 4; // Left shift by 2
  9632. pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
  9633. pes.dts = pes.pts;
  9634. if (ptsDtsFlags & 0x40) {
  9635. pes.dts = (payload[14] & 0x0E) << 27 |
  9636. (payload[15] & 0xFF) << 20 |
  9637. (payload[16] & 0xFE) << 12 |
  9638. (payload[17] & 0xFF) << 5 |
  9639. (payload[18] & 0xFE) >>> 3;
  9640. pes.dts *= 4; // Left shift by 2
  9641. pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
  9642. }
  9643. }
  9644. // the data section starts immediately after the PES header.
  9645. // pes_header_data_length specifies the number of header bytes
  9646. // that follow the last byte of the field.
  9647. pes.data = payload.subarray(9 + payload[8]);
  9648. },
  9649. flushStream = function(stream, type) {
  9650. var
  9651. packetData = new Uint8Array(stream.size),
  9652. event = {
  9653. type: type
  9654. },
  9655. i = 0,
  9656. fragment;
  9657. // do nothing if there is no buffered data
  9658. if (!stream.data.length) {
  9659. return;
  9660. }
  9661. event.trackId = stream.data[0].pid;
  9662. // reassemble the packet
  9663. while (stream.data.length) {
  9664. fragment = stream.data.shift();
  9665. packetData.set(fragment.data, i);
  9666. i += fragment.data.byteLength;
  9667. }
  9668. // parse assembled packet's PES header
  9669. parsePes(packetData, event);
  9670. stream.size = 0;
  9671. self.trigger('data', event);
  9672. };
  9673. ElementaryStream.prototype.init.call(this);
  9674. this.push = function(data) {
  9675. ({
  9676. pat: function() {
  9677. // we have to wait for the PMT to arrive as well before we
  9678. // have any meaningful metadata
  9679. },
  9680. pes: function() {
  9681. var stream, streamType;
  9682. switch (data.streamType) {
  9683. case StreamTypes.H264_STREAM_TYPE:
  9684. case m2tsStreamTypes.H264_STREAM_TYPE:
  9685. stream = video;
  9686. streamType = 'video';
  9687. break;
  9688. case StreamTypes.ADTS_STREAM_TYPE:
  9689. stream = audio;
  9690. streamType = 'audio';
  9691. break;
  9692. case StreamTypes.METADATA_STREAM_TYPE:
  9693. stream = timedMetadata;
  9694. streamType = 'timed-metadata';
  9695. break;
  9696. default:
  9697. // ignore unknown stream types
  9698. return;
  9699. }
  9700. // if a new packet is starting, we can flush the completed
  9701. // packet
  9702. if (data.payloadUnitStartIndicator) {
  9703. flushStream(stream, streamType);
  9704. }
  9705. // buffer this fragment until we are sure we've received the
  9706. // complete payload
  9707. stream.data.push(data);
  9708. stream.size += data.data.byteLength;
  9709. },
  9710. pmt: function() {
  9711. var
  9712. event = {
  9713. type: 'metadata',
  9714. tracks: []
  9715. },
  9716. programMapTable = data.programMapTable,
  9717. k,
  9718. track;
  9719. // translate streams to tracks
  9720. for (k in programMapTable) {
  9721. if (programMapTable.hasOwnProperty(k)) {
  9722. track = {
  9723. timelineStartInfo: {
  9724. baseMediaDecodeTime: 0
  9725. }
  9726. };
  9727. track.id = +k;
  9728. if (programMapTable[k] === m2tsStreamTypes.H264_STREAM_TYPE) {
  9729. track.codec = 'avc';
  9730. track.type = 'video';
  9731. } else if (programMapTable[k] === m2tsStreamTypes.ADTS_STREAM_TYPE) {
  9732. track.codec = 'adts';
  9733. track.type = 'audio';
  9734. }
  9735. event.tracks.push(track);
  9736. }
  9737. }
  9738. self.trigger('data', event);
  9739. }
  9740. })[data.type]();
  9741. };
  9742. /**
  9743. * Flush any remaining input. Video PES packets may be of variable
  9744. * length. Normally, the start of a new video packet can trigger the
  9745. * finalization of the previous packet. That is not possible if no
  9746. * more video is forthcoming, however. In that case, some other
  9747. * mechanism (like the end of the file) has to be employed. When it is
  9748. * clear that no additional data is forthcoming, calling this method
  9749. * will flush the buffered packets.
  9750. */
  9751. this.flush = function() {
  9752. // !!THIS ORDER IS IMPORTANT!!
  9753. // video first then audio
  9754. flushStream(video, 'video');
  9755. flushStream(audio, 'audio');
  9756. flushStream(timedMetadata, 'timed-metadata');
  9757. this.trigger('done');
  9758. };
  9759. };
  9760. ElementaryStream.prototype = new Stream();
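// Illustrative sketch of the pipeline the comment above describes, assuming
// `parseStream` is the TransportParseStream from the previous sketch:
//
//   var elementaryStream = new ElementaryStream();
//   parseStream.pipe(elementaryStream);
//   elementaryStream.on('data', function(event) {
//     // event.type is 'video', 'audio', 'timed-metadata', or 'metadata'
//   });
//   // signal end-of-input so the last buffered PES packets are emitted
//   elementaryStream.flush();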
  9761. var m2ts = {
  9762. PAT_PID: 0x0000,
  9763. MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
  9764. TransportPacketStream: TransportPacketStream,
  9765. TransportParseStream: TransportParseStream,
  9766. ElementaryStream: ElementaryStream,
  9767. TimestampRolloverStream: TimestampRolloverStream,
  9768. CaptionStream: CaptionStream.CaptionStream,
  9769. Cea608Stream: CaptionStream.Cea608Stream,
  9770. MetadataStream: require('./metadata-stream')
  9771. };
  9772. for (var type in StreamTypes) {
  9773. if (StreamTypes.hasOwnProperty(type)) {
  9774. m2ts[type] = StreamTypes[type];
  9775. }
  9776. }
  9777. module.exports = m2ts;
  9778. },{"../utils/stream.js":60,"./caption-stream":47,"./metadata-stream":49,"./stream-types":51,"./stream-types.js":51,"./timestamp-rollover-stream":52}],49:[function(require,module,exports){
  9779. /**
  9780. * Accepts program elementary stream (PES) data events and parses out
  9781. * ID3 metadata from them, if present.
  9782. * @see http://id3.org/id3v2.3.0
  9783. */
  9784. 'use strict';
  9785. var
  9786. Stream = require('../utils/stream'),
  9787. StreamTypes = require('./stream-types'),
  9788. // return a percent-encoded representation of the specified byte range
  9789. // @see http://en.wikipedia.org/wiki/Percent-encoding
  9790. percentEncode = function(bytes, start, end) {
  9791. var i, result = '';
  9792. for (i = start; i < end; i++) {
  9793. result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  9794. }
  9795. return result;
  9796. },
  9797. // return the string representation of the specified byte range,
9798. // interpreted as UTF-8.
  9799. parseUtf8 = function(bytes, start, end) {
  9800. return decodeURIComponent(percentEncode(bytes, start, end));
  9801. },
  9802. // return the string representation of the specified byte range,
  9803. // interpreted as ISO-8859-1.
  9804. parseIso88591 = function(bytes, start, end) {
  9805. return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  9806. },
  9807. parseSyncSafeInteger = function(data) {
  9808. return (data[0] << 21) |
  9809. (data[1] << 14) |
  9810. (data[2] << 7) |
  9811. (data[3]);
  9812. },
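// Worked example of the syncsafe decoding above: each byte contributes only
// its low 7 bits, so the bytes [0x00, 0x00, 0x02, 0x01] decode to
// (0x02 << 7) | 0x01 = 257, not the 513 a plain big-endian read would give.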
  9813. tagParsers = {
  9814. TXXX: function(tag) {
  9815. var i;
  9816. if (tag.data[0] !== 3) {
  9817. // ignore frames with unrecognized character encodings
  9818. return;
  9819. }
  9820. for (i = 1; i < tag.data.length; i++) {
  9821. if (tag.data[i] === 0) {
  9822. // parse the text fields
  9823. tag.description = parseUtf8(tag.data, 1, i);
  9824. // do not include the null terminator in the tag value
  9825. tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
  9826. break;
  9827. }
  9828. }
  9829. tag.data = tag.value;
  9830. },
  9831. WXXX: function(tag) {
  9832. var i;
  9833. if (tag.data[0] !== 3) {
  9834. // ignore frames with unrecognized character encodings
  9835. return;
  9836. }
  9837. for (i = 1; i < tag.data.length; i++) {
  9838. if (tag.data[i] === 0) {
  9839. // parse the description and URL fields
  9840. tag.description = parseUtf8(tag.data, 1, i);
  9841. tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
  9842. break;
  9843. }
  9844. }
  9845. },
  9846. PRIV: function(tag) {
  9847. var i;
  9848. for (i = 0; i < tag.data.length; i++) {
  9849. if (tag.data[i] === 0) {
9850. // parse the owner identifier field
  9851. tag.owner = parseIso88591(tag.data, 0, i);
  9852. break;
  9853. }
  9854. }
  9855. tag.privateData = tag.data.subarray(i + 1);
  9856. tag.data = tag.privateData;
  9857. }
  9858. },
  9859. MetadataStream;
  9860. MetadataStream = function(options) {
  9861. var
  9862. settings = {
  9863. debug: !!(options && options.debug),
  9864. // the bytes of the program-level descriptor field in MP2T
  9865. // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
  9866. // program element descriptors"
  9867. descriptor: options && options.descriptor
  9868. },
  9869. // the total size in bytes of the ID3 tag being parsed
  9870. tagSize = 0,
  9871. // tag data that is not complete enough to be parsed
  9872. buffer = [],
  9873. // the total number of bytes currently in the buffer
  9874. bufferSize = 0,
  9875. i;
  9876. MetadataStream.prototype.init.call(this);
9877. // calculate the in-band metadata text track dispatch type
  9878. // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  9879. this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
  9880. if (settings.descriptor) {
  9881. for (i = 0; i < settings.descriptor.length; i++) {
  9882. this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
  9883. }
  9884. }
  9885. this.push = function(chunk) {
  9886. var tag, frameStart, frameSize, frame, i, frameHeader;
  9887. if (chunk.type !== 'timed-metadata') {
  9888. return;
  9889. }
  9890. // if data_alignment_indicator is set in the PES header,
  9891. // we must have the start of a new ID3 tag. Assume anything
  9892. // remaining in the buffer was malformed and throw it out
  9893. if (chunk.dataAlignmentIndicator) {
  9894. bufferSize = 0;
  9895. buffer.length = 0;
  9896. }
  9897. // ignore events that don't look like ID3 data
  9898. if (buffer.length === 0 &&
  9899. (chunk.data.length < 10 ||
  9900. chunk.data[0] !== 'I'.charCodeAt(0) ||
  9901. chunk.data[1] !== 'D'.charCodeAt(0) ||
  9902. chunk.data[2] !== '3'.charCodeAt(0))) {
  9903. if (settings.debug) {
  9904. // eslint-disable-next-line no-console
  9905. console.log('Skipping unrecognized metadata packet');
  9906. }
  9907. return;
  9908. }
  9909. // add this chunk to the data we've collected so far
  9910. buffer.push(chunk);
  9911. bufferSize += chunk.data.byteLength;
  9912. // grab the size of the entire frame from the ID3 header
  9913. if (buffer.length === 1) {
9914. // the tag size is transmitted as a 28-bit syncsafe integer in the
9915. // last four bytes of the ID3 header.
  9916. // The most significant bit of each byte is dropped and the
  9917. // results concatenated to recover the actual value.
  9918. tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));
  9919. // ID3 reports the tag size excluding the header but it's more
  9920. // convenient for our comparisons to include it
  9921. tagSize += 10;
  9922. }
  9923. // if the entire frame has not arrived, wait for more data
  9924. if (bufferSize < tagSize) {
  9925. return;
  9926. }
  9927. // collect the entire frame so it can be parsed
  9928. tag = {
  9929. data: new Uint8Array(tagSize),
  9930. frames: [],
  9931. pts: buffer[0].pts,
  9932. dts: buffer[0].dts
  9933. };
  9934. for (i = 0; i < tagSize;) {
  9935. tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
  9936. i += buffer[0].data.byteLength;
  9937. bufferSize -= buffer[0].data.byteLength;
  9938. buffer.shift();
  9939. }
  9940. // find the start of the first frame and the end of the tag
  9941. frameStart = 10;
  9942. if (tag.data[5] & 0x40) {
  9943. // advance the frame start past the extended header
  9944. frameStart += 4; // header size field
  9945. frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));
  9946. // clip any padding off the end
  9947. tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
  9948. }
  9949. // parse one or more ID3 frames
  9950. // http://id3.org/id3v2.3.0#ID3v2_frame_overview
  9951. do {
  9952. // determine the number of bytes in this frame
  9953. frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
  9954. if (frameSize < 1) {
  9955. // eslint-disable-next-line no-console
  9956. return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
  9957. }
  9958. frameHeader = String.fromCharCode(tag.data[frameStart],
  9959. tag.data[frameStart + 1],
  9960. tag.data[frameStart + 2],
  9961. tag.data[frameStart + 3]);
  9962. frame = {
  9963. id: frameHeader,
  9964. data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
  9965. };
  9966. frame.key = frame.id;
  9967. if (tagParsers[frame.id]) {
  9968. tagParsers[frame.id](frame);
  9969. // handle the special PRIV frame used to indicate the start
  9970. // time for raw AAC data
  9971. if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
  9972. var
  9973. d = frame.data,
  9974. size = ((d[3] & 0x01) << 30) |
  9975. (d[4] << 22) |
  9976. (d[5] << 14) |
  9977. (d[6] << 6) |
  9978. (d[7] >>> 2);
  9979. size *= 4;
  9980. size += d[7] & 0x03;
  9981. frame.timeStamp = size;
  9982. // in raw AAC, all subsequent data will be timestamped based
  9983. // on the value of this frame
  9984. // we couldn't have known the appropriate pts and dts before
  9985. // parsing this ID3 tag so set those values now
  9986. if (tag.pts === undefined && tag.dts === undefined) {
  9987. tag.pts = frame.timeStamp;
  9988. tag.dts = frame.timeStamp;
  9989. }
  9990. this.trigger('timestamp', frame);
  9991. }
  9992. }
  9993. tag.frames.push(frame);
  9994. frameStart += 10; // advance past the frame header
  9995. frameStart += frameSize; // advance past the frame body
  9996. } while (frameStart < tagSize);
  9997. this.trigger('data', tag);
  9998. };
  9999. };
  10000. MetadataStream.prototype = new Stream();
  10001. module.exports = MetadataStream;
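// Illustrative usage sketch, assuming `elementaryStream` is an upstream
// m2ts.ElementaryStream emitting 'timed-metadata' events (names are
// hypothetical; the real transmuxer pipeline wires this up internally):
//
//   var metadataStream = new MetadataStream();
//   elementaryStream.pipe(metadataStream);
//   metadataStream.on('data', function(tag) {
//     tag.frames.forEach(function(frame) {
//       // frame.id is the four-character frame type, e.g. 'TXXX' or 'PRIV'
//       console.log(frame.id, frame.data);
//     });
//   });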
  10002. },{"../utils/stream":60,"./stream-types":51}],50:[function(require,module,exports){
  10003. /**
  10004. * mux.js
  10005. *
  10006. * Copyright (c) 2016 Brightcove
  10007. * All rights reserved.
  10008. *
  10009. * Utilities to detect basic properties and metadata about TS Segments.
  10010. */
  10011. 'use strict';
  10012. var StreamTypes = require('./stream-types.js');
  10013. var parsePid = function(packet) {
  10014. var pid = packet[1] & 0x1f;
  10015. pid <<= 8;
  10016. pid |= packet[2];
  10017. return pid;
  10018. };
  10019. var parsePayloadUnitStartIndicator = function(packet) {
  10020. return !!(packet[1] & 0x40);
  10021. };
  10022. var parseAdaptionField = function(packet) {
  10023. var offset = 0;
10024. // if an adaptation field is present, its length is specified by the
  10025. // fifth byte of the TS packet header. The adaptation field is
  10026. // used to add stuffing to PES packets that don't fill a complete
  10027. // TS packet, and to specify some forms of timing and control data
  10028. // that we do not currently use.
  10029. if (((packet[3] & 0x30) >>> 4) > 0x01) {
  10030. offset += packet[4] + 1;
  10031. }
  10032. return offset;
  10033. };
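// Worked example: for a packet whose adaptation_field_control bits
// ((packet[3] & 0x30) >>> 4) equal 0x03 and whose adaptation field length
// byte packet[4] is 7, this returns 7 + 1 = 8, so the payload begins at
// offset 4 + 8 = 12 from the start of the packet.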
  10034. var parseType = function(packet, pmtPid) {
  10035. var pid = parsePid(packet);
  10036. if (pid === 0) {
  10037. return 'pat';
  10038. } else if (pid === pmtPid) {
  10039. return 'pmt';
  10040. } else if (pmtPid) {
  10041. return 'pes';
  10042. }
  10043. return null;
  10044. };
  10045. var parsePat = function(packet) {
  10046. var pusi = parsePayloadUnitStartIndicator(packet);
  10047. var offset = 4 + parseAdaptionField(packet);
  10048. if (pusi) {
  10049. offset += packet[offset] + 1;
  10050. }
  10051. return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
  10052. };
  10053. var parsePmt = function(packet) {
  10054. var programMapTable = {};
  10055. var pusi = parsePayloadUnitStartIndicator(packet);
  10056. var payloadOffset = 4 + parseAdaptionField(packet);
  10057. if (pusi) {
  10058. payloadOffset += packet[payloadOffset] + 1;
  10059. }
  10060. // PMTs can be sent ahead of the time when they should actually
  10061. // take effect. We don't believe this should ever be the case
  10062. // for HLS but we'll ignore "forward" PMT declarations if we see
  10063. // them. Future PMT declarations have the current_next_indicator
  10064. // set to zero.
  10065. if (!(packet[payloadOffset + 5] & 0x01)) {
  10066. return;
  10067. }
  10068. var sectionLength, tableEnd, programInfoLength;
  10069. // the mapping table ends at the end of the current section
  10070. sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
  10071. tableEnd = 3 + sectionLength - 4;
  10072. // to determine where the table is, we have to figure out how
  10073. // long the program info descriptors are
  10074. programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
  10075. // advance the offset to the first entry in the mapping table
  10076. var offset = 12 + programInfoLength;
  10077. while (offset < tableEnd) {
  10078. var i = payloadOffset + offset;
  10079. // add an entry that maps the elementary_pid to the stream_type
  10080. programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i];
  10081. // move to the next table entry
  10082. // skip past the elementary stream descriptors, if present
  10083. offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
  10084. }
  10085. return programMapTable;
  10086. };
  10087. var parsePesType = function(packet, programMapTable) {
  10088. var pid = parsePid(packet);
  10089. var type = programMapTable[pid];
  10090. switch (type) {
  10091. case StreamTypes.H264_STREAM_TYPE:
  10092. return 'video';
  10093. case StreamTypes.ADTS_STREAM_TYPE:
  10094. return 'audio';
  10095. case StreamTypes.METADATA_STREAM_TYPE:
  10096. return 'timed-metadata';
  10097. default:
  10098. return null;
  10099. }
  10100. };
  10101. var parsePesTime = function(packet) {
  10102. var pusi = parsePayloadUnitStartIndicator(packet);
  10103. if (!pusi) {
  10104. return null;
  10105. }
  10106. var offset = 4 + parseAdaptionField(packet);
  10107. var pes = {};
  10108. var ptsDtsFlags;
  10109. // PES packets may be annotated with a PTS value, or a PTS value
  10110. // and a DTS value. Determine what combination of values is
  10111. // available to work with.
  10112. ptsDtsFlags = packet[offset + 7];
10113. // PTS and DTS are normally stored as a 33-bit number. JavaScript
10114. // performs all bitwise operations on 32-bit integers, but supports
10115. // a much greater integer range (53 bits) through standard
10116. // mathematical operations.
  10117. // We construct a 31-bit value using bitwise operators over the 31
  10118. // most significant bits and then multiply by 4 (equal to a left-shift
  10119. // of 2) before we add the final 2 least significant bits of the
  10120. // timestamp (equal to an OR.)
  10121. if (ptsDtsFlags & 0xC0) {
  10122. // the PTS and DTS are not written out directly. For information
  10123. // on how they are encoded, see
  10124. // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
  10125. pes.pts = (packet[offset + 9] & 0x0E) << 27 |
  10126. (packet[offset + 10] & 0xFF) << 20 |
  10127. (packet[offset + 11] & 0xFE) << 12 |
  10128. (packet[offset + 12] & 0xFF) << 5 |
  10129. (packet[offset + 13] & 0xFE) >>> 3;
  10130. pes.pts *= 4; // Left shift by 2
  10131. pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
  10132. pes.dts = pes.pts;
  10133. if (ptsDtsFlags & 0x40) {
  10134. pes.dts = (packet[offset + 14] & 0x0E) << 27 |
  10135. (packet[offset + 15] & 0xFF) << 20 |
  10136. (packet[offset + 16] & 0xFE) << 12 |
  10137. (packet[offset + 17] & 0xFF) << 5 |
  10138. (packet[offset + 18] & 0xFE) >>> 3;
  10139. pes.dts *= 4; // Left shift by 2
  10140. pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
  10141. }
  10142. }
  10143. return pes;
  10144. };
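// Worked example of the 33-bit reconstruction above: if the 31 most
// significant bits decode to 0x40000000 (2^30) and the final two bits are
// 0x3, the PTS is 2^30 * 4 + 3 = 4294967299, a value the 32-bit bitwise
// operators alone could not represent.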
  10145. var parseNalUnitType = function(type) {
  10146. switch (type) {
  10147. case 0x05:
  10148. return 'slice_layer_without_partitioning_rbsp_idr';
  10149. case 0x06:
  10150. return 'sei_rbsp';
  10151. case 0x07:
  10152. return 'seq_parameter_set_rbsp';
  10153. case 0x08:
  10154. return 'pic_parameter_set_rbsp';
  10155. case 0x09:
  10156. return 'access_unit_delimiter_rbsp';
  10157. default:
  10158. return null;
  10159. }
  10160. };
  10161. var videoPacketContainsKeyFrame = function(packet) {
  10162. var offset = 4 + parseAdaptionField(packet);
  10163. var frameBuffer = packet.subarray(offset);
  10164. var frameI = 0;
  10165. var frameSyncPoint = 0;
  10166. var foundKeyFrame = false;
  10167. var nalType;
  10168. // advance the sync point to a NAL start, if necessary
  10169. for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
  10170. if (frameBuffer[frameSyncPoint + 2] === 1) {
  10171. // the sync point is properly aligned
  10172. frameI = frameSyncPoint + 5;
  10173. break;
  10174. }
  10175. }
  10176. while (frameI < frameBuffer.byteLength) {
  10177. // look at the current byte to determine if we've hit the end of
  10178. // a NAL unit boundary
  10179. switch (frameBuffer[frameI]) {
  10180. case 0:
  10181. // skip past non-sync sequences
  10182. if (frameBuffer[frameI - 1] !== 0) {
  10183. frameI += 2;
  10184. break;
  10185. } else if (frameBuffer[frameI - 2] !== 0) {
  10186. frameI++;
  10187. break;
  10188. }
  10189. if (frameSyncPoint + 3 !== frameI - 2) {
  10190. nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
  10191. if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
  10192. foundKeyFrame = true;
  10193. }
  10194. }
  10195. // drop trailing zeroes
  10196. do {
  10197. frameI++;
  10198. } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
  10199. frameSyncPoint = frameI - 2;
  10200. frameI += 3;
  10201. break;
  10202. case 1:
  10203. // skip past non-sync sequences
  10204. if (frameBuffer[frameI - 1] !== 0 ||
  10205. frameBuffer[frameI - 2] !== 0) {
  10206. frameI += 3;
  10207. break;
  10208. }
  10209. nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
  10210. if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
  10211. foundKeyFrame = true;
  10212. }
  10213. frameSyncPoint = frameI - 2;
  10214. frameI += 3;
  10215. break;
  10216. default:
  10217. // the current byte isn't a one or zero, so it cannot be part
  10218. // of a sync sequence
  10219. frameI += 3;
  10220. break;
  10221. }
  10222. }
  10223. frameBuffer = frameBuffer.subarray(frameSyncPoint);
  10224. frameI -= frameSyncPoint;
  10225. frameSyncPoint = 0;
  10226. // parse the final nal
  10227. if (frameBuffer && frameBuffer.byteLength > 3) {
  10228. nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
  10229. if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
  10230. foundKeyFrame = true;
  10231. }
  10232. }
  10233. return foundKeyFrame;
  10234. };
  10235. module.exports = {
  10236. parseType: parseType,
  10237. parsePat: parsePat,
  10238. parsePmt: parsePmt,
  10239. parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
  10240. parsePesType: parsePesType,
  10241. parsePesTime: parsePesTime,
  10242. videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
  10243. };
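// Illustrative sketch of scanning a segment with these helpers. `bytes` is a
// hypothetical Uint8Array; a real caller must also locate the 0x47 sync
// bytes rather than assume aligned 188-byte packets.
//
//   var pmtPid, programMapTable;
//   for (var i = 0; i + 188 <= bytes.byteLength; i += 188) {
//     var packet = bytes.subarray(i, i + 188);
//     switch (parseType(packet, pmtPid)) {
//     case 'pat':
//       pmtPid = parsePat(packet);
//       break;
//     case 'pmt':
//       programMapTable = parsePmt(packet);
//       break;
//     case 'pes':
//       console.log(parsePesType(packet, programMapTable),
//                   parsePesTime(packet));
//       break;
//     }
//   }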
  10244. },{"./stream-types.js":51}],51:[function(require,module,exports){
  10245. 'use strict';
  10246. module.exports = {
  10247. H264_STREAM_TYPE: 0x1B,
  10248. ADTS_STREAM_TYPE: 0x0F,
  10249. METADATA_STREAM_TYPE: 0x15
  10250. };
  10251. },{}],52:[function(require,module,exports){
  10252. /**
  10253. * mux.js
  10254. *
  10255. * Copyright (c) 2016 Brightcove
  10256. * All rights reserved.
  10257. *
  10258. * Accepts program elementary stream (PES) data events and corrects
  10259. * decode and presentation time stamps to account for a rollover
  10260. * of the 33 bit value.
  10261. */
  10262. 'use strict';
  10263. var Stream = require('../utils/stream');
  10264. var MAX_TS = 8589934592;
  10265. var RO_THRESH = 4294967296;
  10266. var handleRollover = function(value, reference) {
  10267. var direction = 1;
  10268. if (value > reference) {
  10269. // If the current timestamp value is greater than our reference timestamp and we detect a
  10270. // timestamp rollover, this means the roll over is happening in the opposite direction.
  10271. // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
  10272. // point will be set to a small number, e.g. 1. The user then seeks backwards over the
  10273. // rollover point. In loading this segment, the timestamp values will be very large,
  10274. // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
  10275. // the time stamp to be `value - 2^33`.
  10276. direction = -1;
  10277. }
10278. // Note: a seek forward or backward by more than RO_THRESH (2^32 ticks,
10279. // about 13 hours at 90kHz) will cause an incorrect adjustment.
  10280. while (Math.abs(reference - value) > RO_THRESH) {
  10281. value += (direction * MAX_TS);
  10282. }
  10283. return value;
  10284. };
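// Worked example: with a reference DTS of 8589930000 (just below the 33-bit
// maximum of 8589934592) and an incoming value of 1000 that has rolled over,
// handleRollover(1000, 8589930000) adds 2^33 once and returns 8589935592,
// keeping the timeline monotonic.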
  10285. var TimestampRolloverStream = function(type) {
  10286. var lastDTS, referenceDTS;
  10287. TimestampRolloverStream.prototype.init.call(this);
  10288. this.type_ = type;
  10289. this.push = function(data) {
  10290. if (data.type !== this.type_) {
  10291. return;
  10292. }
  10293. if (referenceDTS === undefined) {
  10294. referenceDTS = data.dts;
  10295. }
  10296. data.dts = handleRollover(data.dts, referenceDTS);
  10297. data.pts = handleRollover(data.pts, referenceDTS);
  10298. lastDTS = data.dts;
  10299. this.trigger('data', data);
  10300. };
  10301. this.flush = function() {
  10302. referenceDTS = lastDTS;
  10303. this.trigger('done');
  10304. };
  10305. this.discontinuity = function() {
  10306. referenceDTS = void 0;
  10307. lastDTS = void 0;
  10308. };
  10309. };
  10310. TimestampRolloverStream.prototype = new Stream();
  10311. module.exports = {
  10312. TimestampRolloverStream: TimestampRolloverStream,
  10313. handleRollover: handleRollover
  10314. };
  10315. },{"../utils/stream":60}],53:[function(require,module,exports){
  10316. module.exports = {
  10317. generator: require('./mp4-generator'),
  10318. Transmuxer: require('./transmuxer').Transmuxer,
  10319. AudioSegmentStream: require('./transmuxer').AudioSegmentStream,
  10320. VideoSegmentStream: require('./transmuxer').VideoSegmentStream
  10321. };
  10322. },{"./mp4-generator":54,"./transmuxer":56}],54:[function(require,module,exports){
  10323. /**
  10324. * mux.js
  10325. *
  10326. * Copyright (c) 2015 Brightcove
  10327. * All rights reserved.
  10328. *
  10329. * Functions that generate fragmented MP4s suitable for use with Media
  10330. * Source Extensions.
  10331. */
  10332. 'use strict';
  10333. var UINT32_MAX = Math.pow(2, 32) - 1;
  10334. var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd,
  10335. trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex,
  10336. trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR,
  10337. AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS;
  10338. // pre-calculate constants
  10339. (function() {
  10340. var i;
  10341. types = {
  10342. avc1: [], // codingname
  10343. avcC: [],
  10344. btrt: [],
  10345. dinf: [],
  10346. dref: [],
  10347. esds: [],
  10348. ftyp: [],
  10349. hdlr: [],
  10350. mdat: [],
  10351. mdhd: [],
  10352. mdia: [],
  10353. mfhd: [],
  10354. minf: [],
  10355. moof: [],
  10356. moov: [],
  10357. mp4a: [], // codingname
  10358. mvex: [],
  10359. mvhd: [],
  10360. sdtp: [],
  10361. smhd: [],
  10362. stbl: [],
  10363. stco: [],
  10364. stsc: [],
  10365. stsd: [],
  10366. stsz: [],
  10367. stts: [],
  10368. styp: [],
  10369. tfdt: [],
  10370. tfhd: [],
  10371. traf: [],
  10372. trak: [],
  10373. trun: [],
  10374. trex: [],
  10375. tkhd: [],
  10376. vmhd: []
  10377. };
  10378. // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
  10379. // don't throw an error
  10380. if (typeof Uint8Array === 'undefined') {
  10381. return;
  10382. }
  10383. for (i in types) {
  10384. if (types.hasOwnProperty(i)) {
  10385. types[i] = [
  10386. i.charCodeAt(0),
  10387. i.charCodeAt(1),
  10388. i.charCodeAt(2),
  10389. i.charCodeAt(3)
  10390. ];
  10391. }
  10392. }
  10393. MAJOR_BRAND = new Uint8Array([
  10394. 'i'.charCodeAt(0),
  10395. 's'.charCodeAt(0),
  10396. 'o'.charCodeAt(0),
  10397. 'm'.charCodeAt(0)
  10398. ]);
  10399. AVC1_BRAND = new Uint8Array([
  10400. 'a'.charCodeAt(0),
  10401. 'v'.charCodeAt(0),
  10402. 'c'.charCodeAt(0),
  10403. '1'.charCodeAt(0)
  10404. ]);
  10405. MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
  10406. VIDEO_HDLR = new Uint8Array([
  10407. 0x00, // version 0
  10408. 0x00, 0x00, 0x00, // flags
  10409. 0x00, 0x00, 0x00, 0x00, // pre_defined
  10410. 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
  10411. 0x00, 0x00, 0x00, 0x00, // reserved
  10412. 0x00, 0x00, 0x00, 0x00, // reserved
  10413. 0x00, 0x00, 0x00, 0x00, // reserved
  10414. 0x56, 0x69, 0x64, 0x65,
  10415. 0x6f, 0x48, 0x61, 0x6e,
  10416. 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
  10417. ]);
  10418. AUDIO_HDLR = new Uint8Array([
  10419. 0x00, // version 0
  10420. 0x00, 0x00, 0x00, // flags
  10421. 0x00, 0x00, 0x00, 0x00, // pre_defined
  10422. 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
  10423. 0x00, 0x00, 0x00, 0x00, // reserved
  10424. 0x00, 0x00, 0x00, 0x00, // reserved
  10425. 0x00, 0x00, 0x00, 0x00, // reserved
  10426. 0x53, 0x6f, 0x75, 0x6e,
  10427. 0x64, 0x48, 0x61, 0x6e,
  10428. 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
  10429. ]);
  10430. HDLR_TYPES = {
  10431. video: VIDEO_HDLR,
  10432. audio: AUDIO_HDLR
  10433. };
  10434. DREF = new Uint8Array([
  10435. 0x00, // version 0
  10436. 0x00, 0x00, 0x00, // flags
  10437. 0x00, 0x00, 0x00, 0x01, // entry_count
  10438. 0x00, 0x00, 0x00, 0x0c, // entry_size
  10439. 0x75, 0x72, 0x6c, 0x20, // 'url' type
  10440. 0x00, // version 0
  10441. 0x00, 0x00, 0x01 // entry_flags
  10442. ]);
  10443. SMHD = new Uint8Array([
  10444. 0x00, // version
  10445. 0x00, 0x00, 0x00, // flags
  10446. 0x00, 0x00, // balance, 0 means centered
  10447. 0x00, 0x00 // reserved
  10448. ]);
  10449. STCO = new Uint8Array([
  10450. 0x00, // version
  10451. 0x00, 0x00, 0x00, // flags
  10452. 0x00, 0x00, 0x00, 0x00 // entry_count
  10453. ]);
  10454. STSC = STCO;
  10455. STSZ = new Uint8Array([
  10456. 0x00, // version
  10457. 0x00, 0x00, 0x00, // flags
  10458. 0x00, 0x00, 0x00, 0x00, // sample_size
  10459. 0x00, 0x00, 0x00, 0x00 // sample_count
  10460. ]);
  10461. STTS = STCO;
  10462. VMHD = new Uint8Array([
  10463. 0x00, // version
  10464. 0x00, 0x00, 0x01, // flags
  10465. 0x00, 0x00, // graphicsmode
  10466. 0x00, 0x00,
  10467. 0x00, 0x00,
  10468. 0x00, 0x00 // opcolor
  10469. ]);
  10470. }());
  10471. box = function(type) {
  10472. var
  10473. payload = [],
  10474. size = 0,
  10475. i,
  10476. result,
  10477. view;
  10478. for (i = 1; i < arguments.length; i++) {
  10479. payload.push(arguments[i]);
  10480. }
  10481. i = payload.length;
  10482. // calculate the total size we need to allocate
  10483. while (i--) {
  10484. size += payload[i].byteLength;
  10485. }
  10486. result = new Uint8Array(size + 8);
  10487. view = new DataView(result.buffer, result.byteOffset, result.byteLength);
  10488. view.setUint32(0, result.byteLength);
  10489. result.set(type, 4);
  10490. // copy the payload into the result
  10491. for (i = 0, size = 8; i < payload.length; i++) {
  10492. result.set(payload[i], size);
  10493. size += payload[i].byteLength;
  10494. }
  10495. return result;
  10496. };
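// Worked example: box(types.mdat, new Uint8Array([1, 2, 3])) yields an
// 11-byte array whose first four bytes are the big-endian size
// (0x00 0x00 0x00 0x0b), followed by the fourcc 'mdat' and then the payload.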
  10497. dinf = function() {
  10498. return box(types.dinf, box(types.dref, DREF));
  10499. };
  10500. esds = function(track) {
  10501. return box(types.esds, new Uint8Array([
  10502. 0x00, // version
  10503. 0x00, 0x00, 0x00, // flags
  10504. // ES_Descriptor
  10505. 0x03, // tag, ES_DescrTag
  10506. 0x19, // length
  10507. 0x00, 0x00, // ES_ID
  10508. 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
  10509. // DecoderConfigDescriptor
  10510. 0x04, // tag, DecoderConfigDescrTag
  10511. 0x11, // length
  10512. 0x40, // object type
  10513. 0x15, // streamType
  10514. 0x00, 0x06, 0x00, // bufferSizeDB
  10515. 0x00, 0x00, 0xda, 0xc0, // maxBitrate
  10516. 0x00, 0x00, 0xda, 0xc0, // avgBitrate
  10517. // DecoderSpecificInfo
  10518. 0x05, // tag, DecoderSpecificInfoTag
  10519. 0x02, // length
  10520. // ISO/IEC 14496-3, AudioSpecificConfig
  10521. // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
  10522. (track.audioobjecttype << 3) | (track.samplingfrequencyindex >>> 1),
  10523. (track.samplingfrequencyindex << 7) | (track.channelcount << 3),
  10524. 0x06, 0x01, 0x02 // GASpecificConfig
  10525. ]));
  10526. };
  10527. ftyp = function() {
  10528. return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
  10529. };
  10530. hdlr = function(type) {
  10531. return box(types.hdlr, HDLR_TYPES[type]);
  10532. };
  10533. mdat = function(data) {
  10534. return box(types.mdat, data);
  10535. };
  10536. mdhd = function(track) {
  10537. var result = new Uint8Array([
  10538. 0x00, // version 0
  10539. 0x00, 0x00, 0x00, // flags
  10540. 0x00, 0x00, 0x00, 0x02, // creation_time
  10541. 0x00, 0x00, 0x00, 0x03, // modification_time
  10542. 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
  10543. (track.duration >>> 24) & 0xFF,
  10544. (track.duration >>> 16) & 0xFF,
  10545. (track.duration >>> 8) & 0xFF,
  10546. track.duration & 0xFF, // duration
  10547. 0x55, 0xc4, // 'und' language (undetermined)
  10548. 0x00, 0x00
  10549. ]);
  10550. // Use the sample rate from the track metadata, when it is
  10551. // defined. The sample rate can be parsed out of an ADTS header, for
  10552. // instance.
  10553. if (track.samplerate) {
  10554. result[12] = (track.samplerate >>> 24) & 0xFF;
  10555. result[13] = (track.samplerate >>> 16) & 0xFF;
  10556. result[14] = (track.samplerate >>> 8) & 0xFF;
  10557. result[15] = (track.samplerate) & 0xFF;
  10558. }
  10559. return box(types.mdhd, result);
  10560. };
  10561. mdia = function(track) {
  10562. return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
  10563. };
  10564. mfhd = function(sequenceNumber) {
  10565. return box(types.mfhd, new Uint8Array([
  10566. 0x00,
  10567. 0x00, 0x00, 0x00, // flags
  10568. (sequenceNumber & 0xFF000000) >> 24,
  10569. (sequenceNumber & 0xFF0000) >> 16,
  10570. (sequenceNumber & 0xFF00) >> 8,
  10571. sequenceNumber & 0xFF // sequence_number
  10572. ]));
  10573. };
  10574. minf = function(track) {
  10575. return box(types.minf,
  10576. track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD),
  10577. dinf(),
  10578. stbl(track));
  10579. };
  10580. moof = function(sequenceNumber, tracks) {
  10581. var
  10582. trackFragments = [],
  10583. i = tracks.length;
  10584. // build traf boxes for each track fragment
  10585. while (i--) {
  10586. trackFragments[i] = traf(tracks[i]);
  10587. }
  10588. return box.apply(null, [
  10589. types.moof,
  10590. mfhd(sequenceNumber)
  10591. ].concat(trackFragments));
  10592. };
  10593. /**
  10594. * Returns a movie box.
  10595. * @param tracks {array} the tracks associated with this movie
  10596. * @see ISO/IEC 14496-12:2012(E), section 8.2.1
  10597. */
  10598. moov = function(tracks) {
  10599. var
  10600. i = tracks.length,
  10601. boxes = [];
  10602. while (i--) {
  10603. boxes[i] = trak(tracks[i]);
  10604. }
  10605. return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
  10606. };
  10607. mvex = function(tracks) {
  10608. var
  10609. i = tracks.length,
  10610. boxes = [];
  10611. while (i--) {
  10612. boxes[i] = trex(tracks[i]);
  10613. }
  10614. return box.apply(null, [types.mvex].concat(boxes));
  10615. };
  10616. mvhd = function(duration) {
  10617. var
  10618. bytes = new Uint8Array([
  10619. 0x00, // version 0
  10620. 0x00, 0x00, 0x00, // flags
  10621. 0x00, 0x00, 0x00, 0x01, // creation_time
  10622. 0x00, 0x00, 0x00, 0x02, // modification_time
  10623. 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
  10624. (duration & 0xFF000000) >> 24,
  10625. (duration & 0xFF0000) >> 16,
  10626. (duration & 0xFF00) >> 8,
  10627. duration & 0xFF, // duration
  10628. 0x00, 0x01, 0x00, 0x00, // 1.0 rate
  10629. 0x01, 0x00, // 1.0 volume
  10630. 0x00, 0x00, // reserved
  10631. 0x00, 0x00, 0x00, 0x00, // reserved
  10632. 0x00, 0x00, 0x00, 0x00, // reserved
  10633. 0x00, 0x01, 0x00, 0x00,
  10634. 0x00, 0x00, 0x00, 0x00,
  10635. 0x00, 0x00, 0x00, 0x00,
  10636. 0x00, 0x00, 0x00, 0x00,
  10637. 0x00, 0x01, 0x00, 0x00,
  10638. 0x00, 0x00, 0x00, 0x00,
  10639. 0x00, 0x00, 0x00, 0x00,
  10640. 0x00, 0x00, 0x00, 0x00,
  10641. 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
  10642. 0x00, 0x00, 0x00, 0x00,
  10643. 0x00, 0x00, 0x00, 0x00,
  10644. 0x00, 0x00, 0x00, 0x00,
  10645. 0x00, 0x00, 0x00, 0x00,
  10646. 0x00, 0x00, 0x00, 0x00,
  10647. 0x00, 0x00, 0x00, 0x00, // pre_defined
  10648. 0xff, 0xff, 0xff, 0xff // next_track_ID
  10649. ]);
  10650. return box(types.mvhd, bytes);
  10651. };
  10652. sdtp = function(track) {
  10653. var
  10654. samples = track.samples || [],
  10655. bytes = new Uint8Array(4 + samples.length),
  10656. flags,
  10657. i;
  10658. // leave the full box header (4 bytes) all zero
  10659. // write the sample table
  10660. for (i = 0; i < samples.length; i++) {
  10661. flags = samples[i].flags;
  10662. bytes[i + 4] = (flags.dependsOn << 4) |
  10663. (flags.isDependedOn << 2) |
  10664. (flags.hasRedundancy);
  10665. }
  10666. return box(types.sdtp,
  10667. bytes);
  10668. };
  10669. stbl = function(track) {
  10670. return box(types.stbl,
  10671. stsd(track),
  10672. box(types.stts, STTS),
  10673. box(types.stsc, STSC),
  10674. box(types.stsz, STSZ),
  10675. box(types.stco, STCO));
  10676. };
  10677. (function() {
  10678. var videoSample, audioSample;
  10679. stsd = function(track) {
  10680. return box(types.stsd, new Uint8Array([
  10681. 0x00, // version 0
  10682. 0x00, 0x00, 0x00, // flags
  10683. 0x00, 0x00, 0x00, 0x01
  10684. ]), track.type === 'video' ? videoSample(track) : audioSample(track));
  10685. };
  10686. videoSample = function(track) {
  10687. var
  10688. sps = track.sps || [],
  10689. pps = track.pps || [],
  10690. sequenceParameterSets = [],
  10691. pictureParameterSets = [],
  10692. i;
  10693. // assemble the SPSs
  10694. for (i = 0; i < sps.length; i++) {
  10695. sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
  10696. sequenceParameterSets.push((sps[i].byteLength & 0xFF)); // sequenceParameterSetLength
  10697. sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
  10698. }
  10699. // assemble the PPSs
  10700. for (i = 0; i < pps.length; i++) {
  10701. pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
  10702. pictureParameterSets.push((pps[i].byteLength & 0xFF));
  10703. pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
  10704. }
  10705. return box(types.avc1, new Uint8Array([
  10706. 0x00, 0x00, 0x00,
  10707. 0x00, 0x00, 0x00, // reserved
  10708. 0x00, 0x01, // data_reference_index
  10709. 0x00, 0x00, // pre_defined
  10710. 0x00, 0x00, // reserved
  10711. 0x00, 0x00, 0x00, 0x00,
  10712. 0x00, 0x00, 0x00, 0x00,
  10713. 0x00, 0x00, 0x00, 0x00, // pre_defined
  10714. (track.width & 0xff00) >> 8,
  10715. track.width & 0xff, // width
  10716. (track.height & 0xff00) >> 8,
  10717. track.height & 0xff, // height
  10718. 0x00, 0x48, 0x00, 0x00, // horizresolution
  10719. 0x00, 0x48, 0x00, 0x00, // vertresolution
  10720. 0x00, 0x00, 0x00, 0x00, // reserved
  10721. 0x00, 0x01, // frame_count
  10722. 0x13,
  10723. 0x76, 0x69, 0x64, 0x65,
  10724. 0x6f, 0x6a, 0x73, 0x2d,
  10725. 0x63, 0x6f, 0x6e, 0x74,
  10726. 0x72, 0x69, 0x62, 0x2d,
  10727. 0x68, 0x6c, 0x73, 0x00,
  10728. 0x00, 0x00, 0x00, 0x00,
  10729. 0x00, 0x00, 0x00, 0x00,
  10730. 0x00, 0x00, 0x00, // compressorname
  10731. 0x00, 0x18, // depth = 24
  10732. 0x11, 0x11 // pre_defined = -1
  10733. ]), box(types.avcC, new Uint8Array([
  10734. 0x01, // configurationVersion
  10735. track.profileIdc, // AVCProfileIndication
  10736. track.profileCompatibility, // profile_compatibility
  10737. track.levelIdc, // AVCLevelIndication
  10738. 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
  10739. ].concat([
  10740. sps.length // numOfSequenceParameterSets
  10741. ]).concat(sequenceParameterSets).concat([
  10742. pps.length // numOfPictureParameterSets
  10743. ]).concat(pictureParameterSets))), // "PPS"
  10744. box(types.btrt, new Uint8Array([
  10745. 0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
  10746. 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
  10747. 0x00, 0x2d, 0xc6, 0xc0
  10748. ])) // avgBitrate
  10749. );
  10750. };
  10751. audioSample = function(track) {
  10752. return box(types.mp4a, new Uint8Array([
  10753. // SampleEntry, ISO/IEC 14496-12
  10754. 0x00, 0x00, 0x00,
  10755. 0x00, 0x00, 0x00, // reserved
  10756. 0x00, 0x01, // data_reference_index
  10757. // AudioSampleEntry, ISO/IEC 14496-12
  10758. 0x00, 0x00, 0x00, 0x00, // reserved
  10759. 0x00, 0x00, 0x00, 0x00, // reserved
  10760. (track.channelcount & 0xff00) >> 8,
  10761. (track.channelcount & 0xff), // channelcount
  10762. (track.samplesize & 0xff00) >> 8,
  10763. (track.samplesize & 0xff), // samplesize
  10764. 0x00, 0x00, // pre_defined
  10765. 0x00, 0x00, // reserved
  10766. (track.samplerate & 0xff00) >> 8,
  10767. (track.samplerate & 0xff),
  10768. 0x00, 0x00 // samplerate, 16.16
  10769. // MP4AudioSampleEntry, ISO/IEC 14496-14
  10770. ]), esds(track));
  10771. };
  10772. }());
  10773. tkhd = function(track) {
  10774. var result = new Uint8Array([
  10775. 0x00, // version 0
  10776. 0x00, 0x00, 0x07, // flags
  10777. 0x00, 0x00, 0x00, 0x00, // creation_time
  10778. 0x00, 0x00, 0x00, 0x00, // modification_time
  10779. (track.id & 0xFF000000) >> 24,
  10780. (track.id & 0xFF0000) >> 16,
  10781. (track.id & 0xFF00) >> 8,
  10782. track.id & 0xFF, // track_ID
  10783. 0x00, 0x00, 0x00, 0x00, // reserved
  10784. (track.duration & 0xFF000000) >> 24,
  10785. (track.duration & 0xFF0000) >> 16,
  10786. (track.duration & 0xFF00) >> 8,
  10787. track.duration & 0xFF, // duration
  10788. 0x00, 0x00, 0x00, 0x00,
  10789. 0x00, 0x00, 0x00, 0x00, // reserved
  10790. 0x00, 0x00, // layer
  10791. 0x00, 0x00, // alternate_group
  10792. 0x01, 0x00, // non-audio track volume
  10793. 0x00, 0x00, // reserved
  10794. 0x00, 0x01, 0x00, 0x00,
  10795. 0x00, 0x00, 0x00, 0x00,
  10796. 0x00, 0x00, 0x00, 0x00,
  10797. 0x00, 0x00, 0x00, 0x00,
  10798. 0x00, 0x01, 0x00, 0x00,
  10799. 0x00, 0x00, 0x00, 0x00,
  10800. 0x00, 0x00, 0x00, 0x00,
  10801. 0x00, 0x00, 0x00, 0x00,
  10802. 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
  10803. (track.width & 0xFF00) >> 8,
  10804. track.width & 0xFF,
  10805. 0x00, 0x00, // width
  10806. (track.height & 0xFF00) >> 8,
  10807. track.height & 0xFF,
  10808. 0x00, 0x00 // height
  10809. ]);
  10810. return box(types.tkhd, result);
  10811. };
  10812. /**
  10813. * Generate a track fragment (traf) box. A traf box collects metadata
  10814. * about tracks in a movie fragment (moof) box.
  10815. */
  10816. traf = function(track) {
  10817. var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun,
  10818. sampleDependencyTable, dataOffset,
  10819. upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
  10820. trackFragmentHeader = box(types.tfhd, new Uint8Array([
  10821. 0x00, // version 0
  10822. 0x00, 0x00, 0x3a, // flags
  10823. (track.id & 0xFF000000) >> 24,
  10824. (track.id & 0xFF0000) >> 16,
  10825. (track.id & 0xFF00) >> 8,
  10826. (track.id & 0xFF), // track_ID
  10827. 0x00, 0x00, 0x00, 0x01, // sample_description_index
  10828. 0x00, 0x00, 0x00, 0x00, // default_sample_duration
  10829. 0x00, 0x00, 0x00, 0x00, // default_sample_size
  10830. 0x00, 0x00, 0x00, 0x00 // default_sample_flags
  10831. ]));
  10832. upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
  10833. lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
  10834. trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([
  10835. 0x01, // version 1
  10836. 0x00, 0x00, 0x00, // flags
  10837. // baseMediaDecodeTime
  10838. (upperWordBaseMediaDecodeTime >>> 24) & 0xFF,
  10839. (upperWordBaseMediaDecodeTime >>> 16) & 0xFF,
  10840. (upperWordBaseMediaDecodeTime >>> 8) & 0xFF,
  10841. upperWordBaseMediaDecodeTime & 0xFF,
  10842. (lowerWordBaseMediaDecodeTime >>> 24) & 0xFF,
  10843. (lowerWordBaseMediaDecodeTime >>> 16) & 0xFF,
  10844. (lowerWordBaseMediaDecodeTime >>> 8) & 0xFF,
  10845. lowerWordBaseMediaDecodeTime & 0xFF
  10846. ]));
  10847. // the data offset specifies the number of bytes from the start of
  10848. // the containing moof to the first payload byte of the associated
  10849. // mdat
  10850. dataOffset = (32 + // tfhd
  10851. 20 + // tfdt
  10852. 8 + // traf header
  10853. 16 + // mfhd
  10854. 8 + // moof header
  10855. 8); // mdat header
  10856. // audio tracks require less metadata
  10857. if (track.type === 'audio') {
  10858. trackFragmentRun = trun(track, dataOffset);
  10859. return box(types.traf,
  10860. trackFragmentHeader,
  10861. trackFragmentDecodeTime,
  10862. trackFragmentRun);
  10863. }
  10864. // video tracks should contain an independent and disposable samples
  10865. // box (sdtp)
  10866. // generate one and adjust offsets to match
  10867. sampleDependencyTable = sdtp(track);
  10868. trackFragmentRun = trun(track,
  10869. sampleDependencyTable.length + dataOffset);
  10870. return box(types.traf,
  10871. trackFragmentHeader,
  10872. trackFragmentDecodeTime,
  10873. trackFragmentRun,
  10874. sampleDependencyTable);
  10875. };
  10876. /**
  10877. * Generate a track box.
  10878. * @param track {object} a track definition
  10879. * @return {Uint8Array} the track box
  10880. */
  10881. trak = function(track) {
  10882. track.duration = track.duration || 0xffffffff;
  10883. return box(types.trak,
  10884. tkhd(track),
  10885. mdia(track));
  10886. };
  10887. trex = function(track) {
  10888. var result = new Uint8Array([
  10889. 0x00, // version 0
  10890. 0x00, 0x00, 0x00, // flags
  10891. (track.id & 0xFF000000) >> 24,
  10892. (track.id & 0xFF0000) >> 16,
  10893. (track.id & 0xFF00) >> 8,
  10894. (track.id & 0xFF), // track_ID
  10895. 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
  10896. 0x00, 0x00, 0x00, 0x00, // default_sample_duration
  10897. 0x00, 0x00, 0x00, 0x00, // default_sample_size
  10898. 0x00, 0x01, 0x00, 0x01 // default_sample_flags
  10899. ]);
  10900. // the last two bytes of default_sample_flags is the sample
  10901. // degradation priority, a hint about the importance of this sample
  10902. // relative to others. Lower the degradation priority for all sample
  10903. // types other than video.
  10904. if (track.type !== 'video') {
  10905. result[result.length - 1] = 0x00;
  10906. }
  10907. return box(types.trex, result);
  10908. };
  10909. (function() {
  10910. var audioTrun, videoTrun, trunHeader;
  10911. // This method assumes all samples are uniform. That is, if a
  10912. // duration is present for the first sample, it will be present for
  10913. // all subsequent samples.
  10914. // see ISO/IEC 14496-12:2012, Section 8.8.8.1
  10915. trunHeader = function(samples, offset) {
  10916. var durationPresent = 0, sizePresent = 0,
  10917. flagsPresent = 0, compositionTimeOffset = 0;
  10918. // trun flag constants
  10919. if (samples.length) {
  10920. if (samples[0].duration !== undefined) {
  10921. durationPresent = 0x1;
  10922. }
  10923. if (samples[0].size !== undefined) {
  10924. sizePresent = 0x2;
  10925. }
  10926. if (samples[0].flags !== undefined) {
  10927. flagsPresent = 0x4;
  10928. }
  10929. if (samples[0].compositionTimeOffset !== undefined) {
  10930. compositionTimeOffset = 0x8;
  10931. }
  10932. }
  10933. return [
  10934. 0x00, // version 0
  10935. 0x00,
  10936. durationPresent | sizePresent | flagsPresent | compositionTimeOffset,
  10937. 0x01, // flags
  10938. (samples.length & 0xFF000000) >>> 24,
  10939. (samples.length & 0xFF0000) >>> 16,
  10940. (samples.length & 0xFF00) >>> 8,
  10941. samples.length & 0xFF, // sample_count
  10942. (offset & 0xFF000000) >>> 24,
  10943. (offset & 0xFF0000) >>> 16,
  10944. (offset & 0xFF00) >>> 8,
  10945. offset & 0xFF // data_offset
  10946. ];
  10947. };
  10948. videoTrun = function(track, offset) {
  10949. var bytes, samples, sample, i;
  10950. samples = track.samples || [];
  10951. offset += 8 + 12 + (16 * samples.length);
  10952. bytes = trunHeader(samples, offset);
  10953. for (i = 0; i < samples.length; i++) {
  10954. sample = samples[i];
  10955. bytes = bytes.concat([
  10956. (sample.duration & 0xFF000000) >>> 24,
  10957. (sample.duration & 0xFF0000) >>> 16,
  10958. (sample.duration & 0xFF00) >>> 8,
  10959. sample.duration & 0xFF, // sample_duration
  10960. (sample.size & 0xFF000000) >>> 24,
  10961. (sample.size & 0xFF0000) >>> 16,
  10962. (sample.size & 0xFF00) >>> 8,
  10963. sample.size & 0xFF, // sample_size
  10964. (sample.flags.isLeading << 2) | sample.flags.dependsOn,
  10965. (sample.flags.isDependedOn << 6) |
  10966. (sample.flags.hasRedundancy << 4) |
  10967. (sample.flags.paddingValue << 1) |
  10968. sample.flags.isNonSyncSample,
  10969. sample.flags.degradationPriority & 0xF0 << 8,
  10970. sample.flags.degradationPriority & 0x0F, // sample_flags
  10971. (sample.compositionTimeOffset & 0xFF000000) >>> 24,
  10972. (sample.compositionTimeOffset & 0xFF0000) >>> 16,
  10973. (sample.compositionTimeOffset & 0xFF00) >>> 8,
  10974. sample.compositionTimeOffset & 0xFF // sample_composition_time_offset
  10975. ]);
  10976. }
  10977. return box(types.trun, new Uint8Array(bytes));
  10978. };
  10979. audioTrun = function(track, offset) {
  10980. var bytes, samples, sample, i;
  10981. samples = track.samples || [];
  10982. offset += 8 + 12 + (8 * samples.length);
  10983. bytes = trunHeader(samples, offset);
  10984. for (i = 0; i < samples.length; i++) {
  10985. sample = samples[i];
  10986. bytes = bytes.concat([
  10987. (sample.duration & 0xFF000000) >>> 24,
  10988. (sample.duration & 0xFF0000) >>> 16,
  10989. (sample.duration & 0xFF00) >>> 8,
  10990. sample.duration & 0xFF, // sample_duration
  10991. (sample.size & 0xFF000000) >>> 24,
  10992. (sample.size & 0xFF0000) >>> 16,
  10993. (sample.size & 0xFF00) >>> 8,
  10994. sample.size & 0xFF]); // sample_size
  10995. }
  10996. return box(types.trun, new Uint8Array(bytes));
  10997. };
  10998. trun = function(track, offset) {
  10999. if (track.type === 'audio') {
  11000. return audioTrun(track, offset);
  11001. }
  11002. return videoTrun(track, offset);
  11003. };
  11004. }());
  11005. module.exports = {
  11006. ftyp: ftyp,
  11007. mdat: mdat,
  11008. moof: moof,
  11009. moov: moov,
  11010. initSegment: function(tracks) {
  11011. var
  11012. fileType = ftyp(),
  11013. movie = moov(tracks),
  11014. result;
  11015. result = new Uint8Array(fileType.byteLength + movie.byteLength);
  11016. result.set(fileType);
  11017. result.set(movie, fileType.byteLength);
  11018. return result;
  11019. }
  11020. };
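// Illustrative usage sketch: building an init segment for a hypothetical
// audio track object carrying the fields the boxes above read (id, type,
// duration, channelcount, samplesize, samplerate, audioobjecttype,
// samplingfrequencyindex):
//
//   var initSegment = require('./mp4-generator').initSegment([audioTrack]);
//   // initSegment is a Uint8Array (ftyp + moov) suitable for appending to
//   // a Media Source Extensions SourceBuffer before any media segments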
  11021. },{}],55:[function(require,module,exports){
  11022. /**
  11023. * mux.js
  11024. *
  11025. * Copyright (c) 2015 Brightcove
  11026. * All rights reserved.
  11027. *
  11028. * Utilities to detect basic properties and metadata about MP4s.
  11029. */
  11030. 'use strict';
  11031. var findBox, parseType, timescale, startTime;
  11032. // Find the data for a box specified by its path
  11033. findBox = function(data, path) {
  11034. var results = [],
  11035. i, size, type, end, subresults;
  11036. if (!path.length) {
  11037. // short-circuit the search for empty paths
  11038. return null;
  11039. }
  11040. for (i = 0; i < data.byteLength;) {
  11041. size = data[i] << 24;
  11042. size |= data[i + 1] << 16;
  11043. size |= data[i + 2] << 8;
  11044. size |= data[i + 3];
  11045. type = parseType(data.subarray(i + 4, i + 8));
  11046. end = size > 1 ? i + size : data.byteLength;
  11047. if (type === path[0]) {
  11048. if (path.length === 1) {
  11049. // this is the end of the path and we've found the box we were
  11050. // looking for
  11051. results.push(data.subarray(i + 8, end));
  11052. } else {
  11053. // recursively search for the next box along the path
  11054. subresults = findBox(data.subarray(i + 8, end), path.slice(1));
  11055. if (subresults.length) {
  11056. results = results.concat(subresults);
  11057. }
  11058. }
  11059. }
  11060. i = end;
  11061. }
  11062. // we've finished searching all of data
  11063. return results;
  11064. };
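// Illustrative example: findBox(initBytes, ['moov', 'trak', 'tkhd']) returns
// an array containing the payload (header stripped) of every tkhd box nested
// under moov > trak, or an empty array when nothing matches. `initBytes` is
// a hypothetical Uint8Array holding an MP4 init segment.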
  11065. /**
  11066. * Returns the string representation of an ASCII encoded four byte buffer.
  11067. * @param buffer {Uint8Array} a four-byte buffer to translate
  11068. * @return {string} the corresponding string
  11069. */
  11070. parseType = function(buffer) {
  11071. var result = '';
  11072. result += String.fromCharCode(buffer[0]);
  11073. result += String.fromCharCode(buffer[1]);
  11074. result += String.fromCharCode(buffer[2]);
  11075. result += String.fromCharCode(buffer[3]);
  11076. return result;
  11077. };
  11078. /**
  11079. * Parses an MP4 initialization segment and extracts the timescale
  11080. * values for any declared tracks. Timescale values indicate the
  11081. * number of clock ticks per second to assume for time-based values
  11082. * elsewhere in the MP4.
  11083. *
  11084. * To determine the start time of an MP4, you need two pieces of
  11085. * information: the timescale unit and the earliest base media decode
  11086. * time. Multiple timescales can be specified within an MP4 but the
  11087. * base media decode time is always expressed in the timescale from
  11088. * the media header box for the track:
  11089. * ```
  11090. * moov > trak > mdia > mdhd.timescale
  11091. * ```
  11092. * @param init {Uint8Array} the bytes of the init segment
  11093. * @return {object} a hash of track ids to timescale values or null if
  11094. * the init segment is malformed.
  11095. */
  11096. timescale = function(init) {
  11097. var
  11098. result = {},
  11099. traks = findBox(init, ['moov', 'trak']);
  11100. // mdhd timescale
  11101. return traks.reduce(function(result, trak) {
  11102. var tkhd, version, index, id, mdhd;
  11103. tkhd = findBox(trak, ['tkhd'])[0];
  11104. if (!tkhd) {
  11105. return null;
  11106. }
  11107. version = tkhd[0];
  11108. index = version === 0 ? 12 : 20;
  11109. id = tkhd[index] << 24 |
  11110. tkhd[index + 1] << 16 |
  11111. tkhd[index + 2] << 8 |
  11112. tkhd[index + 3];
  11113. mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
  11114. if (!mdhd) {
  11115. return null;
  11116. }
  11117. version = mdhd[0];
  11118. index = version === 0 ? 12 : 20;
  11119. result[id] = mdhd[index] << 24 |
  11120. mdhd[index + 1] << 16 |
  11121. mdhd[index + 2] << 8 |
  11122. mdhd[index + 3];
  11123. return result;
  11124. }, result);
  11125. };
  11126. /**
  11127. * Determine the base media decode start time, in seconds, for an MP4
  11128. * fragment. If multiple fragments are specified, the earliest time is
  11129. * returned.
  11130. *
  11131. * The base media decode time can be parsed from track fragment
  11132. * metadata:
  11133. * ```
  11134. * moof > traf > tfdt.baseMediaDecodeTime
  11135. * ```
  11136. * It requires the timescale value from the mdhd to interpret.
  11137. *
11138. * @param timescale {object} a hash of track ids to timescale values.
* @param fragment {Uint8Array} the bytes of the media fragment
  11139. * @return {number} the earliest base media decode start time for the
  11140. * fragment, in seconds
  11141. */
  11142. startTime = function(timescale, fragment) {
  11143. var trafs, baseTimes, result;
11144. // we need info from two children of each track fragment box
  11145. trafs = findBox(fragment, ['moof', 'traf']);
  11146. // determine the start times for each track
  11147. baseTimes = [].concat.apply([], trafs.map(function(traf) {
  11148. return findBox(traf, ['tfhd']).map(function(tfhd) {
  11149. var id, scale, baseTime;
  11150. // get the track id from the tfhd
  11151. id = tfhd[4] << 24 |
  11152. tfhd[5] << 16 |
  11153. tfhd[6] << 8 |
  11154. tfhd[7];
  11155. // assume a 90kHz clock if no timescale was specified
  11156. scale = timescale[id] || 90e3;
  11157. // get the base media decode time from the tfdt
  11158. baseTime = findBox(traf, ['tfdt']).map(function(tfdt) {
  11159. var version, result;
  11160. version = tfdt[0];
  11161. result = tfdt[4] << 24 |
  11162. tfdt[5] << 16 |
  11163. tfdt[6] << 8 |
  11164. tfdt[7];
  11165. if (version === 1) {
  11166. result *= Math.pow(2, 32);
  11167. result += tfdt[8] << 24 |
  11168. tfdt[9] << 16 |
  11169. tfdt[10] << 8 |
  11170. tfdt[11];
  11171. }
  11172. return result;
  11173. })[0];
  11174. baseTime = baseTime || Infinity;
  11175. // convert base time to seconds
  11176. return baseTime / scale;
  11177. });
  11178. }));
  11179. // return the minimum
  11180. result = Math.min.apply(null, baseTimes);
  11181. return isFinite(result) ? result : 0;
  11182. };
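// Usage sketch (assumes `initSegmentBytes` and `fragmentBytes` are Uint8Arrays
// holding an init segment and a moof+mdat fragment, respectively): chaining
// the two functions above recovers the fragment's start time in seconds.
//
//   var startSeconds = startTime(timescale(initSegmentBytes), fragmentBytes);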
  11183. module.exports = {
  11184. parseType: parseType,
  11185. timescale: timescale,
  11186. startTime: startTime
  11187. };
  11188. },{}],56:[function(require,module,exports){
  11189. /**
  11190. * mux.js
  11191. *
  11192. * Copyright (c) 2015 Brightcove
  11193. * All rights reserved.
  11194. *
  11195. * A stream-based mp2t to mp4 converter. This utility can be used to
  11196. * deliver mp4s to a SourceBuffer on platforms that support native
  11197. * Media Source Extensions.
  11198. */
  11199. 'use strict';
  11200. var Stream = require('../utils/stream.js');
  11201. var mp4 = require('./mp4-generator.js');
  11202. var m2ts = require('../m2ts/m2ts.js');
  11203. var AdtsStream = require('../codecs/adts.js');
  11204. var H264Stream = require('../codecs/h264').H264Stream;
  11205. var AacStream = require('../aac');
  11206. var coneOfSilence = require('../data/silence');
  11207. var clock = require('../utils/clock');
  11208. // constants
  11209. var AUDIO_PROPERTIES = [
  11210. 'audioobjecttype',
  11211. 'channelcount',
  11212. 'samplerate',
  11213. 'samplingfrequencyindex',
  11214. 'samplesize'
  11215. ];
  11216. var VIDEO_PROPERTIES = [
  11217. 'width',
  11218. 'height',
  11219. 'profileIdc',
  11220. 'levelIdc',
  11221. 'profileCompatibility'
  11222. ];
  11223. var ONE_SECOND_IN_TS = 90000; // 90kHz clock
  11224. // object types
  11225. var VideoSegmentStream, AudioSegmentStream, Transmuxer, CoalesceStream;
  11226. // Helper functions
  11227. var
  11228. createDefaultSample,
  11229. isLikelyAacData,
  11230. collectDtsInfo,
  11231. clearDtsInfo,
  11232. calculateTrackBaseMediaDecodeTime,
  11233. arrayEquals,
  11234. sumFrameByteLengths;
  11235. /**
  11236. * Default sample object
  11237. * see ISO/IEC 14496-12:2012, section 8.6.4.3
  11238. */
  11239. createDefaultSample = function() {
  11240. return {
  11241. size: 0,
  11242. flags: {
  11243. isLeading: 0,
  11244. dependsOn: 1,
  11245. isDependedOn: 0,
  11246. hasRedundancy: 0,
  11247. degradationPriority: 0
  11248. }
  11249. };
  11250. };
  11251. isLikelyAacData = function(data) {
  11252. if ((data[0] === 'I'.charCodeAt(0)) &&
  11253. (data[1] === 'D'.charCodeAt(0)) &&
  11254. (data[2] === '3'.charCodeAt(0))) {
  11255. return true;
  11256. }
  11257. return false;
  11258. };
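// Example (illustrative only): raw AAC segments are expected to begin with an
// ID3 tag, whose header starts with the ASCII bytes 'I', 'D', '3':
//
//   isLikelyAacData(new Uint8Array([0x49, 0x44, 0x33, 0x04, 0x00])); // => true
//   isLikelyAacData(new Uint8Array([0x47, 0x40, 0x11, 0x10])); // => false (TS sync byte)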
  11259. /**
  11260. * Compare two arrays (even typed) for same-ness
  11261. */
  11262. arrayEquals = function(a, b) {
  11263. var
  11264. i;
  11265. if (a.length !== b.length) {
  11266. return false;
  11267. }
  11268. // compare the value of each element in the array
  11269. for (i = 0; i < a.length; i++) {
  11270. if (a[i] !== b[i]) {
  11271. return false;
  11272. }
  11273. }
  11274. return true;
  11275. };
  11276. /**
  11277. * Sum the `byteLength` properties of the data in each AAC frame
  11278. */
  11279. sumFrameByteLengths = function(array) {
  11280. var
  11281. i,
  11282. currentObj,
  11283. sum = 0;
11284. // sum the byteLength of the data in each frame
  11285. for (i = 0; i < array.length; i++) {
  11286. currentObj = array[i];
  11287. sum += currentObj.data.byteLength;
  11288. }
  11289. return sum;
  11290. };
  11291. /**
  11292. * Constructs a single-track, ISO BMFF media segment from AAC data
  11293. * events. The output of this stream can be fed to a SourceBuffer
  11294. * configured with a suitable initialization segment.
  11295. */
  11296. AudioSegmentStream = function(track) {
  11297. var
  11298. adtsFrames = [],
  11299. sequenceNumber = 0,
  11300. earliestAllowedDts = 0,
  11301. audioAppendStartTs = 0,
  11302. videoBaseMediaDecodeTime = Infinity;
  11303. AudioSegmentStream.prototype.init.call(this);
  11304. this.push = function(data) {
  11305. collectDtsInfo(track, data);
  11306. if (track) {
  11307. AUDIO_PROPERTIES.forEach(function(prop) {
  11308. track[prop] = data[prop];
  11309. });
  11310. }
  11311. // buffer audio data until end() is called
  11312. adtsFrames.push(data);
  11313. };
  11314. this.setEarliestDts = function(earliestDts) {
  11315. earliestAllowedDts = earliestDts - track.timelineStartInfo.baseMediaDecodeTime;
  11316. };
  11317. this.setVideoBaseMediaDecodeTime = function(baseMediaDecodeTime) {
  11318. videoBaseMediaDecodeTime = baseMediaDecodeTime;
  11319. };
  11320. this.setAudioAppendStart = function(timestamp) {
  11321. audioAppendStartTs = timestamp;
  11322. };
  11323. this.flush = function() {
  11324. var
  11325. frames,
  11326. moof,
  11327. mdat,
  11328. boxes;
  11329. // return early if no audio data has been observed
  11330. if (adtsFrames.length === 0) {
  11331. this.trigger('done', 'AudioSegmentStream');
  11332. return;
  11333. }
  11334. frames = this.trimAdtsFramesByEarliestDts_(adtsFrames);
  11335. track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
  11336. this.prefixWithSilence_(track, frames);
  11337. // we have to build the index from byte locations to
  11338. // samples (that is, adts frames) in the audio data
  11339. track.samples = this.generateSampleTable_(frames);
11340. // concatenate the audio data to construct the mdat
  11341. mdat = mp4.mdat(this.concatenateFrameData_(frames));
  11342. adtsFrames = [];
  11343. moof = mp4.moof(sequenceNumber, [track]);
  11344. boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
  11345. // bump the sequence number for next time
  11346. sequenceNumber++;
  11347. boxes.set(moof);
  11348. boxes.set(mdat, moof.byteLength);
  11349. clearDtsInfo(track);
  11350. this.trigger('data', {track: track, boxes: boxes});
  11351. this.trigger('done', 'AudioSegmentStream');
  11352. };
  11353. // Possibly pad (prefix) the audio track with silence if appending this track
  11354. // would lead to the introduction of a gap in the audio buffer
  11355. this.prefixWithSilence_ = function(track, frames) {
  11356. var
  11357. baseMediaDecodeTimeTs,
  11358. frameDuration = 0,
  11359. audioGapDuration = 0,
  11360. audioFillFrameCount = 0,
  11361. audioFillDuration = 0,
  11362. silentFrame,
  11363. i;
  11364. if (!frames.length) {
  11365. return;
  11366. }
  11367. baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate);
  11368. // determine frame clock duration based on sample rate, round up to avoid overfills
  11369. frameDuration = Math.ceil(ONE_SECOND_IN_TS / (track.samplerate / 1024));
  11370. if (audioAppendStartTs && videoBaseMediaDecodeTime) {
  11371. // insert the shortest possible amount (audio gap or audio to video gap)
  11372. audioGapDuration =
  11373. baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime);
  11374. // number of full frames in the audio gap
  11375. audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
  11376. audioFillDuration = audioFillFrameCount * frameDuration;
  11377. }
  11378. // don't attempt to fill gaps smaller than a single frame or larger
  11379. // than a half second
  11380. if (audioFillFrameCount < 1 || audioFillDuration > ONE_SECOND_IN_TS / 2) {
  11381. return;
  11382. }
  11383. silentFrame = coneOfSilence[track.samplerate];
  11384. if (!silentFrame) {
  11385. // we don't have a silent frame pregenerated for the sample rate, so use a frame
  11386. // from the content instead
  11387. silentFrame = frames[0].data;
  11388. }
  11389. for (i = 0; i < audioFillFrameCount; i++) {
  11390. frames.splice(i, 0, {
  11391. data: silentFrame
  11392. });
  11393. }
  11394. track.baseMediaDecodeTime -=
  11395. Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
  11396. };
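// Worked example of the frame-duration math above (illustrative numbers): at
// a 44100Hz sample rate each 1024-sample AAC frame lasts 1024 / 44100, or
// about 23.2ms, so on the 90kHz clock
//
//   frameDuration = Math.ceil(90000 / (44100 / 1024)); // => 2090 ticks
//
// and a 10000-tick gap would be padded with Math.floor(10000 / 2090) = 4
// silent frames.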
11397. // If the audio segment extends before the earliest allowed DTS
11398. // value, remove AAC frames until the segment starts at or after the
11399. // earliest allowed DTS so that we don't end up with a negative
11400. // baseMediaDecodeTime for the audio track
  11401. this.trimAdtsFramesByEarliestDts_ = function(adtsFrames) {
  11402. if (track.minSegmentDts >= earliestAllowedDts) {
  11403. return adtsFrames;
  11404. }
  11405. // We will need to recalculate the earliest segment Dts
  11406. track.minSegmentDts = Infinity;
  11407. return adtsFrames.filter(function(currentFrame) {
11408. // If this is an allowed frame, keep it and record its DTS
  11409. if (currentFrame.dts >= earliestAllowedDts) {
  11410. track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
  11411. track.minSegmentPts = track.minSegmentDts;
  11412. return true;
  11413. }
  11414. // Otherwise, discard it
  11415. return false;
  11416. });
  11417. };
11418. // generate the track's sample table from an array of frames
  11419. this.generateSampleTable_ = function(frames) {
  11420. var
  11421. i,
  11422. currentFrame,
  11423. samples = [];
  11424. for (i = 0; i < frames.length; i++) {
  11425. currentFrame = frames[i];
  11426. samples.push({
  11427. size: currentFrame.data.byteLength,
11428. duration: 1024 // each AAC frame contains 1024 audio samples
  11429. });
  11430. }
  11431. return samples;
  11432. };
11433. // generate the track's raw mdat data from an array of frames
  11434. this.concatenateFrameData_ = function(frames) {
  11435. var
  11436. i,
  11437. currentFrame,
  11438. dataOffset = 0,
  11439. data = new Uint8Array(sumFrameByteLengths(frames));
  11440. for (i = 0; i < frames.length; i++) {
  11441. currentFrame = frames[i];
  11442. data.set(currentFrame.data, dataOffset);
  11443. dataOffset += currentFrame.data.byteLength;
  11444. }
  11445. return data;
  11446. };
  11447. };
  11448. AudioSegmentStream.prototype = new Stream();
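// Usage sketch (assumes `track` is the audio track object produced by the
// elementary stream and `adtsStream` is an AdtsStream instance, mirroring the
// wiring done by the Transmuxer below): frames pushed in are emitted as a
// single moof+mdat pair when the stream is flushed.
//
//   var audioSegmentStream = new AudioSegmentStream(track);
//   audioSegmentStream.on('data', function(segment) {
//     // segment.boxes is a Uint8Array containing the moof + mdat
//   });
//   adtsStream.pipe(audioSegmentStream);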
  11449. /**
  11450. * Constructs a single-track, ISO BMFF media segment from H264 data
  11451. * events. The output of this stream can be fed to a SourceBuffer
  11452. * configured with a suitable initialization segment.
  11453. * @param track {object} track metadata configuration
  11454. */
  11455. VideoSegmentStream = function(track) {
  11456. var
  11457. sequenceNumber = 0,
  11458. nalUnits = [],
  11459. config,
  11460. pps;
  11461. VideoSegmentStream.prototype.init.call(this);
  11462. delete track.minPTS;
  11463. this.gopCache_ = [];
  11464. this.push = function(nalUnit) {
  11465. collectDtsInfo(track, nalUnit);
  11466. // record the track config
  11467. if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
  11468. config = nalUnit.config;
  11469. track.sps = [nalUnit.data];
  11470. VIDEO_PROPERTIES.forEach(function(prop) {
  11471. track[prop] = config[prop];
  11472. }, this);
  11473. }
  11474. if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' &&
  11475. !pps) {
  11476. pps = nalUnit.data;
  11477. track.pps = [nalUnit.data];
  11478. }
  11479. // buffer video until flush() is called
  11480. nalUnits.push(nalUnit);
  11481. };
  11482. this.flush = function() {
  11483. var
  11484. frames,
  11485. gopForFusion,
  11486. gops,
  11487. moof,
  11488. mdat,
  11489. boxes;
  11490. // Throw away nalUnits at the start of the byte stream until
  11491. // we find the first AUD
  11492. while (nalUnits.length) {
  11493. if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
  11494. break;
  11495. }
  11496. nalUnits.shift();
  11497. }
  11498. // Return early if no video data has been observed
  11499. if (nalUnits.length === 0) {
  11500. this.resetStream_();
  11501. this.trigger('done', 'VideoSegmentStream');
  11502. return;
  11503. }
  11504. // Organize the raw nal-units into arrays that represent
  11505. // higher-level constructs such as frames and gops
  11506. // (group-of-pictures)
  11507. frames = this.groupNalsIntoFrames_(nalUnits);
  11508. gops = this.groupFramesIntoGops_(frames);
  11509. // If the first frame of this fragment is not a keyframe we have
  11510. // a problem since MSE (on Chrome) requires a leading keyframe.
  11511. //
  11512. // We have two approaches to repairing this situation:
  11513. // 1) GOP-FUSION:
  11514. // This is where we keep track of the GOPS (group-of-pictures)
  11515. // from previous fragments and attempt to find one that we can
  11516. // prepend to the current fragment in order to create a valid
  11517. // fragment.
  11518. // 2) KEYFRAME-PULLING:
  11519. // Here we search for the first keyframe in the fragment and
  11520. // throw away all the frames between the start of the fragment
  11521. // and that keyframe. We then extend the duration and pull the
  11522. // PTS of the keyframe forward so that it covers the time range
  11523. // of the frames that were disposed of.
  11524. //
11525. // #1 is far preferable to #2, which can cause "stuttering", but it
11526. // requires more things to be just right.
  11527. if (!gops[0][0].keyFrame) {
  11528. // Search for a gop for fusion from our gopCache
  11529. gopForFusion = this.getGopForFusion_(nalUnits[0], track);
  11530. if (gopForFusion) {
  11531. gops.unshift(gopForFusion);
  11532. // Adjust Gops' metadata to account for the inclusion of the
  11533. // new gop at the beginning
  11534. gops.byteLength += gopForFusion.byteLength;
  11535. gops.nalCount += gopForFusion.nalCount;
  11536. gops.pts = gopForFusion.pts;
  11537. gops.dts = gopForFusion.dts;
  11538. gops.duration += gopForFusion.duration;
  11539. } else {
11540. // If we didn't find a candidate gop, fall back to keyframe-pulling
  11541. gops = this.extendFirstKeyFrame_(gops);
  11542. }
  11543. }
  11544. collectDtsInfo(track, gops);
  11545. // First, we have to build the index from byte locations to
  11546. // samples (that is, frames) in the video data
  11547. track.samples = this.generateSampleTable_(gops);
  11548. // Concatenate the video data and construct the mdat
  11549. mdat = mp4.mdat(this.concatenateNalData_(gops));
  11550. // save all the nals in the last GOP into the gop cache
  11551. this.gopCache_.unshift({
  11552. gop: gops.pop(),
  11553. pps: track.pps,
  11554. sps: track.sps
  11555. });
  11556. // Keep a maximum of 6 GOPs in the cache
  11557. this.gopCache_.length = Math.min(6, this.gopCache_.length);
  11558. // Clear nalUnits
  11559. nalUnits = [];
  11560. track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
  11561. this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
  11562. this.trigger('timelineStartInfo', track.timelineStartInfo);
  11563. moof = mp4.moof(sequenceNumber, [track]);
  11564. // it would be great to allocate this array up front instead of
  11565. // throwing away hundreds of media segment fragments
  11566. boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
  11567. // Bump the sequence number for next time
  11568. sequenceNumber++;
  11569. boxes.set(moof);
  11570. boxes.set(mdat, moof.byteLength);
  11571. this.trigger('data', {track: track, boxes: boxes});
  11572. this.resetStream_();
  11573. // Continue with the flush process now
  11574. this.trigger('done', 'VideoSegmentStream');
  11575. };
  11576. this.resetStream_ = function() {
  11577. clearDtsInfo(track);
  11578. // reset config and pps because they may differ across segments
  11579. // for instance, when we are rendition switching
  11580. config = undefined;
  11581. pps = undefined;
  11582. };
  11583. // Search for a candidate Gop for gop-fusion from the gop cache and
  11584. // return it or return null if no good candidate was found
  11585. this.getGopForFusion_ = function(nalUnit) {
  11586. var
11587. halfSecond = 45000, // Half a second in a 90kHz clock
  11588. allowableOverlap = 10000, // About 3 frames @ 30fps
  11589. nearestDistance = Infinity,
  11590. dtsDistance,
  11591. nearestGopObj,
  11592. currentGop,
  11593. currentGopObj,
  11594. i;
  11595. // Search for the GOP nearest to the beginning of this nal unit
  11596. for (i = 0; i < this.gopCache_.length; i++) {
  11597. currentGopObj = this.gopCache_[i];
  11598. currentGop = currentGopObj.gop;
  11599. // Reject Gops with different SPS or PPS
  11600. if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) ||
  11601. !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
  11602. continue;
  11603. }
  11604. // Reject Gops that would require a negative baseMediaDecodeTime
  11605. if (currentGop.dts < track.timelineStartInfo.dts) {
  11606. continue;
  11607. }
  11608. // The distance between the end of the gop and the start of the nalUnit
  11609. dtsDistance = (nalUnit.dts - currentGop.dts) - currentGop.duration;
  11610. // Only consider GOPS that start before the nal unit and end within
  11611. // a half-second of the nal unit
  11612. if (dtsDistance >= -allowableOverlap &&
  11613. dtsDistance <= halfSecond) {
  11614. // Always use the closest GOP we found if there is more than
  11615. // one candidate
  11616. if (!nearestGopObj ||
  11617. nearestDistance > dtsDistance) {
  11618. nearestGopObj = currentGopObj;
  11619. nearestDistance = dtsDistance;
  11620. }
  11621. }
  11622. }
  11623. if (nearestGopObj) {
  11624. return nearestGopObj.gop;
  11625. }
  11626. return null;
  11627. };
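// Worked example of the distance test above (illustrative numbers, in 90kHz
// ticks): a cached GOP with dts = 100000 and duration = 90000 ends at 190000,
// so for a nal unit with dts = 200000
//
//   dtsDistance = (200000 - 100000) - 90000; // => 10000
//
// which falls within [-allowableOverlap, halfSecond] = [-10000, 45000],
// making that GOP a fusion candidate.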
  11628. this.extendFirstKeyFrame_ = function(gops) {
  11629. var currentGop;
  11630. if (!gops[0][0].keyFrame && gops.length > 1) {
  11631. // Remove the first GOP
  11632. currentGop = gops.shift();
  11633. gops.byteLength -= currentGop.byteLength;
  11634. gops.nalCount -= currentGop.nalCount;
  11635. // Extend the first frame of what is now the
  11636. // first gop to cover the time period of the
  11637. // frames we just removed
  11638. gops[0][0].dts = currentGop.dts;
  11639. gops[0][0].pts = currentGop.pts;
  11640. gops[0][0].duration += currentGop.duration;
  11641. }
  11642. return gops;
  11643. };
  11644. // Convert an array of nal units into an array of frames with each frame being
  11645. // composed of the nal units that make up that frame
11646. // Also keep track of cumulative data about the frame from the nal units such
  11647. // as the frame duration, starting pts, etc.
  11648. this.groupNalsIntoFrames_ = function(nalUnits) {
  11649. var
  11650. i,
  11651. currentNal,
  11652. currentFrame = [],
  11653. frames = [];
  11654. currentFrame.byteLength = 0;
  11655. for (i = 0; i < nalUnits.length; i++) {
  11656. currentNal = nalUnits[i];
  11657. // Split on 'aud'-type nal units
  11658. if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
  11659. // Since the very first nal unit is expected to be an AUD
  11660. // only push to the frames array when currentFrame is not empty
  11661. if (currentFrame.length) {
  11662. currentFrame.duration = currentNal.dts - currentFrame.dts;
  11663. frames.push(currentFrame);
  11664. }
  11665. currentFrame = [currentNal];
  11666. currentFrame.byteLength = currentNal.data.byteLength;
  11667. currentFrame.pts = currentNal.pts;
  11668. currentFrame.dts = currentNal.dts;
  11669. } else {
  11670. // Specifically flag key frames for ease of use later
  11671. if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
  11672. currentFrame.keyFrame = true;
  11673. }
  11674. currentFrame.duration = currentNal.dts - currentFrame.dts;
  11675. currentFrame.byteLength += currentNal.data.byteLength;
  11676. currentFrame.push(currentNal);
  11677. }
  11678. }
  11679. // For the last frame, use the duration of the previous frame if we
  11680. // have nothing better to go on
  11681. if (frames.length &&
  11682. (!currentFrame.duration ||
  11683. currentFrame.duration <= 0)) {
  11684. currentFrame.duration = frames[frames.length - 1].duration;
  11685. }
  11686. // Push the final frame
  11687. frames.push(currentFrame);
  11688. return frames;
  11689. };
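// Example of the grouping above (illustrative): a nal unit sequence such as
//
//   AUD, SPS, PPS, IDR-slice, AUD, slice, AUD, slice
//
// is split on the AUDs into three frames; the first is marked keyFrame = true
// because it contains an IDR slice, and each frame's duration is the DTS
// delta to the frame that follows it.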
  11690. // Convert an array of frames into an array of Gop with each Gop being composed
  11691. // of the frames that make up that Gop
11692. // Also keep track of cumulative data about the Gop from the frames such as the
  11693. // Gop duration, starting pts, etc.
  11694. this.groupFramesIntoGops_ = function(frames) {
  11695. var
  11696. i,
  11697. currentFrame,
  11698. currentGop = [],
  11699. gops = [];
  11700. // We must pre-set some of the values on the Gop since we
  11701. // keep running totals of these values
  11702. currentGop.byteLength = 0;
  11703. currentGop.nalCount = 0;
  11704. currentGop.duration = 0;
  11705. currentGop.pts = frames[0].pts;
  11706. currentGop.dts = frames[0].dts;
  11707. // store some metadata about all the Gops
  11708. gops.byteLength = 0;
  11709. gops.nalCount = 0;
  11710. gops.duration = 0;
  11711. gops.pts = frames[0].pts;
  11712. gops.dts = frames[0].dts;
  11713. for (i = 0; i < frames.length; i++) {
  11714. currentFrame = frames[i];
  11715. if (currentFrame.keyFrame) {
11716. // Since the very first frame is expected to be a keyframe
  11717. // only push to the gops array when currentGop is not empty
  11718. if (currentGop.length) {
  11719. gops.push(currentGop);
  11720. gops.byteLength += currentGop.byteLength;
  11721. gops.nalCount += currentGop.nalCount;
  11722. gops.duration += currentGop.duration;
  11723. }
  11724. currentGop = [currentFrame];
  11725. currentGop.nalCount = currentFrame.length;
  11726. currentGop.byteLength = currentFrame.byteLength;
  11727. currentGop.pts = currentFrame.pts;
  11728. currentGop.dts = currentFrame.dts;
  11729. currentGop.duration = currentFrame.duration;
  11730. } else {
  11731. currentGop.duration += currentFrame.duration;
  11732. currentGop.nalCount += currentFrame.length;
  11733. currentGop.byteLength += currentFrame.byteLength;
  11734. currentGop.push(currentFrame);
  11735. }
  11736. }
  11737. if (gops.length && currentGop.duration <= 0) {
  11738. currentGop.duration = gops[gops.length - 1].duration;
  11739. }
  11740. gops.byteLength += currentGop.byteLength;
  11741. gops.nalCount += currentGop.nalCount;
  11742. gops.duration += currentGop.duration;
  11743. // push the final Gop
  11744. gops.push(currentGop);
  11745. return gops;
  11746. };
  11747. // generate the track's sample table from an array of gops
  11748. this.generateSampleTable_ = function(gops, baseDataOffset) {
  11749. var
  11750. h, i,
  11751. sample,
  11752. currentGop,
  11753. currentFrame,
  11754. dataOffset = baseDataOffset || 0,
  11755. samples = [];
  11756. for (h = 0; h < gops.length; h++) {
  11757. currentGop = gops[h];
  11758. for (i = 0; i < currentGop.length; i++) {
  11759. currentFrame = currentGop[i];
  11760. sample = createDefaultSample();
  11761. sample.dataOffset = dataOffset;
  11762. sample.compositionTimeOffset = currentFrame.pts - currentFrame.dts;
  11763. sample.duration = currentFrame.duration;
  11764. sample.size = 4 * currentFrame.length; // Space for nal unit size
  11765. sample.size += currentFrame.byteLength;
  11766. if (currentFrame.keyFrame) {
  11767. sample.flags.dependsOn = 2;
  11768. }
  11769. dataOffset += sample.size;
  11770. samples.push(sample);
  11771. }
  11772. }
  11773. return samples;
  11774. };
  11775. // generate the track's raw mdat data from an array of gops
  11776. this.concatenateNalData_ = function(gops) {
  11777. var
  11778. h, i, j,
  11779. currentGop,
  11780. currentFrame,
  11781. currentNal,
  11782. dataOffset = 0,
  11783. nalsByteLength = gops.byteLength,
  11784. numberOfNals = gops.nalCount,
  11785. totalByteLength = nalsByteLength + 4 * numberOfNals,
  11786. data = new Uint8Array(totalByteLength),
  11787. view = new DataView(data.buffer);
  11788. // For each Gop..
  11789. for (h = 0; h < gops.length; h++) {
  11790. currentGop = gops[h];
  11791. // For each Frame..
  11792. for (i = 0; i < currentGop.length; i++) {
  11793. currentFrame = currentGop[i];
  11794. // For each NAL..
  11795. for (j = 0; j < currentFrame.length; j++) {
  11796. currentNal = currentFrame[j];
  11797. view.setUint32(dataOffset, currentNal.data.byteLength);
  11798. dataOffset += 4;
  11799. data.set(currentNal.data, dataOffset);
  11800. dataOffset += currentNal.data.byteLength;
  11801. }
  11802. }
  11803. }
  11804. return data;
  11805. };
  11806. };
  11807. VideoSegmentStream.prototype = new Stream();
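// Usage sketch (assumes `track` is the video track object produced by the
// elementary stream and `h264Stream` is an H264Stream instance, mirroring the
// wiring done by the Transmuxer below):
//
//   var videoSegmentStream = new VideoSegmentStream(track);
//   videoSegmentStream.on('data', function(segment) {
//     // segment.boxes is a Uint8Array containing the moof + mdat
//   });
//   h264Stream.pipe(videoSegmentStream);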
  11808. /**
  11809. * Store information about the start and end of the track and the
  11810. * duration for each frame/sample we process in order to calculate
  11811. * the baseMediaDecodeTime
  11812. */
  11813. collectDtsInfo = function(track, data) {
  11814. if (typeof data.pts === 'number') {
  11815. if (track.timelineStartInfo.pts === undefined) {
  11816. track.timelineStartInfo.pts = data.pts;
  11817. }
  11818. if (track.minSegmentPts === undefined) {
  11819. track.minSegmentPts = data.pts;
  11820. } else {
  11821. track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
  11822. }
  11823. if (track.maxSegmentPts === undefined) {
  11824. track.maxSegmentPts = data.pts;
  11825. } else {
  11826. track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
  11827. }
  11828. }
  11829. if (typeof data.dts === 'number') {
  11830. if (track.timelineStartInfo.dts === undefined) {
  11831. track.timelineStartInfo.dts = data.dts;
  11832. }
  11833. if (track.minSegmentDts === undefined) {
  11834. track.minSegmentDts = data.dts;
  11835. } else {
  11836. track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
  11837. }
  11838. if (track.maxSegmentDts === undefined) {
  11839. track.maxSegmentDts = data.dts;
  11840. } else {
  11841. track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
  11842. }
  11843. }
  11844. };
  11845. /**
  11846. * Clear values used to calculate the baseMediaDecodeTime between
  11847. * tracks
  11848. */
  11849. clearDtsInfo = function(track) {
  11850. delete track.minSegmentDts;
  11851. delete track.maxSegmentDts;
  11852. delete track.minSegmentPts;
  11853. delete track.maxSegmentPts;
  11854. };
  11855. /**
  11856. * Calculate the track's baseMediaDecodeTime based on the earliest
  11857. * DTS the transmuxer has ever seen and the minimum DTS for the
  11858. * current track
  11859. */
  11860. calculateTrackBaseMediaDecodeTime = function(track) {
  11861. var
  11862. baseMediaDecodeTime,
  11863. scale,
  11864. // Calculate the distance, in time, that this segment starts from the start
  11865. // of the timeline (earliest time seen since the transmuxer initialized)
  11866. timeSinceStartOfTimeline = track.minSegmentDts - track.timelineStartInfo.dts;
  11867. // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
  11868. // we want the start of the first segment to be placed
  11869. baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime;
  11870. // Add to that the distance this segment is from the very first
  11871. baseMediaDecodeTime += timeSinceStartOfTimeline;
  11872. // baseMediaDecodeTime must not become negative
  11873. baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
  11874. if (track.type === 'audio') {
11875. // Audio uses a clock rate equal to the track's sample rate, so we need
11876. // to scale the 90kHz values into the clock rate of the track
  11877. scale = track.samplerate / ONE_SECOND_IN_TS;
  11878. baseMediaDecodeTime *= scale;
  11879. baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
  11880. }
  11881. return baseMediaDecodeTime;
  11882. };
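// Worked example of the audio rescaling above (illustrative numbers): a
// baseMediaDecodeTime of 180000 ticks on the 90kHz clock, for a track with a
// 44100Hz sample rate, becomes
//
//   Math.floor(180000 * (44100 / 90000)); // => 88200
//
// i.e. exactly two seconds expressed in the audio track's own clock.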
  11883. /**
11884. * A Stream that can combine multiple streams (i.e. audio & video)
  11885. * into a single output segment for MSE. Also supports audio-only
  11886. * and video-only streams.
  11887. */
  11888. CoalesceStream = function(options, metadataStream) {
  11889. // Number of Tracks per output segment
  11890. // If greater than 1, we combine multiple
  11891. // tracks into a single segment
  11892. this.numberOfTracks = 0;
  11893. this.metadataStream = metadataStream;
  11894. if (typeof options.remux !== 'undefined') {
  11895. this.remuxTracks = !!options.remux;
  11896. } else {
  11897. this.remuxTracks = true;
  11898. }
  11899. this.pendingTracks = [];
  11900. this.videoTrack = null;
  11901. this.pendingBoxes = [];
  11902. this.pendingCaptions = [];
  11903. this.pendingMetadata = [];
  11904. this.pendingBytes = 0;
  11905. this.emittedTracks = 0;
  11906. CoalesceStream.prototype.init.call(this);
11907. // Take output from the multiple upstream streams and buffer it until flush
  11908. this.push = function(output) {
  11909. // buffer incoming captions until the associated video segment
  11910. // finishes
  11911. if (output.text) {
  11912. return this.pendingCaptions.push(output);
  11913. }
  11914. // buffer incoming id3 tags until the final flush
  11915. if (output.frames) {
  11916. return this.pendingMetadata.push(output);
  11917. }
  11918. // Add this track to the list of pending tracks and store
  11919. // important information required for the construction of
  11920. // the final segment
  11921. this.pendingTracks.push(output.track);
  11922. this.pendingBoxes.push(output.boxes);
  11923. this.pendingBytes += output.boxes.byteLength;
  11924. if (output.track.type === 'video') {
  11925. this.videoTrack = output.track;
  11926. }
  11927. if (output.track.type === 'audio') {
  11928. this.audioTrack = output.track;
  11929. }
  11930. };
  11931. };
  11932. CoalesceStream.prototype = new Stream();
  11933. CoalesceStream.prototype.flush = function(flushSource) {
  11934. var
  11935. offset = 0,
  11936. event = {
  11937. captions: [],
  11938. metadata: [],
  11939. info: {}
  11940. },
  11941. caption,
  11942. id3,
  11943. initSegment,
  11944. timelineStartPts = 0,
  11945. i;
  11946. if (this.pendingTracks.length < this.numberOfTracks) {
  11947. if (flushSource !== 'VideoSegmentStream' &&
  11948. flushSource !== 'AudioSegmentStream') {
  11949. // Return because we haven't received a flush from a data-generating
11950. // portion of the segment (meaning that we have only received metadata
  11951. // or captions.)
  11952. return;
  11953. } else if (this.remuxTracks) {
  11954. // Return until we have enough tracks from the pipeline to remux (if we
  11955. // are remuxing audio and video into a single MP4)
  11956. return;
  11957. } else if (this.pendingTracks.length === 0) {
  11958. // In the case where we receive a flush without any data having been
  11959. // received we consider it an emitted track for the purposes of coalescing
  11960. // `done` events.
  11961. // We do this for the case where there is an audio and video track in the
  11962. // segment but no audio data. (seen in several playlists with alternate
  11963. // audio tracks and no audio present in the main TS segments.)
  11964. this.emittedTracks++;
  11965. if (this.emittedTracks >= this.numberOfTracks) {
  11966. this.trigger('done');
  11967. this.emittedTracks = 0;
  11968. }
  11969. return;
  11970. }
  11971. }
  11972. if (this.videoTrack) {
  11973. timelineStartPts = this.videoTrack.timelineStartInfo.pts;
  11974. VIDEO_PROPERTIES.forEach(function(prop) {
  11975. event.info[prop] = this.videoTrack[prop];
  11976. }, this);
  11977. } else if (this.audioTrack) {
  11978. timelineStartPts = this.audioTrack.timelineStartInfo.pts;
  11979. AUDIO_PROPERTIES.forEach(function(prop) {
  11980. event.info[prop] = this.audioTrack[prop];
  11981. }, this);
  11982. }
  11983. if (this.pendingTracks.length === 1) {
  11984. event.type = this.pendingTracks[0].type;
  11985. } else {
  11986. event.type = 'combined';
  11987. }
  11988. this.emittedTracks += this.pendingTracks.length;
  11989. initSegment = mp4.initSegment(this.pendingTracks);
  11990. // Create a new typed array to hold the init segment
  11991. event.initSegment = new Uint8Array(initSegment.byteLength);
  11992. // Create an init segment containing a moov
  11993. // and track definitions
  11994. event.initSegment.set(initSegment);
  11995. // Create a new typed array to hold the moof+mdats
  11996. event.data = new Uint8Array(this.pendingBytes);
  11997. // Append each moof+mdat (one per track) together
  11998. for (i = 0; i < this.pendingBoxes.length; i++) {
  11999. event.data.set(this.pendingBoxes[i], offset);
  12000. offset += this.pendingBoxes[i].byteLength;
  12001. }
  12002. // Translate caption PTS times into second offsets into the
  12003. // video timeline for the segment
  12004. for (i = 0; i < this.pendingCaptions.length; i++) {
  12005. caption = this.pendingCaptions[i];
  12006. caption.startTime = (caption.startPts - timelineStartPts);
  12007. caption.startTime /= 90e3;
  12008. caption.endTime = (caption.endPts - timelineStartPts);
  12009. caption.endTime /= 90e3;
  12010. event.captions.push(caption);
  12011. }
  12012. // Translate ID3 frame PTS times into second offsets into the
  12013. // video timeline for the segment
  12014. for (i = 0; i < this.pendingMetadata.length; i++) {
  12015. id3 = this.pendingMetadata[i];
  12016. id3.cueTime = (id3.pts - timelineStartPts);
  12017. id3.cueTime /= 90e3;
  12018. event.metadata.push(id3);
  12019. }
  12020. // We add this to every single emitted segment even though we only need
  12021. // it for the first
  12022. event.metadata.dispatchType = this.metadataStream.dispatchType;
  12023. // Reset stream state
  12024. this.pendingTracks.length = 0;
  12025. this.videoTrack = null;
  12026. this.pendingBoxes.length = 0;
  12027. this.pendingCaptions.length = 0;
  12028. this.pendingBytes = 0;
  12029. this.pendingMetadata.length = 0;
  12030. // Emit the built segment
  12031. this.trigger('data', event);
  12032. // Only emit `done` if all tracks have been flushed and emitted
  12033. if (this.emittedTracks >= this.numberOfTracks) {
  12034. this.trigger('done');
  12035. this.emittedTracks = 0;
  12036. }
  12037. };
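// Worked example of the PTS-to-seconds translation above (illustrative
// numbers): with timelineStartPts = 126000 and a caption whose
// startPts = 1026000, the caption begins
//
//   (1026000 - 126000) / 90e3; // => 10 seconds
//
// into the segment's video timeline; id3.cueTime is derived the same way.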
  12038. /**
  12039. * A Stream that expects MP2T binary data as input and produces
  12040. * corresponding media segments, suitable for use with Media Source
12041. * Extensions (MSE) implementations that support the ISO BMFF byte
  12042. * stream format, like Chrome.
  12043. */
  12044. Transmuxer = function(options) {
  12045. var
  12046. self = this,
  12047. hasFlushed = true,
  12048. videoTrack,
  12049. audioTrack;
  12050. Transmuxer.prototype.init.call(this);
  12051. options = options || {};
  12052. this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
  12053. this.transmuxPipeline_ = {};
  12054. this.setupAacPipeline = function() {
  12055. var pipeline = {};
  12056. this.transmuxPipeline_ = pipeline;
  12057. pipeline.type = 'aac';
  12058. pipeline.metadataStream = new m2ts.MetadataStream();
  12059. // set up the parsing pipeline
  12060. pipeline.aacStream = new AacStream();
  12061. pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
  12062. pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
  12063. pipeline.adtsStream = new AdtsStream();
  12064. pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
  12065. pipeline.headOfPipeline = pipeline.aacStream;
  12066. pipeline.aacStream
  12067. .pipe(pipeline.audioTimestampRolloverStream)
  12068. .pipe(pipeline.adtsStream);
  12069. pipeline.aacStream
  12070. .pipe(pipeline.timedMetadataTimestampRolloverStream)
  12071. .pipe(pipeline.metadataStream)
  12072. .pipe(pipeline.coalesceStream);
  12073. pipeline.metadataStream.on('timestamp', function(frame) {
  12074. pipeline.aacStream.setTimestamp(frame.timeStamp);
  12075. });
  12076. pipeline.aacStream.on('data', function(data) {
  12077. if (data.type === 'timed-metadata' && !pipeline.audioSegmentStream) {
  12078. audioTrack = audioTrack || {
  12079. timelineStartInfo: {
  12080. baseMediaDecodeTime: self.baseMediaDecodeTime
  12081. },
  12082. codec: 'adts',
  12083. type: 'audio'
  12084. };
  12085. // hook up the audio segment stream to the first track with aac data
  12086. pipeline.coalesceStream.numberOfTracks++;
  12087. pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
  12088. // Set up the final part of the audio pipeline
  12089. pipeline.adtsStream
  12090. .pipe(pipeline.audioSegmentStream)
  12091. .pipe(pipeline.coalesceStream);
  12092. }
  12093. });
  12094. // Re-emit any data coming from the coalesce stream to the outside world
  12095. pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
  12096. // Let the consumer know we have finished flushing the entire pipeline
  12097. pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
  12098. };
  12099. this.setupTsPipeline = function() {
  12100. var pipeline = {};
  12101. this.transmuxPipeline_ = pipeline;
  12102. pipeline.type = 'ts';
  12103. pipeline.metadataStream = new m2ts.MetadataStream();
  12104. // set up the parsing pipeline
  12105. pipeline.packetStream = new m2ts.TransportPacketStream();
  12106. pipeline.parseStream = new m2ts.TransportParseStream();
  12107. pipeline.elementaryStream = new m2ts.ElementaryStream();
  12108. pipeline.videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
  12109. pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
  12110. pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
  12111. pipeline.adtsStream = new AdtsStream();
  12112. pipeline.h264Stream = new H264Stream();
  12113. pipeline.captionStream = new m2ts.CaptionStream();
  12114. pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
  12115. pipeline.headOfPipeline = pipeline.packetStream;
  12116. // disassemble MPEG2-TS packets into elementary streams
  12117. pipeline.packetStream
  12118. .pipe(pipeline.parseStream)
  12119. .pipe(pipeline.elementaryStream);
  12120. // !!THIS ORDER IS IMPORTANT!!
  12121. // demux the streams
  12122. pipeline.elementaryStream
  12123. .pipe(pipeline.videoTimestampRolloverStream)
  12124. .pipe(pipeline.h264Stream);
  12125. pipeline.elementaryStream
  12126. .pipe(pipeline.audioTimestampRolloverStream)
  12127. .pipe(pipeline.adtsStream);
  12128. pipeline.elementaryStream
  12129. .pipe(pipeline.timedMetadataTimestampRolloverStream)
  12130. .pipe(pipeline.metadataStream)
  12131. .pipe(pipeline.coalesceStream);
  12132. // Hook up CEA-608/708 caption stream
  12133. pipeline.h264Stream.pipe(pipeline.captionStream)
  12134. .pipe(pipeline.coalesceStream);
  12135. pipeline.elementaryStream.on('data', function(data) {
  12136. var i;
  12137. if (data.type === 'metadata') {
  12138. i = data.tracks.length;
  12139. // scan the tracks listed in the metadata
  12140. while (i--) {
  12141. if (!videoTrack && data.tracks[i].type === 'video') {
  12142. videoTrack = data.tracks[i];
  12143. videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
  12144. } else if (!audioTrack && data.tracks[i].type === 'audio') {
  12145. audioTrack = data.tracks[i];
  12146. audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
  12147. }
  12148. }
  12149. // hook up the video segment stream to the first track with h264 data
  12150. if (videoTrack && !pipeline.videoSegmentStream) {
  12151. pipeline.coalesceStream.numberOfTracks++;
  12152. pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack);
  12153. pipeline.videoSegmentStream.on('timelineStartInfo', function(timelineStartInfo) {
  12154. // When video emits timelineStartInfo data after a flush, we forward that
  12155. // info to the AudioSegmentStream, if it exists, because video timeline
  12156. // data takes precedence.
  12157. if (audioTrack) {
  12158. audioTrack.timelineStartInfo = timelineStartInfo;
  12159. // On the first segment we trim AAC frames that exist before the
  12160. // very earliest DTS we have seen in video because Chrome will
  12161. // interpret any video track with a baseMediaDecodeTime that is
  12162. // non-zero as a gap.
  12163. pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts);
  12164. }
  12165. });
  12166. pipeline.videoSegmentStream.on('baseMediaDecodeTime', function(baseMediaDecodeTime) {
  12167. if (audioTrack) {
  12168. pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
  12169. }
  12170. });
  12171. // Set up the final part of the video pipeline
  12172. pipeline.h264Stream
  12173. .pipe(pipeline.videoSegmentStream)
  12174. .pipe(pipeline.coalesceStream);
  12175. }
  12176. if (audioTrack && !pipeline.audioSegmentStream) {
  12177. // hook up the audio segment stream to the first track with aac data
  12178. pipeline.coalesceStream.numberOfTracks++;
  12179. pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
  12180. // Set up the final part of the audio pipeline
  12181. pipeline.adtsStream
  12182. .pipe(pipeline.audioSegmentStream)
  12183. .pipe(pipeline.coalesceStream);
  12184. }
  12185. }
  12186. });
  12187. // Re-emit any data coming from the coalesce stream to the outside world
  12188. pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
  12189. // Let the consumer know we have finished flushing the entire pipeline
  12190. pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
  12191. };
12192. // update the timeline offset used for future segments and reset per-track timing state
  12193. this.setBaseMediaDecodeTime = function(baseMediaDecodeTime) {
  12194. var pipeline = this.transmuxPipeline_;
  12195. this.baseMediaDecodeTime = baseMediaDecodeTime;
  12196. if (audioTrack) {
  12197. audioTrack.timelineStartInfo.dts = undefined;
  12198. audioTrack.timelineStartInfo.pts = undefined;
  12199. clearDtsInfo(audioTrack);
  12200. audioTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
  12201. if (pipeline.audioTimestampRolloverStream) {
  12202. pipeline.audioTimestampRolloverStream.discontinuity();
  12203. }
  12204. }
  12205. if (videoTrack) {
  12206. if (pipeline.videoSegmentStream) {
  12207. pipeline.videoSegmentStream.gopCache_ = [];
  12208. pipeline.videoTimestampRolloverStream.discontinuity();
  12209. }
  12210. videoTrack.timelineStartInfo.dts = undefined;
  12211. videoTrack.timelineStartInfo.pts = undefined;
  12212. clearDtsInfo(videoTrack);
  12213. videoTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
  12214. }
  12215. if (pipeline.timedMetadataTimestampRolloverStream) {
  12216. pipeline.timedMetadataTimestampRolloverStream.discontinuity();
  12217. }
  12218. };
  12219. this.setAudioAppendStart = function(timestamp) {
  12220. if (audioTrack) {
  12221. this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
  12222. }
  12223. };
  12224. // feed incoming data to the front of the parsing pipeline
  12225. this.push = function(data) {
  12226. if (hasFlushed) {
  12227. var isAac = isLikelyAacData(data);
  12228. if (isAac && this.transmuxPipeline_.type !== 'aac') {
  12229. this.setupAacPipeline();
  12230. } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
  12231. this.setupTsPipeline();
  12232. }
  12233. hasFlushed = false;
  12234. }
  12235. this.transmuxPipeline_.headOfPipeline.push(data);
  12236. };
  12237. // flush any buffered data
  12238. this.flush = function() {
  12239. hasFlushed = true;
  12240. // Start at the top of the pipeline and flush all pending work
  12241. this.transmuxPipeline_.headOfPipeline.flush();
  12242. };
  12243. };
  12244. Transmuxer.prototype = new Stream();
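// Usage sketch (assumes `bytes` is a Uint8Array of MP2T or ID3-prefixed
// ADTS/AAC data; the appropriate pipeline is selected automatically on the
// first push after a flush):
//
//   var transmuxer = new Transmuxer({ remux: true });
//   transmuxer.on('data', function(segment) {
//     // segment.initSegment (moov) and segment.data (moof + mdat) are
//     // ready to be appended to a SourceBuffer
//   });
//   transmuxer.push(bytes);
//   transmuxer.flush();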
  12245. module.exports = {
  12246. Transmuxer: Transmuxer,
  12247. VideoSegmentStream: VideoSegmentStream,
  12248. AudioSegmentStream: AudioSegmentStream,
  12249. AUDIO_PROPERTIES: AUDIO_PROPERTIES,
  12250. VIDEO_PROPERTIES: VIDEO_PROPERTIES
  12251. };
  12252. },{"../aac":36,"../codecs/adts.js":38,"../codecs/h264":39,"../data/silence":40,"../m2ts/m2ts.js":48,"../utils/clock":58,"../utils/stream.js":60,"./mp4-generator.js":54}],57:[function(require,module,exports){
  12253. /**
  12254. * mux.js
  12255. *
  12256. * Copyright (c) 2016 Brightcove
  12257. * All rights reserved.
  12258. *
  12259. * Parse mpeg2 transport stream packets to extract basic timing information
  12260. */
  12261. 'use strict';
  12262. var StreamTypes = require('../m2ts/stream-types.js');
  12263. var handleRollover = require('../m2ts/timestamp-rollover-stream.js').handleRollover;
  12264. var probe = {};
  12265. probe.ts = require('../m2ts/probe.js');
  12266. probe.aac = require('../aac/probe.js');
  12267. var
  12268. PES_TIMESCALE = 90000,
  12269. MP2T_PACKET_LENGTH = 188, // bytes
  12270. SYNC_BYTE = 0x47;
  12271. var isLikelyAacData = function(data) {
  12272. if ((data[0] === 'I'.charCodeAt(0)) &&
  12273. (data[1] === 'D'.charCodeAt(0)) &&
  12274. (data[2] === '3'.charCodeAt(0))) {
  12275. return true;
  12276. }
  12277. return false;
  12278. };
  12279. /**
  12280. * walks through segment data looking for pat and pmt packets to parse out
  12281. * program map table information
  12282. */
  12283. var parsePsi_ = function(bytes, pmt) {
  12284. var
  12285. startIndex = 0,
  12286. endIndex = MP2T_PACKET_LENGTH,
  12287. packet, type;
  12288. while (endIndex < bytes.byteLength) {
  12289. // Look for a pair of start and end sync bytes in the data..
  12290. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  12291. // We found a packet
  12292. packet = bytes.subarray(startIndex, endIndex);
  12293. type = probe.ts.parseType(packet, pmt.pid);
  12294. switch (type) {
  12295. case 'pat':
  12296. if (!pmt.pid) {
  12297. pmt.pid = probe.ts.parsePat(packet);
  12298. }
  12299. break;
  12300. case 'pmt':
  12301. if (!pmt.table) {
  12302. pmt.table = probe.ts.parsePmt(packet);
  12303. }
  12304. break;
  12305. default:
  12306. break;
  12307. }
  12308. // Found the pat and pmt, we can stop walking the segment
  12309. if (pmt.pid && pmt.table) {
  12310. return;
  12311. }
  12312. startIndex += MP2T_PACKET_LENGTH;
  12313. endIndex += MP2T_PACKET_LENGTH;
  12314. continue;
  12315. }
  12316. // If we get here, we have somehow become de-synchronized and we need to step
  12317. // forward one byte at a time until we find a pair of sync bytes that denote
  12318. // a packet
  12319. startIndex++;
  12320. endIndex++;
  12321. }
  12322. };
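// Usage sketch (assumes `segmentBytes` is a Uint8Array of MP2T packets):
// parsePsi_ fills in the passed pmt object in place.
//
//   var pmt = { pid: null, table: null };
//   parsePsi_(segmentBytes, pmt);
//   // pmt.pid   -> the PMT's PID, parsed from the PAT
//   // pmt.table -> a map from elementary PIDs to stream types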
  12323. /**
  12324. * walks through the segment data from the start and end to get timing information
  12325. * for the first and last audio pes packets
  12326. */
  12327. var parseAudioPes_ = function(bytes, pmt, result) {
  12328. var
  12329. startIndex = 0,
  12330. endIndex = MP2T_PACKET_LENGTH,
  12331. packet, type, pesType, pusi, parsed;
  12332. var endLoop = false;
  12333. // Start walking from start of segment to get first audio packet
  12334. while (endIndex < bytes.byteLength) {
  12335. // Look for a pair of start and end sync bytes in the data..
  12336. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  12337. // We found a packet
  12338. packet = bytes.subarray(startIndex, endIndex);
  12339. type = probe.ts.parseType(packet, pmt.pid);
  12340. switch (type) {
  12341. case 'pes':
  12342. pesType = probe.ts.parsePesType(packet, pmt.table);
  12343. pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
  12344. if (pesType === 'audio' && pusi) {
  12345. parsed = probe.ts.parsePesTime(packet);
  12346. parsed.type = 'audio';
  12347. result.audio.push(parsed);
  12348. endLoop = true;
  12349. }
  12350. break;
  12351. default:
  12352. break;
  12353. }
  12354. if (endLoop) {
  12355. break;
  12356. }
  12357. startIndex += MP2T_PACKET_LENGTH;
  12358. endIndex += MP2T_PACKET_LENGTH;
  12359. continue;
  12360. }
  12361. // If we get here, we have somehow become de-synchronized and we need to step
  12362. // forward one byte at a time until we find a pair of sync bytes that denote
  12363. // a packet
  12364. startIndex++;
  12365. endIndex++;
  12366. }
  12367. // Start walking from end of segment to get last audio packet
  12368. endIndex = bytes.byteLength;
  12369. startIndex = endIndex - MP2T_PACKET_LENGTH;
  12370. endLoop = false;
  12371. while (startIndex >= 0) {
  12372. // Look for a pair of start and end sync bytes in the data..
  12373. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  12374. // We found a packet
  12375. packet = bytes.subarray(startIndex, endIndex);
  12376. type = probe.ts.parseType(packet, pmt.pid);
  12377. switch (type) {
  12378. case 'pes':
  12379. pesType = probe.ts.parsePesType(packet, pmt.table);
  12380. pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
  12381. if (pesType === 'audio' && pusi) {
  12382. parsed = probe.ts.parsePesTime(packet);
  12383. parsed.type = 'audio';
  12384. result.audio.push(parsed);
  12385. endLoop = true;
  12386. }
  12387. break;
  12388. default:
  12389. break;
  12390. }
  12391. if (endLoop) {
  12392. break;
  12393. }
  12394. startIndex -= MP2T_PACKET_LENGTH;
  12395. endIndex -= MP2T_PACKET_LENGTH;
  12396. continue;
  12397. }
  12398. // If we get here, we have somehow become de-synchronized and we need to step
12399. // backward one byte at a time until we find a pair of sync bytes that denote
  12400. // a packet
  12401. startIndex--;
  12402. endIndex--;
  12403. }
  12404. };
  12405. /**
  12406. * walks through the segment data from the start and end to get timing information
  12407. * for the first and last video pes packets as well as timing information for the first
  12408. * key frame.
  12409. */
  12410. var parseVideoPes_ = function(bytes, pmt, result) {
  12411. var
  12412. startIndex = 0,
  12413. endIndex = MP2T_PACKET_LENGTH,
  12414. packet, type, pesType, pusi, parsed, frame, i, pes;
  12415. var endLoop = false;
  12416. var currentFrame = {
  12417. data: [],
  12418. size: 0
  12419. };
  12420. // Start walking from start of segment to get first video packet
  12421. while (endIndex < bytes.byteLength) {
  12422. // Look for a pair of start and end sync bytes in the data..
  12423. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  12424. // We found a packet
  12425. packet = bytes.subarray(startIndex, endIndex);
  12426. type = probe.ts.parseType(packet, pmt.pid);
  12427. switch (type) {
  12428. case 'pes':
  12429. pesType = probe.ts.parsePesType(packet, pmt.table);
  12430. pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
  12431. if (pesType === 'video') {
  12432. if (pusi && !endLoop) {
  12433. parsed = probe.ts.parsePesTime(packet);
  12434. parsed.type = 'video';
  12435. result.video.push(parsed);
  12436. endLoop = true;
  12437. }
  12438. if (!result.firstKeyFrame) {
  12439. if (pusi) {
  12440. if (currentFrame.size !== 0) {
  12441. frame = new Uint8Array(currentFrame.size);
  12442. i = 0;
  12443. while (currentFrame.data.length) {
  12444. pes = currentFrame.data.shift();
  12445. frame.set(pes, i);
  12446. i += pes.byteLength;
  12447. }
  12448. if (probe.ts.videoPacketContainsKeyFrame(frame)) {
  12449. result.firstKeyFrame = probe.ts.parsePesTime(frame);
  12450. result.firstKeyFrame.type = 'video';
  12451. }
  12452. currentFrame.size = 0;
  12453. }
  12454. }
  12455. currentFrame.data.push(packet);
  12456. currentFrame.size += packet.byteLength;
  12457. }
  12458. }
  12459. break;
  12460. default:
  12461. break;
  12462. }
  12463. if (endLoop && result.firstKeyFrame) {
  12464. break;
  12465. }
  12466. startIndex += MP2T_PACKET_LENGTH;
  12467. endIndex += MP2T_PACKET_LENGTH;
  12468. continue;
  12469. }
  12470. // If we get here, we have somehow become de-synchronized and we need to step
  12471. // forward one byte at a time until we find a pair of sync bytes that denote
  12472. // a packet
  12473. startIndex++;
  12474. endIndex++;
  12475. }
  12476. // Start walking from end of segment to get last video packet
  12477. endIndex = bytes.byteLength;
  12478. startIndex = endIndex - MP2T_PACKET_LENGTH;
  12479. endLoop = false;
  12480. while (startIndex >= 0) {
  12481. // Look for a pair of start and end sync bytes in the data..
  12482. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  12483. // We found a packet
  12484. packet = bytes.subarray(startIndex, endIndex);
  12485. type = probe.ts.parseType(packet, pmt.pid);
  12486. switch (type) {
  12487. case 'pes':
  12488. pesType = probe.ts.parsePesType(packet, pmt.table);
  12489. pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
  12490. if (pesType === 'video' && pusi) {
  12491. parsed = probe.ts.parsePesTime(packet);
  12492. parsed.type = 'video';
  12493. result.video.push(parsed);
  12494. endLoop = true;
  12495. }
  12496. break;
  12497. default:
  12498. break;
  12499. }
  12500. if (endLoop) {
  12501. break;
  12502. }
  12503. startIndex -= MP2T_PACKET_LENGTH;
  12504. endIndex -= MP2T_PACKET_LENGTH;
  12505. continue;
  12506. }
  12507. // If we get here, we have somehow become de-synchronized and we need to step
12508. // backward one byte at a time until we find a pair of sync bytes that denote
  12509. // a packet
  12510. startIndex--;
  12511. endIndex--;
  12512. }
  12513. };
  12514. /**
  12515. * Adjusts the timestamp information for the segment to account for
  12516. * rollover and convert to seconds based on pes packet timescale (90khz clock)
  12517. */
  12518. var adjustTimestamp_ = function(segmentInfo, baseTimestamp) {
  12519. if (segmentInfo.audio && segmentInfo.audio.length) {
  12520. var audioBaseTimestamp = baseTimestamp;
  12521. if (typeof audioBaseTimestamp === 'undefined') {
  12522. audioBaseTimestamp = segmentInfo.audio[0].dts;
  12523. }
  12524. segmentInfo.audio.forEach(function(info) {
  12525. info.dts = handleRollover(info.dts, audioBaseTimestamp);
  12526. info.pts = handleRollover(info.pts, audioBaseTimestamp);
  12527. // time in seconds
  12528. info.dtsTime = info.dts / PES_TIMESCALE;
  12529. info.ptsTime = info.pts / PES_TIMESCALE;
  12530. });
  12531. }
  12532. if (segmentInfo.video && segmentInfo.video.length) {
  12533. var videoBaseTimestamp = baseTimestamp;
  12534. if (typeof videoBaseTimestamp === 'undefined') {
  12535. videoBaseTimestamp = segmentInfo.video[0].dts;
  12536. }
  12537. segmentInfo.video.forEach(function(info) {
  12538. info.dts = handleRollover(info.dts, videoBaseTimestamp);
  12539. info.pts = handleRollover(info.pts, videoBaseTimestamp);
  12540. // time in seconds
  12541. info.dtsTime = info.dts / PES_TIMESCALE;
  12542. info.ptsTime = info.pts / PES_TIMESCALE;
  12543. });
  12544. if (segmentInfo.firstKeyFrame) {
  12545. var frame = segmentInfo.firstKeyFrame;
  12546. frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
  12547. frame.pts = handleRollover(frame.pts, videoBaseTimestamp);
  12548. // time in seconds
  12549. frame.dtsTime = frame.dts / PES_TIMESCALE;
12550. frame.ptsTime = frame.pts / PES_TIMESCALE;
  12551. }
  12552. }
  12553. };
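// A sketch of the arithmetic above (illustrative addition, not part of the
// bundle), assuming handleRollover (from timestamp-rollover-stream.js)
// shifts a timestamp by multiples of 2^33 so it lands nearest the reference:
//
//   var PES_TIMESCALE = 90000;          // the 90kHz PES clock
//   var ROLLOVER = Math.pow(2, 33);     // PTS/DTS values wrap at 33 bits
//   var raw = 1000;                     // a dts sampled just after the wrap
//   var base = ROLLOVER - 4500;         // reference from just before the wrap
//   var adjusted = raw + ROLLOVER;      // what handleRollover would produce
//   var seconds = adjusted / PES_TIMESCALE; // ~95443.73s of continuous media time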
  12554. /**
  12555. * inspects the aac data stream for start and end time information
  12556. */
  12557. var inspectAac_ = function(bytes) {
  12558. var
  12559. endLoop = false,
  12560. audioCount = 0,
  12561. sampleRate = null,
  12562. timestamp = null,
  12563. frameSize = 0,
  12564. byteIndex = 0,
  12565. packet;
  12566. while (bytes.length - byteIndex >= 3) {
  12567. var type = probe.aac.parseType(bytes, byteIndex);
  12568. switch (type) {
  12569. case 'timed-metadata':
  12570. // Exit early because we don't have enough to parse
  12571. // the ID3 tag header
  12572. if (bytes.length - byteIndex < 10) {
  12573. endLoop = true;
  12574. break;
  12575. }
  12576. frameSize = probe.aac.parseId3TagSize(bytes, byteIndex);
  12577. // Exit early if we don't have enough in the buffer
  12578. // to emit a full packet
  12579. if (byteIndex + frameSize > bytes.length) {
  12580. endLoop = true;
  12581. break;
  12582. }
  12583. if (timestamp === null) {
  12584. packet = bytes.subarray(byteIndex, byteIndex + frameSize);
  12585. timestamp = probe.aac.parseAacTimestamp(packet);
  12586. }
  12587. byteIndex += frameSize;
  12588. break;
  12589. case 'audio':
  12590. // Exit early because we don't have enough to parse
  12591. // the ADTS frame header
  12592. if (bytes.length - byteIndex < 7) {
  12593. endLoop = true;
  12594. break;
  12595. }
  12596. frameSize = probe.aac.parseAdtsSize(bytes, byteIndex);
  12597. // Exit early if we don't have enough in the buffer
  12598. // to emit a full packet
  12599. if (byteIndex + frameSize > bytes.length) {
  12600. endLoop = true;
  12601. break;
  12602. }
  12603. if (sampleRate === null) {
  12604. packet = bytes.subarray(byteIndex, byteIndex + frameSize);
  12605. sampleRate = probe.aac.parseSampleRate(packet);
  12606. }
  12607. audioCount++;
  12608. byteIndex += frameSize;
  12609. break;
  12610. default:
  12611. byteIndex++;
  12612. break;
  12613. }
  12614. if (endLoop) {
  12615. return null;
  12616. }
  12617. }
  12618. if (sampleRate === null || timestamp === null) {
  12619. return null;
  12620. }
  12621. var audioTimescale = PES_TIMESCALE / sampleRate;
  12622. var result = {
  12623. audio: [
  12624. {
  12625. type: 'audio',
  12626. dts: timestamp,
  12627. pts: timestamp
  12628. },
  12629. {
  12630. type: 'audio',
  12631. dts: timestamp + (audioCount * 1024 * audioTimescale),
  12632. pts: timestamp + (audioCount * 1024 * audioTimescale)
  12633. }
  12634. ]
  12635. };
  12636. return result;
  12637. };
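// The end-time arithmetic above, worked for a common case (illustrative
// addition). Each ADTS frame carries 1024 PCM samples, so on the 90kHz
// clock one frame lasts 1024 * (90000 / sampleRate) ticks:
//
//   var sampleRate = 44100;
//   var audioTimescale = 90000 / sampleRate; // ~2.0408 ticks per sample
//   var frameTicks = 1024 * audioTimescale;  // ~2089.8 ticks, ~23.2ms
//   // after 100 frames the end dts/pts is timestamp + ~208979.6 ticks (~2.32s)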
  12638. /**
  12639. * inspects the transport stream segment data for start and end time information
  12640. * of the audio and video tracks (when present) as well as the first key frame's
  12641. * start time.
  12642. */
  12643. var inspectTs_ = function(bytes) {
  12644. var pmt = {
  12645. pid: null,
  12646. table: null
  12647. };
  12648. var result = {};
  12649. parsePsi_(bytes, pmt);
  12650. for (var pid in pmt.table) {
  12651. if (pmt.table.hasOwnProperty(pid)) {
  12652. var type = pmt.table[pid];
  12653. switch (type) {
  12654. case StreamTypes.H264_STREAM_TYPE:
  12655. result.video = [];
  12656. parseVideoPes_(bytes, pmt, result);
  12657. if (result.video.length === 0) {
  12658. delete result.video;
  12659. }
  12660. break;
  12661. case StreamTypes.ADTS_STREAM_TYPE:
  12662. result.audio = [];
  12663. parseAudioPes_(bytes, pmt, result);
  12664. if (result.audio.length === 0) {
  12665. delete result.audio;
  12666. }
  12667. break;
  12668. default:
  12669. break;
  12670. }
  12671. }
  12672. }
  12673. return result;
  12674. };
  12675. /**
  12676. * Inspects segment byte data and returns an object with start and end timing information
  12677. *
  12678. * @param {Uint8Array} bytes The segment byte data
  12679. * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
  12680. * timestamps for rollover. This value must be expressed in the 90kHz clock.
  12681. * @return {Object} Object containing start and end frame timing info of segment.
  12682. */
  12683. var inspect = function(bytes, baseTimestamp) {
  12684. var isAacData = isLikelyAacData(bytes);
  12685. var result;
  12686. if (isAacData) {
  12687. result = inspectAac_(bytes);
  12688. } else {
  12689. result = inspectTs_(bytes);
  12690. }
  12691. if (!result || (!result.audio && !result.video)) {
  12692. return null;
  12693. }
  12694. adjustTimestamp_(result, baseTimestamp);
  12695. return result;
  12696. };
  12697. module.exports = {
  12698. inspect: inspect
  12699. };
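// A hedged usage sketch for the inspector above (illustrative addition; the
// require path is an assumption based on the mux.js source layout):
//
//   var tsInspector = require('mux.js/lib/tools/ts-inspector.js');
//   // bytes is a Uint8Array of TS or ADTS data; baseTimestamp is the last
//   // seen dts in 90kHz ticks, used to unwrap 33-bit timestamp rollover
//   var timingInfo = tsInspector.inspect(bytes, baseTimestamp);
//   if (timingInfo && timingInfo.video) {
//     // [0] is the first video PES time, [1] the last; *Time is in seconds
//     console.log(timingInfo.video[0].ptsTime, timingInfo.video[1].ptsTime);
//   }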
  12700. },{"../aac/probe.js":37,"../m2ts/probe.js":50,"../m2ts/stream-types.js":51,"../m2ts/timestamp-rollover-stream.js":52}],58:[function(require,module,exports){
  12701. var
  12702. ONE_SECOND_IN_TS = 90000, // 90kHz clock
  12703. secondsToVideoTs,
  12704. secondsToAudioTs,
  12705. videoTsToSeconds,
  12706. audioTsToSeconds,
  12707. audioTsToVideoTs,
  12708. videoTsToAudioTs;
  12709. secondsToVideoTs = function(seconds) {
  12710. return seconds * ONE_SECOND_IN_TS;
  12711. };
  12712. secondsToAudioTs = function(seconds, sampleRate) {
  12713. return seconds * sampleRate;
  12714. };
  12715. videoTsToSeconds = function(timestamp) {
  12716. return timestamp / ONE_SECOND_IN_TS;
  12717. };
  12718. audioTsToSeconds = function(timestamp, sampleRate) {
  12719. return timestamp / sampleRate;
  12720. };
  12721. audioTsToVideoTs = function(timestamp, sampleRate) {
  12722. return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
  12723. };
  12724. videoTsToAudioTs = function(timestamp, sampleRate) {
  12725. return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
  12726. };
  12727. module.exports = {
  12728. secondsToVideoTs: secondsToVideoTs,
  12729. secondsToAudioTs: secondsToAudioTs,
  12730. videoTsToSeconds: videoTsToSeconds,
  12731. audioTsToSeconds: audioTsToSeconds,
  12732. audioTsToVideoTs: audioTsToVideoTs,
  12733. videoTsToAudioTs: videoTsToAudioTs
  12734. };
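// Worked examples for the conversions above (illustrative addition):
//
//   secondsToVideoTs(2);            // => 180000 (2s on the 90kHz clock)
//   videoTsToSeconds(45000);        // => 0.5
//   secondsToAudioTs(1, 44100);     // => 44100 (audio ticks count samples)
//   audioTsToVideoTs(22050, 44100); // => 45000 (0.5s expressed at 90kHz)
//   videoTsToAudioTs(90000, 48000); // => 48000 (1s of 48kHz audio)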
  12735. },{}],59:[function(require,module,exports){
  12736. 'use strict';
  12737. var ExpGolomb;
  12738. /**
  12739. * Parser for exponential Golomb codes, a variable-bitwidth number encoding
  12740. * scheme used by h264.
  12741. */
  12742. ExpGolomb = function(workingData) {
  12743. var
  12744. // the number of bytes left to examine in workingData
  12745. workingBytesAvailable = workingData.byteLength,
  12746. // the current word being examined
  12747. workingWord = 0, // :uint
  12748. // the number of bits left to examine in the current word
  12749. workingBitsAvailable = 0; // :uint;
  12750. // ():uint
  12751. this.length = function() {
  12752. return (8 * workingBytesAvailable);
  12753. };
  12754. // ():uint
  12755. this.bitsAvailable = function() {
  12756. return (8 * workingBytesAvailable) + workingBitsAvailable;
  12757. };
  12758. // ():void
  12759. this.loadWord = function() {
  12760. var
  12761. position = workingData.byteLength - workingBytesAvailable,
  12762. workingBytes = new Uint8Array(4),
  12763. availableBytes = Math.min(4, workingBytesAvailable);
  12764. if (availableBytes === 0) {
  12765. throw new Error('no bytes available');
  12766. }
  12767. workingBytes.set(workingData.subarray(position,
  12768. position + availableBytes));
  12769. workingWord = new DataView(workingBytes.buffer).getUint32(0);
  12770. // track the amount of workingData that has been processed
  12771. workingBitsAvailable = availableBytes * 8;
  12772. workingBytesAvailable -= availableBytes;
  12773. };
  12774. // (count:int):void
  12775. this.skipBits = function(count) {
  12776. var skipBytes; // :int
  12777. if (workingBitsAvailable > count) {
  12778. workingWord <<= count;
  12779. workingBitsAvailable -= count;
  12780. } else {
  12781. count -= workingBitsAvailable;
  12782. skipBytes = Math.floor(count / 8);
  12783. count -= (skipBytes * 8);
  12784. workingBytesAvailable -= skipBytes;
  12785. this.loadWord();
  12786. workingWord <<= count;
  12787. workingBitsAvailable -= count;
  12788. }
  12789. };
  12790. // (size:int):uint
  12791. this.readBits = function(size) {
  12792. var
  12793. bits = Math.min(workingBitsAvailable, size), // :uint
  12794. valu = workingWord >>> (32 - bits); // :uint
  12795. // if size > 31, handle error
  12796. workingBitsAvailable -= bits;
  12797. if (workingBitsAvailable > 0) {
  12798. workingWord <<= bits;
  12799. } else if (workingBytesAvailable > 0) {
  12800. this.loadWord();
  12801. }
  12802. bits = size - bits;
  12803. if (bits > 0) {
  12804. return valu << bits | this.readBits(bits);
  12805. }
  12806. return valu;
  12807. };
  12808. // ():uint
  12809. this.skipLeadingZeros = function() {
  12810. var leadingZeroCount; // :uint
  12811. for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
  12812. if ((workingWord & (0x80000000 >>> leadingZeroCount)) !== 0) {
  12813. // the first bit of working word is 1
  12814. workingWord <<= leadingZeroCount;
  12815. workingBitsAvailable -= leadingZeroCount;
  12816. return leadingZeroCount;
  12817. }
  12818. }
  12819. // we exhausted workingWord and still have not found a 1
  12820. this.loadWord();
  12821. return leadingZeroCount + this.skipLeadingZeros();
  12822. };
  12823. // ():void
  12824. this.skipUnsignedExpGolomb = function() {
  12825. this.skipBits(1 + this.skipLeadingZeros());
  12826. };
  12827. // ():void
  12828. this.skipExpGolomb = function() {
  12829. this.skipBits(1 + this.skipLeadingZeros());
  12830. };
  12831. // ():uint
  12832. this.readUnsignedExpGolomb = function() {
  12833. var clz = this.skipLeadingZeros(); // :uint
  12834. return this.readBits(clz + 1) - 1;
  12835. };
  12836. // ():int
  12837. this.readExpGolomb = function() {
  12838. var valu = this.readUnsignedExpGolomb(); // :int
  12839. if (0x01 & valu) {
  12840. // the number is odd if the low order bit is set
  12841. return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
  12842. }
  12843. return -1 * (valu >>> 1); // divide by two then make it negative
  12844. };
  12845. // Some convenience functions
  12846. // :Boolean
  12847. this.readBoolean = function() {
  12848. return this.readBits(1) === 1;
  12849. };
  12850. // ():int
  12851. this.readUnsignedByte = function() {
  12852. return this.readBits(8);
  12853. };
  12854. this.loadWord();
  12855. };
  12856. module.exports = ExpGolomb;
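// A small decoding sketch (illustrative addition). ue(v) writes z leading
// zero bits, a 1, then z more bits; the decoded value is readBits(z + 1) - 1.
// The byte 0x20 is 00100000 in binary: two leading zeros, then '100':
//
//   new ExpGolomb(new Uint8Array([0x20])).readUnsignedExpGolomb(); // => 3
//   // the signed variant maps ue(v) 1, 2, 3, 4, ... to +1, -1, +2, -2, ...
//   new ExpGolomb(new Uint8Array([0x20])).readExpGolomb();         // => 2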
  12857. },{}],60:[function(require,module,exports){
  12858. /**
  12859. * mux.js
  12860. *
  12861. * Copyright (c) 2014 Brightcove
  12862. * All rights reserved.
  12863. *
  12864. * A lightweight readable stream implementation that handles event dispatching.
  12865. * Objects that inherit from streams should call init in their constructors.
  12866. */
  12867. 'use strict';
  12868. var Stream = function() {
  12869. this.init = function() {
  12870. var listeners = {};
  12871. /**
  12872. * Add a listener for a specified event type.
  12873. * @param type {string} the event name
  12874. * @param listener {function} the callback to be invoked when an event of
  12875. * the specified type occurs
  12876. */
  12877. this.on = function(type, listener) {
  12878. if (!listeners[type]) {
  12879. listeners[type] = [];
  12880. }
  12881. listeners[type] = listeners[type].concat(listener);
  12882. };
  12883. /**
  12884. * Remove a listener for a specified event type.
  12885. * @param type {string} the event name
  12886. * @param listener {function} a function previously registered for this
  12887. * type of event through `on`
  12888. */
  12889. this.off = function(type, listener) {
  12890. var index;
  12891. if (!listeners[type]) {
  12892. return false;
  12893. }
  12894. index = listeners[type].indexOf(listener);
  12895. listeners[type] = listeners[type].slice();
  12896. if (index > -1) { listeners[type].splice(index, 1); }
  12897. return index > -1;
  12898. };
  12899. /**
  12900. * Trigger an event of the specified type on this stream. Any additional
  12901. * arguments to this function are passed as parameters to event listeners.
  12902. * @param type {string} the event name
  12903. */
  12904. this.trigger = function(type) {
  12905. var callbacks, i, length, args;
  12906. callbacks = listeners[type];
  12907. if (!callbacks) {
  12908. return;
  12909. }
  12910. // Slicing the arguments on every invocation of this method
  12911. // can add a significant amount of overhead. Avoid the
  12912. // intermediate object creation for the common case of a
  12913. // single callback argument
  12914. if (arguments.length === 2) {
  12915. length = callbacks.length;
  12916. for (i = 0; i < length; ++i) {
  12917. callbacks[i].call(this, arguments[1]);
  12918. }
  12919. } else {
  12920. args = [];
  12922. for (i = 1; i < arguments.length; ++i) {
  12923. args.push(arguments[i]);
  12924. }
  12925. length = callbacks.length;
  12926. for (i = 0; i < length; ++i) {
  12927. callbacks[i].apply(this, args);
  12928. }
  12929. }
  12930. };
  12931. /**
  12932. * Destroys the stream and cleans up.
  12933. */
  12934. this.dispose = function() {
  12935. listeners = {};
  12936. };
  12937. };
  12938. };
  12939. /**
  12940. * Forwards all `data` events on this stream to the destination stream. The
  12941. * destination stream should provide a method `push` to receive the data
  12942. * events as they arrive.
  12943. * @param destination {stream} the stream that will receive all `data` events
  12946. * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
  12947. */
  12948. Stream.prototype.pipe = function(destination) {
  12949. this.on('data', function(data) {
  12950. destination.push(data);
  12951. });
  12952. this.on('done', function(flushSource) {
  12953. destination.flush(flushSource);
  12954. });
  12955. return destination;
  12956. };
  12957. // Default stream functions that are expected to be overridden to perform
  12958. // actual work. These are provided by the prototype as a sort of no-op
  12959. // implementation so that we don't have to check for their existence in the
  12960. // `pipe` function above.
  12961. Stream.prototype.push = function(data) {
  12962. this.trigger('data', data);
  12963. };
  12964. Stream.prototype.flush = function(flushSource) {
  12965. this.trigger('done', flushSource);
  12966. };
  12967. module.exports = Stream;
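// A minimal usage sketch of this Stream base class (illustrative addition;
// the names below are not part of the bundle):
//
//   var source = new Stream();
//   source.init();
//   var doubler = new Stream();
//   doubler.init();
//   // override push to transform data before re-emitting it downstream
//   doubler.push = function(data) {
//     this.trigger('data', data * 2);
//   };
//   var out = [];
//   doubler.on('data', function(d) { out.push(d); });
//   source.pipe(doubler); // forwards 'data' and 'done' events
//   source.push(21);      // out is now [42]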
  12968. },{}],61:[function(require,module,exports){
  12969. /* jshint ignore:start */
  12970. (function(root) {
  12971. /* jshint ignore:end */
  12972. var URLToolkit = {
  12973. // build an absolute URL from a relative one using the provided baseURL
  12974. // if relativeURL is an absolute URL it will be returned as is.
  12975. buildAbsoluteURL: function(baseURL, relativeURL) {
  12976. // remove any remaining space and CRLF
  12977. relativeURL = relativeURL.trim();
  12978. if (/^[a-z]+:/i.test(relativeURL)) {
  12979. // complete url, not relative
  12980. return relativeURL;
  12981. }
  12982. var relativeURLQuery = null;
  12983. var relativeURLHash = null;
  12984. var relativeURLHashSplit = /^([^#]*)(.*)$/.exec(relativeURL);
  12985. if (relativeURLHashSplit) {
  12986. relativeURLHash = relativeURLHashSplit[2];
  12987. relativeURL = relativeURLHashSplit[1];
  12988. }
  12989. var relativeURLQuerySplit = /^([^\?]*)(.*)$/.exec(relativeURL);
  12990. if (relativeURLQuerySplit) {
  12991. relativeURLQuery = relativeURLQuerySplit[2];
  12992. relativeURL = relativeURLQuerySplit[1];
  12993. }
  12994. var baseURLHashSplit = /^([^#]*)(.*)$/.exec(baseURL);
  12995. if (baseURLHashSplit) {
  12996. baseURL = baseURLHashSplit[1];
  12997. }
  12998. var baseURLQuerySplit = /^([^\?]*)(.*)$/.exec(baseURL);
  12999. if (baseURLQuerySplit) {
  13000. baseURL = baseURLQuerySplit[1];
  13001. }
  13002. var baseURLDomainSplit = /^(([a-z]+:)?\/\/[^:\/]+(:[0-9]+)?)?(\/?.*)$/i.exec(baseURL);
  13003. if (!baseURLDomainSplit) {
  13004. throw new Error('Error trying to parse base URL.');
  13005. }
  13006. // e.g. 'http:', 'https:', ''
  13007. var baseURLProtocol = baseURLDomainSplit[2] || '';
  13008. // e.g. 'http://example.com', '//example.com', ''
  13009. var baseURLProtocolDomain = baseURLDomainSplit[1] || '';
  13010. // e.g. '/a/b/c/playlist.m3u8', 'a/b/c/playlist.m3u8'
  13011. var baseURLPath = baseURLDomainSplit[4];
  13012. if (baseURLPath.indexOf('/') !== 0 && baseURLProtocolDomain !== '') {
  13013. // this handles a base url of http://example.com (missing last slash)
  13014. baseURLPath = '/'+baseURLPath;
  13015. }
  13016. var builtURL = null;
  13017. if (/^\/\//.test(relativeURL)) {
  13018. // relative url starts with '//' so copy protocol (which may be '' if baseUrl didn't provide one)
  13019. builtURL = baseURLProtocol+'//'+URLToolkit.buildAbsolutePath('', relativeURL.substring(2));
  13020. }
  13021. else if (/^\//.test(relativeURL)) {
  13022. // relative url starts with '/' so start from root of domain
  13023. builtURL = baseURLProtocolDomain+'/'+URLToolkit.buildAbsolutePath('', relativeURL.substring(1));
  13024. }
  13025. else {
  13026. builtURL = URLToolkit.buildAbsolutePath(baseURLProtocolDomain+baseURLPath, relativeURL);
  13027. }
  13028. // put the query and hash parts back
  13029. if (relativeURLQuery) {
  13030. builtURL += relativeURLQuery;
  13031. }
  13032. if (relativeURLHash) {
  13033. builtURL += relativeURLHash;
  13034. }
  13035. return builtURL;
  13036. },
  13037. // build an absolute path using the provided basePath
  13038. // adapted from https://developer.mozilla.org/en-US/docs/Web/API/document/cookie#Using_relative_URLs_in_the_path_parameter
  13039. // this does not handle the case where relativePath is "/" or "//". These cases should be handled outside this.
  13040. buildAbsolutePath: function(basePath, relativePath) {
  13041. var sRelPath = relativePath;
  13042. var nUpLn, sDir = '', sPath = basePath.replace(/[^\/]*$/, sRelPath.replace(/(\/|^)(?:\.?\/+)+/g, '$1'));
  13043. for (var nEnd, nStart = 0; nEnd = sPath.indexOf('/../', nStart), nEnd > -1; nStart = nEnd + nUpLn) {
  13044. nUpLn = /^\/(?:\.\.\/)*/.exec(sPath.slice(nEnd))[0].length;
  13045. sDir = (sDir + sPath.substring(nStart, nEnd)).replace(new RegExp('(?:\\\/+[^\\\/]*){0,' + ((nUpLn - 1) / 3) + '}$'), '/');
  13046. }
  13047. return sDir + sPath.substr(nStart);
  13048. }
  13049. };
  13050. /* jshint ignore:start */
  13051. if(typeof exports === 'object' && typeof module === 'object')
  13052. module.exports = URLToolkit;
  13053. else if(typeof define === 'function' && define.amd)
  13054. define([], function() { return URLToolkit; });
  13055. else if(typeof exports === 'object')
  13056. exports["URLToolkit"] = URLToolkit;
  13057. else
  13058. root["URLToolkit"] = URLToolkit;
  13059. })(this);
  13060. /* jshint ignore:end */
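// Usage examples for buildAbsoluteURL (illustrative addition; the results
// follow from the parsing logic above):
//
//   URLToolkit.buildAbsoluteURL('http://example.com/a/b/playlist.m3u8', '../c/seg1.ts');
//   // => 'http://example.com/a/c/seg1.ts'
//   URLToolkit.buildAbsoluteURL('http://example.com/a/playlist.m3u8', '/seg1.ts');
//   // => 'http://example.com/seg1.ts'
//   URLToolkit.buildAbsoluteURL('https://example.com/live/main.m3u8', 'chunk.ts?token=abc');
//   // => 'https://example.com/live/chunk.ts?token=abc'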
  13061. },{}],62:[function(require,module,exports){
  13062. (function (global){
  13063. /**
  13064. * @file add-text-track-data.js
  13065. */
  13066. 'use strict';
  13067. Object.defineProperty(exports, '__esModule', {
  13068. value: true
  13069. });
  13070. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  13071. var _globalWindow = require('global/window');
  13072. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  13073. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  13074. var _videoJs2 = _interopRequireDefault(_videoJs);
  13075. /**
  13076. * Define properties on a cue for backwards compatibility,
  13077. * but warn the user that the way they are using it
  13078. * is deprecated and will be removed at a later date.
  13079. *
  13080. * @param {Cue} cue the cue to add the properties on
  13081. * @private
  13082. */
  13083. var deprecateOldCue = function deprecateOldCue(cue) {
  13084. Object.defineProperties(cue.frame, {
  13085. id: {
  13086. get: function get() {
  13087. _videoJs2['default'].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
  13088. return cue.value.key;
  13089. }
  13090. },
  13091. value: {
  13092. get: function get() {
  13093. _videoJs2['default'].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
  13094. return cue.value.data;
  13095. }
  13096. },
  13097. privateData: {
  13098. get: function get() {
  13099. _videoJs2['default'].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
  13100. return cue.value.data;
  13101. }
  13102. }
  13103. });
  13104. };
  13105. var durationOfVideo = function durationOfVideo(duration) {
  13106. var dur = undefined;
  13107. if (isNaN(duration) || Math.abs(duration) === Infinity) {
  13108. dur = Number.MAX_VALUE;
  13109. } else {
  13110. dur = duration;
  13111. }
  13112. return dur;
  13113. };
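// Quick examples of the clamp above (illustrative addition):
//
//   durationOfVideo(42.5);     // => 42.5
//   durationOfVideo(NaN);      // => Number.MAX_VALUE (duration not yet known)
//   durationOfVideo(Infinity); // => Number.MAX_VALUE (live/endless streams)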
  13114. /**
  13115. * Add text track data to a source handler given the captions and
  13116. * metadata from the buffer.
  13117. *
  13118. * @param {Object} sourceHandler the flash or virtual source buffer
  13119. * @param {Array} captionArray an array of caption data
  13120. * @param {Array} metadataArray an array of metadata
  13121. * @private
  13122. */
  13123. var addTextTrackData = function addTextTrackData(sourceHandler, captionArray, metadataArray) {
  13124. var Cue = _globalWindow2['default'].WebKitDataCue || _globalWindow2['default'].VTTCue;
  13125. if (captionArray) {
  13126. captionArray.forEach(function (caption) {
  13127. this.inbandTextTrack_.addCue(new Cue(caption.startTime + this.timestampOffset, caption.endTime + this.timestampOffset, caption.text));
  13128. }, sourceHandler);
  13129. }
  13130. if (metadataArray) {
  13131. (function () {
  13132. var videoDuration = durationOfVideo(sourceHandler.mediaSource_.duration);
  13133. metadataArray.forEach(function (metadata) {
  13134. var time = metadata.cueTime + this.timestampOffset;
  13135. metadata.frames.forEach(function (frame) {
  13136. var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
  13137. cue.frame = frame;
  13138. cue.value = frame;
  13139. deprecateOldCue(cue);
  13140. this.metadataTrack_.addCue(cue);
  13141. }, this);
  13142. }, sourceHandler);
  13143. // Updating the metadata cues so that
  13144. // the endTime of each cue is the startTime of the next cue
  13145. // the endTime of last cue is the duration of the video
  13146. if (sourceHandler.metadataTrack_ && sourceHandler.metadataTrack_.cues && sourceHandler.metadataTrack_.cues.length) {
  13147. (function () {
  13148. var cues = sourceHandler.metadataTrack_.cues;
  13149. var cuesArray = [];
  13150. // Create a copy of the TextTrackCueList...
  13151. // ...disregarding cues with a falsey value
  13152. for (var i = 0; i < cues.length; i++) {
  13153. if (cues[i]) {
  13154. cuesArray.push(cues[i]);
  13155. }
  13156. }
  13157. // Group cues by their startTime value
  13158. var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
  13159. var timeSlot = obj[cue.startTime] || [];
  13160. timeSlot.push(cue);
  13161. obj[cue.startTime] = timeSlot;
  13162. return obj;
  13163. }, {});
  13164. // Sort startTimes by ascending order
  13165. var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
  13166. return Number(a) - Number(b);
  13167. });
  13168. // Map each cue group's endTime to the next group's startTime
  13169. sortedStartTimes.forEach(function (startTime, idx) {
  13170. var cueGroup = cuesGroupedByStartTime[startTime];
  13171. var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;
  13172. // Map each cue's endTime to the next group's startTime
  13173. cueGroup.forEach(function (cue) {
  13174. cue.endTime = nextTime;
  13175. });
  13176. });
  13177. })();
  13178. }
  13179. })();
  13180. }
  13181. };
  13182. exports['default'] = {
  13183. addTextTrackData: addTextTrackData,
  13184. durationOfVideo: durationOfVideo
  13185. };
  13186. module.exports = exports['default'];
  13187. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  13188. },{"global/window":30}],63:[function(require,module,exports){
  13189. /**
  13190. * Remove the text track from the player if one with matching kind and
  13191. * label properties already exists on the player
  13192. *
  13193. * @param {Object} player the video.js player object
  13194. * @param {String} kind to be considered the text track's `kind` must match
  13195. * @param {String} label to be considered the text track's `label` must match
  13196. * @private
  13197. */
  13198. 'use strict';
  13199. Object.defineProperty(exports, '__esModule', {
  13200. value: true
  13201. });
  13202. var removeExistingTrack = function removeExistingTrack(player, kind, label) {
  13203. var tracks = player.remoteTextTracks() || [];
  13204. for (var i = 0; i < tracks.length; i++) {
  13205. var track = tracks[i];
  13206. if (track.kind === kind && track.label === label) {
  13207. player.removeRemoteTextTrack(track);
  13208. }
  13209. }
  13210. };
  13211. exports.removeExistingTrack = removeExistingTrack;
  13212. /**
  13213. * Clean up text tracks on video.js if they exist
  13214. *
  13215. * @param {Object} player the video.js player object
  13216. * @private
  13217. */
  13218. var cleanupTextTracks = function cleanupTextTracks(player) {
  13219. removeExistingTrack(player, 'captions', 'cc1');
  13220. removeExistingTrack(player, 'metadata', 'Timed Metadata');
  13221. };
  13222. exports.cleanupTextTracks = cleanupTextTracks;
  13223. },{}],64:[function(require,module,exports){
  13224. /**
  13225. * @file codec-utils.js
  13226. */
  13227. /**
  13228. * Check if a codec string refers to an audio codec.
  13229. *
  13230. * @param {String} codec codec string to check
  13231. * @return {Boolean} if this is an audio codec
  13232. * @private
  13233. */
  13234. 'use strict';
  13235. Object.defineProperty(exports, '__esModule', {
  13236. value: true
  13237. });
  13238. var isAudioCodec = function isAudioCodec(codec) {
  13239. return (/mp4a\.\d+\.\d+/i.test(codec));
  13241. };
  13242. /**
  13243. * Check if a codec string refers to a video codec.
  13244. *
  13245. * @param {String} codec codec string to check
  13246. * @return {Boolean} if this is a video codec
  13247. * @private
  13248. */
  13249. var isVideoCodec = function isVideoCodec(codec) {
  13250. return (/avc1\.[\da-f]+/i.test(codec));
  13252. };
  13253. /**
  13254. * Parse a content type header into a type and parameters
  13255. * object
  13256. *
  13257. * @param {String} type the content type header
  13258. * @return {Object} the parsed content-type
  13259. * @private
  13260. */
  13261. var parseContentType = function parseContentType(type) {
  13262. var object = { type: '', parameters: {} };
  13263. var parameters = type.trim().split(';');
  13264. // first parameter should always be content-type
  13265. object.type = parameters.shift().trim();
  13266. parameters.forEach(function (parameter) {
  13267. var pair = parameter.trim().split('=');
  13268. if (pair.length > 1) {
  13269. var _name = pair[0].replace(/"/g, '').trim();
  13270. var value = pair[1].replace(/"/g, '').trim();
  13271. object.parameters[_name] = value;
  13272. }
  13273. });
  13274. return object;
  13275. };
  13276. /**
  13277. * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
  13278. * `avc1.<hhhhhh>`
  13279. *
  13280. * @param {Array} codecs an array of codec strings to fix
  13281. * @return {Array} the translated codec array
  13282. * @private
  13283. */
  13284. var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
  13285. return codecs.map(function (codec) {
  13286. return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
  13287. var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
  13288. var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
  13289. return 'avc1.' + profileHex + '00' + avcLevelHex;
  13290. });
  13291. });
  13292. };
  13293. exports['default'] = {
  13294. isAudioCodec: isAudioCodec,
  13295. parseContentType: parseContentType,
  13296. isVideoCodec: isVideoCodec,
  13297. translateLegacyCodecs: translateLegacyCodecs
  13298. };
  13299. module.exports = exports['default'];
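// Usage examples for the helpers above (illustrative addition):
//
//   isAudioCodec('mp4a.40.2');             // => true
//   isVideoCodec('avc1.4d400d');           // => true
//   parseContentType('video/mp2t; charset="utf-8"');
//   // => { type: 'video/mp2t', parameters: { charset: 'utf-8' } }
//   translateLegacyCodecs(['avc1.66.30']); // => ['avc1.42001e'] (0x42, 0x1e)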
  13300. },{}],65:[function(require,module,exports){
  13301. /**
  13302. * @file create-text-tracks-if-necessary.js
  13303. */
  13304. 'use strict';
  13305. Object.defineProperty(exports, '__esModule', {
  13306. value: true
  13307. });
  13308. var _cleanupTextTracks = require('./cleanup-text-tracks');
  13309. /**
  13310. * Create text tracks on video.js if they exist on a segment.
  13311. *
  13312. * @param {Object} sourceBuffer the VSB or FSB
  13313. * @param {Object} mediaSource the HTML or Flash media source
  13314. * @param {Object} segment the segment that may contain the text track
  13315. * @private
  13316. */
  13317. var createTextTracksIfNecessary = function createTextTracksIfNecessary(sourceBuffer, mediaSource, segment) {
  13318. var player = mediaSource.player_;
  13319. // create an in-band caption track if one is present in the segment
  13320. if (segment.captions && segment.captions.length && !sourceBuffer.inbandTextTrack_) {
  13321. (0, _cleanupTextTracks.removeExistingTrack)(player, 'captions', 'cc1');
  13322. sourceBuffer.inbandTextTrack_ = player.addRemoteTextTrack({
  13323. kind: 'captions',
  13324. label: 'cc1'
  13325. }, false).track;
  13326. }
  13327. if (segment.metadata && segment.metadata.length && !sourceBuffer.metadataTrack_) {
  13328. (0, _cleanupTextTracks.removeExistingTrack)(player, 'metadata', 'Timed Metadata');
  13329. sourceBuffer.metadataTrack_ = player.addRemoteTextTrack({
  13330. kind: 'metadata',
  13331. label: 'Timed Metadata'
  13332. }, false).track;
  13333. sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
  13334. }
  13335. };
  13336. exports['default'] = createTextTracksIfNecessary;
  13337. module.exports = exports['default'];
  13338. },{"./cleanup-text-tracks":63}],66:[function(require,module,exports){
  13339. /**
  13340. * @file flash-constants.js
  13341. */
  13342. /**
  13343. * The maximum size in bytes for append operations to the video.js
  13344. * SWF. Calling through to Flash blocks and can be expensive so
  13345. * we chunk data and pass through 32KB at a time (BYTES_PER_CHUNK
  13346. * below), yielding to the browser between chunks. Appending larger
  13347. * chunks causes dropped frames and sluggish UI responsiveness.
  13349. *
  13350. * @private
  13351. */
  13352. "use strict";
  13353. Object.defineProperty(exports, "__esModule", {
  13354. value: true
  13355. });
  13356. var flashConstants = {
  13357. // times in milliseconds
  13358. TIME_BETWEEN_CHUNKS: 1,
  13359. BYTES_PER_CHUNK: 1024 * 32
  13360. };
  13361. exports["default"] = flashConstants;
  13362. module.exports = exports["default"];
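// Worked throughput ceiling for the constants above (illustrative addition):
// one 32KB chunk per 1ms tick is 32768 bytes * 1000 ticks/s = 32768000
// bytes/s, roughly 31MB/s into the SWF, before base64 overhead (~4/3) and
// the time the synchronous append itself takes.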
  13363. },{}],67:[function(require,module,exports){
  13364. (function (global){
  13365. /**
  13366. * @file flash-media-source.js
  13367. */
  13368. 'use strict';
  13369. Object.defineProperty(exports, '__esModule', {
  13370. value: true
  13371. });
  13372. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  13373. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  13374. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  13375. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  13376. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  13377. var _globalDocument = require('global/document');
  13378. var _globalDocument2 = _interopRequireDefault(_globalDocument);
  13379. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  13380. var _videoJs2 = _interopRequireDefault(_videoJs);
  13381. var _flashSourceBuffer = require('./flash-source-buffer');
  13382. var _flashSourceBuffer2 = _interopRequireDefault(_flashSourceBuffer);
  13383. var _flashConstants = require('./flash-constants');
  13384. var _flashConstants2 = _interopRequireDefault(_flashConstants);
  13385. var _codecUtils = require('./codec-utils');
  13386. var _cleanupTextTracks = require('./cleanup-text-tracks');
  13387. /**
  13388. * A Flash implementation of HTML MediaSources and a polyfill
  13389. * for browsers that don't support native HTML MediaSources.
  13390. *
  13391. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
  13392. * @class FlashMediaSource
  13393. * @extends videojs.EventTarget
  13394. */
  13395. var FlashMediaSource = (function (_videojs$EventTarget) {
  13396. _inherits(FlashMediaSource, _videojs$EventTarget);
  13397. function FlashMediaSource() {
  13398. var _this = this;
  13399. _classCallCheck(this, FlashMediaSource);
  13400. _get(Object.getPrototypeOf(FlashMediaSource.prototype), 'constructor', this).call(this);
  13401. this.sourceBuffers = [];
  13402. this.readyState = 'closed';
  13403. this.on(['sourceopen', 'webkitsourceopen'], function (event) {
  13404. // find the swf where we will push media data
  13405. _this.swfObj = _globalDocument2['default'].getElementById(event.swfId);
  13406. _this.player_ = (0, _videoJs2['default'])(_this.swfObj.parentNode);
  13407. _this.tech_ = _this.swfObj.tech;
  13408. _this.readyState = 'open';
  13409. _this.tech_.on('seeking', function () {
  13410. var i = _this.sourceBuffers.length;
  13411. while (i--) {
  13412. _this.sourceBuffers[i].abort();
  13413. }
  13414. });
  13415. if (_this.tech_.hls) {
  13416. _this.tech_.hls.on('dispose', function () {
  13417. (0, _cleanupTextTracks.cleanupTextTracks)(_this.player_);
  13418. });
  13419. }
  13420. // trigger load events
  13421. if (_this.swfObj) {
  13422. _this.swfObj.vjs_load();
  13423. }
  13424. });
  13425. }
  13426. /**
  13427. * Set or return the presentation duration.
  13428. *
  13429. * @param {Double} value the duration of the media in seconds
  13430. * @return {Double} the current presentation duration
  13431. * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
  13432. */
  13433. /**
  13434. * We have this function so that the html and flash interfaces
  13435. * are the same.
  13436. *
  13437. * @private
  13438. */
  13439. _createClass(FlashMediaSource, [{
  13440. key: 'addSeekableRange_',
  13441. value: function addSeekableRange_() {}
  13442. // intentional no-op
  13443. /**
  13444. * Create a new flash source buffer and add it to our flash media source.
  13445. *
  13446. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
  13447. * @param {String} type the content-type of the source
  13448. * @return {Object} the flash source buffer
  13449. */
  13450. }, {
  13451. key: 'addSourceBuffer',
  13452. value: function addSourceBuffer(type) {
  13453. var parsedType = (0, _codecUtils.parseContentType)(type);
  13454. var sourceBuffer = undefined;
  13455. // if this is an FLV type, we'll push data to flash
  13456. if (parsedType.type === 'video/mp2t') {
  13457. // Flash source buffers
  13458. sourceBuffer = new _flashSourceBuffer2['default'](this);
  13459. } else {
  13460. throw new Error('NotSupportedError (Video.js)');
  13461. }
  13462. this.sourceBuffers.push(sourceBuffer);
  13463. return sourceBuffer;
  13464. }
  13465. /**
  13466. * Signals the end of the stream.
  13467. *
  13468. * @link https://w3c.github.io/media-source/#widl-MediaSource-endOfStream-void-EndOfStreamError-error
  13469. * @param {String=} error Signals that a playback error
  13470. * has occurred. If specified, it must be either "network" or
  13471. * "decode".
  13472. */
  13473. }, {
  13474. key: 'endOfStream',
  13475. value: function endOfStream(error) {
  13476. if (error === 'network') {
  13477. // MEDIA_ERR_NETWORK
  13478. this.tech_.error(2);
  13479. } else if (error === 'decode') {
  13480. // MEDIA_ERR_DECODE
  13481. this.tech_.error(3);
  13482. }
  13483. if (this.readyState !== 'ended') {
  13484. this.readyState = 'ended';
  13485. this.swfObj.vjs_endOfStream();
  13486. }
  13487. }
  13488. }]);
  13489. return FlashMediaSource;
  13490. })(_videoJs2['default'].EventTarget);
  13491. exports['default'] = FlashMediaSource;
  13492. try {
  13493. Object.defineProperty(FlashMediaSource.prototype, 'duration', {
  13494. /**
  13495. * Return the presentation duration.
  13496. *
  13497. * @return {Double} the duration of the media in seconds
  13498. * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
  13499. */
  13500. get: function get() {
  13501. if (!this.swfObj) {
  13502. return NaN;
  13503. }
  13504. // get the current duration from the SWF
  13505. return this.swfObj.vjs_getProperty('duration');
  13506. },
  13507. /**
  13508. * Set the presentation duration.
  13509. *
  13510. * @param {Double} value the duration of the media in seconds
  13511. * @return {Double} the duration of the media in seconds
  13512. * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
  13513. */
  13514. set: function set(value) {
  13515. var i = undefined;
  13516. var oldDuration = this.swfObj.vjs_getProperty('duration');
  13517. this.swfObj.vjs_setProperty('duration', value);
  13518. if (value < oldDuration) {
  13519. // In MSE, this triggers the range removal algorithm which causes
  13520. // an update to occur
  13521. for (i = 0; i < this.sourceBuffers.length; i++) {
  13522. this.sourceBuffers[i].remove(value, oldDuration);
  13523. }
  13524. }
  13525. return value;
  13526. }
  13527. });
  13528. } catch (e) {
  13529. // IE8 throws if defineProperty is called on a non-DOM node. We
  13530. // don't support IE8 but we shouldn't throw an error if loaded
  13531. // there.
  13532. FlashMediaSource.prototype.duration = NaN;
  13533. }
  13534. for (var property in _flashConstants2['default']) {
  13535. FlashMediaSource[property] = _flashConstants2['default'][property];
  13536. }
  13537. module.exports = exports['default'];
  13538. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  13539. },{"./cleanup-text-tracks":63,"./codec-utils":64,"./flash-constants":66,"./flash-source-buffer":68,"global/document":29}],68:[function(require,module,exports){
  13540. (function (global){
  13541. /**
  13542. * @file flash-source-buffer.js
  13543. */
  13544. 'use strict';
  13545. Object.defineProperty(exports, '__esModule', {
  13546. value: true
  13547. });
  13548. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  13549. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  13550. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  13551. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  13552. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  13553. var _globalWindow = require('global/window');
  13554. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  13555. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  13556. var _videoJs2 = _interopRequireDefault(_videoJs);
  13557. var _muxJsLibFlv = require('mux.js/lib/flv');
  13558. var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
  13559. var _removeCuesFromTrack = require('./remove-cues-from-track');
  13560. var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
  13561. var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
  13562. var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
  13563. var _addTextTrackData = require('./add-text-track-data');
  13564. var _flashTransmuxerWorker = require('./flash-transmuxer-worker');
  13565. var _flashTransmuxerWorker2 = _interopRequireDefault(_flashTransmuxerWorker);
  13566. var _webworkify = require('webworkify');
  13567. var _webworkify2 = _interopRequireDefault(_webworkify);
  13568. var _flashConstants = require('./flash-constants');
  13569. var _flashConstants2 = _interopRequireDefault(_flashConstants);
  13570. /**
  13571. * A wrapper around the setTimeout function that uses
  13572. * the flash constant time between ticks value.
  13573. *
  13574. * @param {Function} func the function callback to run
  13575. * @private
  13576. */
  13577. var scheduleTick = function scheduleTick(func) {
  13578. // Chrome doesn't invoke requestAnimationFrame callbacks
  13579. // in background tabs, so use setTimeout.
  13580. _globalWindow2['default'].setTimeout(func, _flashConstants2['default'].TIME_BETWEEN_CHUNKS);
  13581. };
  13582. /**
  13583. * Generates a random string of max length 6
  13584. *
  13585. * @return {String} the randomly generated string
  13586. * @function generateRandomString
  13587. * @private
  13588. */
  13589. var generateRandomString = function generateRandomString() {
  13590. return Math.random().toString(36).slice(2, 8);
  13591. };
  13592. /**
  13593. * Round a number to a specified number of places much like
  13594. * toFixed but return a number instead of a string representation.
  13595. *
  13596. * @param {Number} num A number
  13597. * @param {Number} places The number of decimal places which to
  13598. * round
  13599. * @private
  13600. */
  13601. var toDecimalPlaces = function toDecimalPlaces(num, places) {
  13602. if (typeof places !== 'number' || places < 0) {
  13603. places = 0;
  13604. }
  13605. var scale = Math.pow(10, places);
  13606. return Math.round(num * scale) / scale;
  13607. };
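// Examples of the rounding helper above (illustrative addition):
//
//   toDecimalPlaces(1.23456, 3); // => 1.235
//   toDecimalPlaces(1.23456);    // => 1 (places defaults to 0)
//   toDecimalPlaces(1.5, -2);    // => 2 (negative places clamp to 0)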
  13608. /**
  13609. * A SourceBuffer implementation for Flash rather than HTML.
  13610. *
  13611. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
  13612. * @param {Object} mediaSource the flash media source
  13613. * @class FlashSourceBuffer
  13614. * @extends videojs.EventTarget
  13615. */
  13616. var FlashSourceBuffer = (function (_videojs$EventTarget) {
  13617. _inherits(FlashSourceBuffer, _videojs$EventTarget);
  13618. function FlashSourceBuffer(mediaSource) {
  13619. var _this = this;
  13620. _classCallCheck(this, FlashSourceBuffer);
  13621. _get(Object.getPrototypeOf(FlashSourceBuffer.prototype), 'constructor', this).call(this);
  13622. var encodedHeader = undefined;
  13623. // Start off using the globally defined value but refine
  13624. // as we append data into flash
  13625. this.chunkSize_ = _flashConstants2['default'].BYTES_PER_CHUNK;
  13626. // byte arrays queued to be appended
  13627. this.buffer_ = [];
  13628. // the total number of queued bytes
  13629. this.bufferSize_ = 0;
  13630. // to be able to determine the correct position to seek to, we
  13631. // need to retain information about the mapping between the
  13632. // media timeline and PTS values
  13633. this.basePtsOffset_ = NaN;
  13634. this.mediaSource_ = mediaSource;
  13635. this.audioBufferEnd_ = NaN;
  13636. this.videoBufferEnd_ = NaN;
  13637. // indicates whether the asynchronous continuation of an operation
  13638. // is still being processed
  13639. // see https://w3c.github.io/media-source/#widl-SourceBuffer-updating
  13640. this.updating = false;
  13641. this.timestampOffset_ = 0;
  13642. encodedHeader = _globalWindow2['default'].btoa(String.fromCharCode.apply(null, Array.prototype.slice.call(_muxJsLibFlv2['default'].getFlvHeader())));
  13643. // create function names with added randomness for the global callbacks flash will use
  13644. // to get data from javascript into the swf. Random strings are added as a safety
  13645. // measure for pages with multiple players since these functions will be global
  13646. // instead of per instance. When making a call to the swf, the browser generates a
  13647. // try catch code snippet, but just takes the function name and writes out an unquoted
  13648. // call to that function. If the player id has any special characters, this will result
  13649. // in an error, so safePlayerId replaces all special characters to '_'
  13650. var safePlayerId = this.mediaSource_.player_.id().replace(/[^a-zA-Z0-9]/g, '_');
  13651. this.flashEncodedHeaderName_ = 'vjs_flashEncodedHeader_' + safePlayerId + generateRandomString();
  13652. this.flashEncodedDataName_ = 'vjs_flashEncodedData_' + safePlayerId + generateRandomString();
  13653. _globalWindow2['default'][this.flashEncodedHeaderName_] = function () {
  13654. delete _globalWindow2['default'][_this.flashEncodedHeaderName_];
  13655. return encodedHeader;
  13656. };
  13657. this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedHeaderName_);
  13658. this.transmuxer_ = (0, _webworkify2['default'])(_flashTransmuxerWorker2['default']);
  13659. this.transmuxer_.postMessage({ action: 'init', options: {} });
  13660. this.transmuxer_.onmessage = function (event) {
  13661. if (event.data.action === 'data') {
  13662. _this.receiveBuffer_(event.data.segment);
  13663. }
  13664. };
  13665. this.one('updateend', function () {
  13666. _this.mediaSource_.tech_.trigger('loadedmetadata');
  13667. });
  13668. Object.defineProperty(this, 'timestampOffset', {
  13669. get: function get() {
  13670. return this.timestampOffset_;
  13671. },
  13672. set: function set(val) {
  13673. if (typeof val === 'number' && val >= 0) {
  13674. this.timestampOffset_ = val;
  13675. // We have to tell flash to expect a discontinuity
  13676. this.mediaSource_.swfObj.vjs_discontinuity();
  13677. // the media <-> PTS mapping must be re-established after
  13678. // the discontinuity
  13679. this.basePtsOffset_ = NaN;
  13680. this.audioBufferEnd_ = NaN;
  13681. this.videoBufferEnd_ = NaN;
  13682. this.transmuxer_.postMessage({ action: 'reset' });
  13683. }
  13684. }
  13685. });
  13686. Object.defineProperty(this, 'buffered', {
  13687. get: function get() {
  13688. if (!this.mediaSource_ || !this.mediaSource_.swfObj || !('vjs_getProperty' in this.mediaSource_.swfObj)) {
  13689. return _videoJs2['default'].createTimeRange();
  13690. }
  13691. var buffered = this.mediaSource_.swfObj.vjs_getProperty('buffered');
  13692. if (buffered && buffered.length) {
  13693. buffered[0][0] = toDecimalPlaces(buffered[0][0], 3);
  13694. buffered[0][1] = toDecimalPlaces(buffered[0][1], 3);
  13695. }
  13696. return _videoJs2['default'].createTimeRanges(buffered);
  13697. }
  13698. });
  13699. // On a seek we remove all text track data since flash has no concept
  13700. // of a buffered-range and everything else is reset on seek
  13701. this.mediaSource_.player_.on('seeked', function () {
  13702. (0, _removeCuesFromTrack2['default'])(0, Infinity, _this.metadataTrack_);
  13703. (0, _removeCuesFromTrack2['default'])(0, Infinity, _this.inbandTextTrack_);
  13704. });
  13705. this.mediaSource_.player_.tech_.hls.on('dispose', function () {
  13706. _this.transmuxer_.terminate();
  13707. });
  13708. }
  13709. /**
  13710. * Append bytes to the source buffer's internal buffer; in this case we
  13711. * have to append them to the SWF object.
  13712. *
  13713. * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
  13714. * @param {Array} bytes
  13715. */
  13716. _createClass(FlashSourceBuffer, [{
  13717. key: 'appendBuffer',
  13718. value: function appendBuffer(bytes) {
  13719. var error = undefined;
  13720. if (this.updating) {
  13721. error = new Error('SourceBuffer.append() cannot be called ' + 'while an update is in progress');
  13722. error.name = 'InvalidStateError';
  13723. error.code = 11;
  13724. throw error;
  13725. }
  13726. this.updating = true;
  13727. this.mediaSource_.readyState = 'open';
  13728. this.trigger({ type: 'update' });
  13729. this.transmuxer_.postMessage({
  13730. action: 'push',
  13731. data: bytes.buffer,
  13732. byteOffset: bytes.byteOffset,
  13733. byteLength: bytes.byteLength
  13734. }, [bytes.buffer]);
  13735. this.transmuxer_.postMessage({ action: 'flush' });
  13736. }
  13737. /**
  13738. * Reset the parser and remove any data queued to be sent to the SWF.
  13739. *
  13740. * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
  13741. */
  13742. }, {
  13743. key: 'abort',
  13744. value: function abort() {
  13745. this.buffer_ = [];
  13746. this.bufferSize_ = 0;
  13747. this.mediaSource_.swfObj.vjs_abort();
  13748. // report any outstanding updates have ended
  13749. if (this.updating) {
  13750. this.updating = false;
  13751. this.trigger({ type: 'updateend' });
  13752. }
  13753. }
  13754. /**
  13755. * Flash cannot remove ranges already buffered in the NetStream
  13756. * but seeking clears the buffer entirely. For most purposes,
  13757. * having this operation act as a no-op is acceptable.
  13758. *
  13759. * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
  13760. * @param {Double} start start of the section to remove
  13761. * @param {Double} end end of the section to remove
  13762. */
  13763. }, {
  13764. key: 'remove',
  13765. value: function remove(start, end) {
  13766. (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
  13767. (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTrack_);
  13768. this.trigger({ type: 'update' });
  13769. this.trigger({ type: 'updateend' });
  13770. }
  13771. /**
  13772. * Receive a buffer from the flv.
  13773. *
  13774. * @param {Object} segment
  13775. * @private
  13776. */
  13777. }, {
  13778. key: 'receiveBuffer_',
  13779. value: function receiveBuffer_(segment) {
  13780. var _this2 = this;
  13781. // create an in-band caption track if one is present in the segment
  13782. (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
  13783. (0, _addTextTrackData.addTextTrackData)(this, segment.captions, segment.metadata);
  13784. // Do this asynchronously since convertTagsToData_ can be time consuming
  13785. scheduleTick(function () {
  13786. var flvBytes = _this2.convertTagsToData_(segment);
  13787. if (_this2.buffer_.length === 0) {
  13788. scheduleTick(_this2.processBuffer_.bind(_this2));
  13789. }
  13790. if (flvBytes) {
  13791. _this2.buffer_.push(flvBytes);
  13792. _this2.bufferSize_ += flvBytes.byteLength;
  13793. }
  13794. });
  13795. }
  13796. /**
  13797. * Append a portion of the current buffer to the SWF.
  13798. *
  13799. * @private
  13800. */
  13801. }, {
  13802. key: 'processBuffer_',
  13803. value: function processBuffer_() {
  13804. var _this3 = this;
  13805. var chunkSize = _flashConstants2['default'].BYTES_PER_CHUNK;
  13806. if (!this.buffer_.length) {
  13807. if (this.updating !== false) {
  13808. this.updating = false;
  13809. this.trigger({ type: 'updateend' });
  13810. }
  13811. // do nothing if the buffer is empty
  13812. return;
  13813. }
  13814. // concatenate appends up to the max append size
  13815. var chunk = this.buffer_[0].subarray(0, chunkSize);
  13816. // requeue any bytes that won't make it this round
  13817. if (chunk.byteLength < chunkSize || this.buffer_[0].byteLength === chunkSize) {
  13818. this.buffer_.shift();
  13819. } else {
  13820. this.buffer_[0] = this.buffer_[0].subarray(chunkSize);
  13821. }
  13822. this.bufferSize_ -= chunk.byteLength;
  13823. // base64 encode the bytes
  13824. var binary = [];
  13825. var length = chunk.byteLength;
  13826. for (var i = 0; i < length; i++) {
  13827. binary.push(String.fromCharCode(chunk[i]));
  13828. }
  13829. var b64str = _globalWindow2['default'].btoa(binary.join(''));
  13830. _globalWindow2['default'][this.flashEncodedDataName_] = function () {
  13831. // schedule another processBuffer to process any left over data or to
  13832. // trigger updateend
  13833. scheduleTick(_this3.processBuffer_.bind(_this3));
  13834. delete _globalWindow2['default'][_this3.flashEncodedDataName_];
  13835. return b64str;
  13836. };
  13837. // Notify the swf that segment data is ready to be appended
  13838. this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedDataName_);
  13839. }
  13840. /**
  13841. * Turns an array of flv tags into a Uint8Array representing the
  13842. * flv data. Also removes any tags that are before the current
  13843. * time so that playback begins at or slightly after the right
  13844. * place on a seek
  13845. *
  13846. * @private
  13847. * @param {Object} segmentData object of segment data
  13848. */
  13849. }, {
  13850. key: 'convertTagsToData_',
  13851. value: function convertTagsToData_(segmentData) {
  13852. var segmentByteLength = 0;
  13853. var tech = this.mediaSource_.tech_;
  13854. var videoTargetPts = 0;
  13855. var segment = undefined;
  13856. var videoTags = segmentData.tags.videoTags;
  13857. var audioTags = segmentData.tags.audioTags;
  13858. // Establish the media timeline to PTS translation if we don't
  13859. // have one already
  13860. if (isNaN(this.basePtsOffset_) && (videoTags.length || audioTags.length)) {
  13861. // We know there is at least one video or audio tag, but since we may not have both,
13862. // we use pts: Infinity for the missing tag. This will force the following Math.min
13863. // call to use the proper pts value since it will always be less than Infinity
  13864. var firstVideoTag = videoTags[0] || { pts: Infinity };
  13865. var firstAudioTag = audioTags[0] || { pts: Infinity };
  13866. this.basePtsOffset_ = Math.min(firstAudioTag.pts, firstVideoTag.pts);
  13867. }
  13868. if (tech.seeking()) {
  13869. // Do not use previously saved buffer end values while seeking since buffer
  13870. // is cleared on all seeks
  13871. this.videoBufferEnd_ = NaN;
  13872. this.audioBufferEnd_ = NaN;
  13873. }
  13874. if (isNaN(this.videoBufferEnd_)) {
  13875. if (tech.buffered().length) {
  13876. videoTargetPts = tech.buffered().end(0) - this.timestampOffset;
  13877. }
  13878. // Trim to currentTime if seeking
  13879. if (tech.seeking()) {
  13880. videoTargetPts = Math.max(videoTargetPts, tech.currentTime() - this.timestampOffset);
  13881. }
  13882. // PTS values are represented in milliseconds
  13883. videoTargetPts *= 1e3;
  13884. videoTargetPts += this.basePtsOffset_;
  13885. } else {
  13886. // Add a fudge factor of 0.1 to the last video pts appended since a rendition change
13887. // could append an overlapping segment, in which case there is a high likelihood
  13888. // a tag could have a matching pts to videoBufferEnd_, which would cause
  13889. // that tag to get appended by the tag.pts >= targetPts check below even though it
  13890. // is a duplicate of what was previously appended
  13891. videoTargetPts = this.videoBufferEnd_ + 0.1;
  13892. }
  13893. // filter complete GOPs with a presentation time less than the seek target/end of buffer
  13894. var currentIndex = videoTags.length;
  13895. // if the last tag is beyond videoTargetPts, then do not search the list for a GOP
  13896. // since our videoTargetPts lies in a future segment
  13897. if (currentIndex && videoTags[currentIndex - 1].pts >= videoTargetPts) {
  13898. // Start by walking backwards from the end of the list until we reach a tag that
  13899. // is equal to or less than videoTargetPts
  13900. while (--currentIndex) {
  13901. var currentTag = videoTags[currentIndex];
  13902. if (currentTag.pts > videoTargetPts) {
  13903. continue;
  13904. }
  13905. // if we see a keyFrame or metadata tag once we've gone below videoTargetPts,
  13906. // exit the loop as this is the start of the GOP that we want to append
  13907. if (currentTag.keyFrame || currentTag.metaDataTag) {
  13908. break;
  13909. }
  13910. }
  13911. // We need to check if there are any metadata tags that come before currentIndex
  13912. // as those will be metadata tags associated with the GOP we are appending
  13913. // There could be 0 to 2 metadata tags that come before the currentIndex depending
  13914. // on what videoTargetPts is and whether the transmuxer prepended metadata tags to this
  13915. // key frame
  13916. while (currentIndex) {
  13917. var nextTag = videoTags[currentIndex - 1];
  13918. if (!nextTag.metaDataTag) {
  13919. break;
  13920. }
  13921. currentIndex--;
  13922. }
  13923. }
  13924. var filteredVideoTags = videoTags.slice(currentIndex);
  13925. var audioTargetPts = undefined;
  13926. if (isNaN(this.audioBufferEnd_)) {
  13927. audioTargetPts = videoTargetPts;
  13928. } else {
13929. // Add a fudge factor of 0.1 to the last audio pts appended since a rendition change
13930. // could append an overlapping segment, in which case there is a high likelihood
13931. // a tag could have a matching pts to audioBufferEnd_, which would cause
  13932. // that tag to get appended by the tag.pts >= targetPts check below even though it
  13933. // is a duplicate of what was previously appended
  13934. audioTargetPts = this.audioBufferEnd_ + 0.1;
  13935. }
  13936. if (filteredVideoTags.length) {
  13937. // If targetPts intersects a GOP and we appended the tags for the GOP that came
  13938. // before targetPts, we want to make sure to trim audio tags at the pts
  13939. // of the first video tag to avoid brief moments of silence
  13940. audioTargetPts = Math.min(audioTargetPts, filteredVideoTags[0].pts);
  13941. }
  13942. // skip tags with a presentation time less than the seek target/end of buffer
  13943. currentIndex = 0;
  13944. while (currentIndex < audioTags.length) {
  13945. if (audioTags[currentIndex].pts >= audioTargetPts) {
  13946. break;
  13947. }
  13948. currentIndex++;
  13949. }
  13950. var filteredAudioTags = audioTags.slice(currentIndex);
  13951. // update the audio and video buffer ends
  13952. if (filteredAudioTags.length) {
  13953. this.audioBufferEnd_ = filteredAudioTags[filteredAudioTags.length - 1].pts;
  13954. }
  13955. if (filteredVideoTags.length) {
  13956. this.videoBufferEnd_ = filteredVideoTags[filteredVideoTags.length - 1].pts;
  13957. }
  13958. var tags = this.getOrderedTags_(filteredVideoTags, filteredAudioTags);
  13959. if (tags.length === 0) {
  13960. return;
  13961. }
  13962. // If we are appending data that comes before our target pts, we want to tell
  13963. // the swf to adjust its notion of current time to account for the extra tags
  13964. // we are appending to complete the GOP that intersects with targetPts
  13965. if (tags[0].pts < videoTargetPts && tech.seeking()) {
  13966. var fudgeFactor = 1 / 30;
  13967. var currentTime = tech.currentTime();
  13968. var diff = (videoTargetPts - tags[0].pts) / 1e3;
  13969. var adjustedTime = currentTime - diff;
  13970. if (adjustedTime < fudgeFactor) {
  13971. adjustedTime = 0;
  13972. }
  13973. try {
  13974. this.mediaSource_.swfObj.vjs_adjustCurrentTime(adjustedTime);
  13975. } catch (e) {
13976. // no-op for backwards compatibility of swf. If adjustCurrentTime fails,
13977. // the swf may incorrectly report currentTime and buffered ranges
13978. // but should not affect playback other than the time displayed on the
13979. // progress bar being inaccurate
  13980. }
  13981. }
  13982. // concatenate the bytes into a single segment
  13983. for (var i = 0; i < tags.length; i++) {
  13984. segmentByteLength += tags[i].bytes.byteLength;
  13985. }
  13986. segment = new Uint8Array(segmentByteLength);
  13987. for (var i = 0, j = 0; i < tags.length; i++) {
  13988. segment.set(tags[i].bytes, j);
  13989. j += tags[i].bytes.byteLength;
  13990. }
  13991. return segment;
  13992. }
  13993. /**
  13994. * Assemble the FLV tags in decoder order.
  13995. *
  13996. * @private
  13997. * @param {Array} videoTags list of video tags
  13998. * @param {Array} audioTags list of audio tags
  13999. */
  14000. }, {
  14001. key: 'getOrderedTags_',
  14002. value: function getOrderedTags_(videoTags, audioTags) {
  14003. var tag = undefined;
  14004. var tags = [];
  14005. while (videoTags.length || audioTags.length) {
  14006. if (!videoTags.length) {
  14007. // only audio tags remain
  14008. tag = audioTags.shift();
  14009. } else if (!audioTags.length) {
  14010. // only video tags remain
  14011. tag = videoTags.shift();
  14012. } else if (audioTags[0].dts < videoTags[0].dts) {
  14013. // audio should be decoded next
  14014. tag = audioTags.shift();
  14015. } else {
  14016. // video should be decoded next
  14017. tag = videoTags.shift();
  14018. }
  14019. tags.push(tag);
  14020. }
  14021. return tags;
  14022. }
  14023. }]);
  14024. return FlashSourceBuffer;
  14025. })(_videoJs2['default'].EventTarget);
  14026. exports['default'] = FlashSourceBuffer;
  14027. module.exports = exports['default'];
  14028. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  14029. },{"./add-text-track-data":62,"./create-text-tracks-if-necessary":65,"./flash-constants":66,"./flash-transmuxer-worker":69,"./remove-cues-from-track":71,"global/window":30,"mux.js/lib/flv":44,"webworkify":75}],69:[function(require,module,exports){
  14030. /**
  14031. * @file flash-transmuxer-worker.js
  14032. */
  14033. 'use strict';
  14034. Object.defineProperty(exports, '__esModule', {
  14035. value: true
  14036. });
  14037. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  14038. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  14039. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  14040. var _globalWindow = require('global/window');
  14041. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  14042. var _muxJsLibFlv = require('mux.js/lib/flv');
  14043. var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
  14044. /**
  14045. * Re-emits transmuxer events by converting them into messages to the
  14046. * world outside the worker.
  14047. *
  14048. * @param {Object} transmuxer the transmuxer to wire events on
  14049. * @private
  14050. */
  14051. var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
  14052. transmuxer.on('data', function (segment) {
  14053. _globalWindow2['default'].postMessage({
  14054. action: 'data',
  14055. segment: segment
  14056. });
  14057. });
  14058. transmuxer.on('done', function (data) {
  14059. _globalWindow2['default'].postMessage({ action: 'done' });
  14060. });
  14061. };
  14062. /**
  14063. * All incoming messages route through this hash. If no function exists
  14064. * to handle an incoming message, then we ignore the message.
  14065. *
  14066. * @class MessageHandlers
  14067. * @param {Object} options the options to initialize with
  14068. */
  14069. var MessageHandlers = (function () {
  14070. function MessageHandlers(options) {
  14071. _classCallCheck(this, MessageHandlers);
  14072. this.options = options || {};
  14073. this.init();
  14074. }
  14075. /**
14076. * Our web worker interface so that things can talk to mux.js
  14077. * that will be running in a web worker. The scope is passed to this by
  14078. * webworkify.
  14079. *
  14080. * @param {Object} self the scope for the web worker
  14081. */
  14082. /**
  14083. * initialize our web worker and wire all the events.
  14084. */
  14085. _createClass(MessageHandlers, [{
  14086. key: 'init',
  14087. value: function init() {
  14088. if (this.transmuxer) {
  14089. this.transmuxer.dispose();
  14090. }
  14091. this.transmuxer = new _muxJsLibFlv2['default'].Transmuxer(this.options);
  14092. wireTransmuxerEvents(this.transmuxer);
  14093. }
  14094. /**
  14095. * Adds data (a ts segment) to the start of the transmuxer pipeline for
  14096. * processing.
  14097. *
  14098. * @param {ArrayBuffer} data data to push into the muxer
  14099. */
  14100. }, {
  14101. key: 'push',
  14102. value: function push(data) {
  14103. // Cast array buffer to correct type for transmuxer
  14104. var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
  14105. this.transmuxer.push(segment);
  14106. }
  14107. /**
  14108. * Recreate the transmuxer so that the next segment added via `push`
14109. * starts with a fresh transmuxer.
  14110. */
  14111. }, {
  14112. key: 'reset',
  14113. value: function reset() {
  14114. this.init();
  14115. }
  14116. /**
  14117. * Forces the pipeline to finish processing the last segment and emit its
  14118. * results.
  14119. */
  14120. }, {
  14121. key: 'flush',
  14122. value: function flush() {
  14123. this.transmuxer.flush();
  14124. }
  14125. }]);
  14126. return MessageHandlers;
  14127. })();
  14128. var FlashTransmuxerWorker = function FlashTransmuxerWorker(self) {
  14129. self.onmessage = function (event) {
  14130. if (event.data.action === 'init' && event.data.options) {
  14131. this.messageHandlers = new MessageHandlers(event.data.options);
  14132. return;
  14133. }
  14134. if (!this.messageHandlers) {
  14135. this.messageHandlers = new MessageHandlers();
  14136. }
  14137. if (event.data && event.data.action && event.data.action !== 'init') {
  14138. if (this.messageHandlers[event.data.action]) {
  14139. this.messageHandlers[event.data.action](event.data);
  14140. }
  14141. }
  14142. };
  14143. };
  14144. exports['default'] = function (self) {
  14145. return new FlashTransmuxerWorker(self);
  14146. };
  14147. module.exports = exports['default'];
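/*
 * The worker speaks a small action-based protocol. A hedged sketch of driving
 * it from the main thread with webworkify; `worker` and `bytes` (a Uint8Array
 * of segment data) are illustrative:
 *
 *   var work = require('webworkify');
 *   var worker = work(require('./flash-transmuxer-worker'));
 *   worker.postMessage({ action: 'init', options: {} });
 *   worker.postMessage({
 *     action: 'push',
 *     data: bytes.buffer,
 *     byteOffset: bytes.byteOffset,
 *     byteLength: bytes.byteLength
 *   });
 *   worker.postMessage({ action: 'flush' });
 *   worker.onmessage = function (event) {
 *     // event.data.action is 'data' (with a segment) or 'done'
 *   };
 */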
  14148. },{"global/window":30,"mux.js/lib/flv":44}],70:[function(require,module,exports){
  14149. (function (global){
  14150. /**
  14151. * @file html-media-source.js
  14152. */
  14153. 'use strict';
  14154. Object.defineProperty(exports, '__esModule', {
  14155. value: true
  14156. });
  14157. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  14158. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  14159. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  14160. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  14161. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  14162. var _globalWindow = require('global/window');
  14163. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  14164. var _globalDocument = require('global/document');
  14165. var _globalDocument2 = _interopRequireDefault(_globalDocument);
  14166. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  14167. var _videoJs2 = _interopRequireDefault(_videoJs);
  14168. var _virtualSourceBuffer = require('./virtual-source-buffer');
  14169. var _virtualSourceBuffer2 = _interopRequireDefault(_virtualSourceBuffer);
  14170. var _addTextTrackData = require('./add-text-track-data');
  14171. var _codecUtils = require('./codec-utils');
  14172. var _cleanupTextTracks = require('./cleanup-text-tracks');
  14173. /**
  14174. * Our MediaSource implementation in HTML, mimics native
  14175. * MediaSource where/if possible.
  14176. *
  14177. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
  14178. * @class HtmlMediaSource
  14179. * @extends videojs.EventTarget
  14180. */
  14181. var HtmlMediaSource = (function (_videojs$EventTarget) {
  14182. _inherits(HtmlMediaSource, _videojs$EventTarget);
  14183. function HtmlMediaSource() {
  14184. var _this = this;
  14185. _classCallCheck(this, HtmlMediaSource);
  14186. _get(Object.getPrototypeOf(HtmlMediaSource.prototype), 'constructor', this).call(this);
  14187. var property = undefined;
  14188. this.nativeMediaSource_ = new _globalWindow2['default'].MediaSource();
  14189. // delegate to the native MediaSource's methods by default
  14190. for (property in this.nativeMediaSource_) {
  14191. if (!(property in HtmlMediaSource.prototype) && typeof this.nativeMediaSource_[property] === 'function') {
  14192. this[property] = this.nativeMediaSource_[property].bind(this.nativeMediaSource_);
  14193. }
  14194. }
  14195. // emulate `duration` and `seekable` until seeking can be
  14196. // handled uniformly for live streams
  14197. // see https://github.com/w3c/media-source/issues/5
  14198. this.duration_ = NaN;
  14199. Object.defineProperty(this, 'duration', {
  14200. get: function get() {
  14201. if (this.duration_ === Infinity) {
  14202. return this.duration_;
  14203. }
  14204. return this.nativeMediaSource_.duration;
  14205. },
  14206. set: function set(duration) {
  14207. this.duration_ = duration;
  14208. if (duration !== Infinity) {
  14209. this.nativeMediaSource_.duration = duration;
  14210. return;
  14211. }
  14212. }
  14213. });
  14214. Object.defineProperty(this, 'seekable', {
  14215. get: function get() {
  14216. if (this.duration_ === Infinity) {
  14217. return _videoJs2['default'].createTimeRanges([[0, this.nativeMediaSource_.duration]]);
  14218. }
  14219. return this.nativeMediaSource_.seekable;
  14220. }
  14221. });
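/*
 * For live streams (duration === Infinity) the native duration instead tracks
 * the furthest seekable point, so the polyfill reports it as the seekable end.
 * Illustrative effect:
 *
 *   mediaSource.duration = Infinity;            // stored on duration_ only
 *   mediaSource.nativeMediaSource_.duration = 3600;
 *   mediaSource.seekable;                       // TimeRanges [[0, 3600]]
 */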
  14222. Object.defineProperty(this, 'readyState', {
  14223. get: function get() {
  14224. return this.nativeMediaSource_.readyState;
  14225. }
  14226. });
  14227. Object.defineProperty(this, 'activeSourceBuffers', {
  14228. get: function get() {
  14229. return this.activeSourceBuffers_;
  14230. }
  14231. });
  14232. // the list of virtual and native SourceBuffers created by this
  14233. // MediaSource
  14234. this.sourceBuffers = [];
  14235. this.activeSourceBuffers_ = [];
  14236. /**
  14237. * update the list of active source buffers based upon various
14238. * information from HLS and video.js
  14239. *
  14240. * @private
  14241. */
  14242. this.updateActiveSourceBuffers_ = function () {
  14243. // Retain the reference but empty the array
  14244. _this.activeSourceBuffers_.length = 0;
  14245. // By default, the audio in the combined virtual source buffer is enabled
  14246. // and the audio-only source buffer (if it exists) is disabled.
  14247. var combined = false;
  14248. var audioOnly = true;
  14249. // TODO: maybe we can store the sourcebuffers on the track objects?
  14250. // safari may do something like this
  14251. for (var i = 0; i < _this.player_.audioTracks().length; i++) {
  14252. var track = _this.player_.audioTracks()[i];
  14253. if (track.enabled && track.kind !== 'main') {
  14254. // The enabled track is an alternate audio track so disable the audio in
  14255. // the combined source buffer and enable the audio-only source buffer.
  14256. combined = true;
  14257. audioOnly = false;
  14258. break;
  14259. }
  14260. }
  14261. // Since we currently support a max of two source buffers, add all of the source
  14262. // buffers (in order).
  14263. _this.sourceBuffers.forEach(function (sourceBuffer) {
14264. /* eslint-disable */
  14265. // TODO once codecs are required, we can switch to using the codecs to determine
  14266. // what stream is the video stream, rather than relying on videoTracks
14267. /* eslint-enable */
  14268. sourceBuffer.appendAudioInitSegment_ = true;
  14269. if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
  14270. // combined
  14271. sourceBuffer.audioDisabled_ = combined;
  14272. } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
14273. // If the "combined" source buffer is video only, then we do not want to
  14274. // disable the audio-only source buffer (this is mostly for demuxed
  14275. // audio and video hls)
  14276. sourceBuffer.audioDisabled_ = true;
  14277. audioOnly = false;
  14278. } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
  14279. // audio only
  14280. sourceBuffer.audioDisabled_ = audioOnly;
  14281. if (audioOnly) {
  14282. return;
  14283. }
  14284. }
  14285. _this.activeSourceBuffers_.push(sourceBuffer);
  14286. });
  14287. };
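/*
 * A condensed summary of the decision above for the demuxed two-buffer case
 * (flag names as in the code; the outcomes follow from the loop above):
 *
 *   // alternate audio track enabled: combined = true, audioOnly = false
 *   //   video+audio buffer -> audioDisabled_ = true
 *   //   audio-only buffer  -> audioDisabled_ = false, stays active
 *   // main audio enabled: combined = false, audioOnly = true
 *   //   video+audio buffer -> audioDisabled_ = false
 *   //   audio-only buffer  -> audioDisabled_ = true, skipped entirely
 */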
  14288. this.onPlayerMediachange_ = function () {
  14289. _this.sourceBuffers.forEach(function (sourceBuffer) {
  14290. sourceBuffer.appendAudioInitSegment_ = true;
  14291. });
  14292. };
  14293. // Re-emit MediaSource events on the polyfill
  14294. ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
  14295. this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
  14296. }, this);
  14297. // capture the associated player when the MediaSource is
  14298. // successfully attached
  14299. this.on('sourceopen', function (event) {
  14300. // Get the player this MediaSource is attached to
  14301. var video = _globalDocument2['default'].querySelector('[src="' + _this.url_ + '"]');
  14302. if (!video) {
  14303. return;
  14304. }
  14305. _this.player_ = (0, _videoJs2['default'])(video.parentNode);
  14306. if (_this.player_.audioTracks && _this.player_.audioTracks()) {
  14307. _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
  14308. _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
  14309. _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
  14310. }
  14311. _this.player_.on('mediachange', _this.onPlayerMediachange_);
  14312. });
  14313. this.on('sourceended', function (event) {
  14314. var duration = (0, _addTextTrackData.durationOfVideo)(_this.duration);
  14315. for (var i = 0; i < _this.sourceBuffers.length; i++) {
  14316. var sourcebuffer = _this.sourceBuffers[i];
  14317. var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;
  14318. if (cues && cues.length) {
  14319. cues[cues.length - 1].endTime = duration;
  14320. }
  14321. }
  14322. });
  14323. // explicitly terminate any WebWorkers that were created
  14324. // by SourceHandlers
  14325. this.on('sourceclose', function (event) {
  14326. this.sourceBuffers.forEach(function (sourceBuffer) {
  14327. if (sourceBuffer.transmuxer_) {
  14328. sourceBuffer.transmuxer_.terminate();
  14329. }
  14330. });
  14331. this.sourceBuffers.length = 0;
  14332. if (!this.player_) {
  14333. return;
  14334. }
  14335. (0, _cleanupTextTracks.cleanupTextTracks)(this.player_);
  14336. if (this.player_.audioTracks && this.player_.audioTracks()) {
  14337. this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
  14338. this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
  14339. this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
  14340. }
  14341. // We can only change this if the player hasn't been disposed of yet
  14342. // because `off` eventually tries to use the el_ property. If it has
  14343. // been disposed of, then don't worry about it because there are no
  14344. // event handlers left to unbind anyway
  14345. if (this.player_.el_) {
  14346. this.player_.off('mediachange', this.onPlayerMediachange_);
  14347. }
  14348. });
  14349. }
  14350. /**
14351. * Add a range that can now be seeked to.
  14352. *
  14353. * @param {Double} start where to start the addition
  14354. * @param {Double} end where to end the addition
  14355. * @private
  14356. */
  14357. _createClass(HtmlMediaSource, [{
  14358. key: 'addSeekableRange_',
  14359. value: function addSeekableRange_(start, end) {
  14360. var error = undefined;
  14361. if (this.duration !== Infinity) {
  14362. error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
  14363. error.name = 'InvalidStateError';
  14364. error.code = 11;
  14365. throw error;
  14366. }
  14367. if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
  14368. this.nativeMediaSource_.duration = end;
  14369. }
  14370. }
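/*
 * Usage sketch, with illustrative values; the call is only legal while the
 * emulated duration is Infinity:
 *
 *   mediaSource.duration = Infinity;
 *   mediaSource.addSeekableRange_(0, 4200);  // native duration becomes 4200
 *   // with a finite duration the call throws InvalidStateError (code 11)
 */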
  14371. /**
  14372. * Add a source buffer to the media source.
  14373. *
  14374. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
  14375. * @param {String} type the content-type of the content
  14376. * @return {Object} the created source buffer
  14377. */
  14378. }, {
  14379. key: 'addSourceBuffer',
  14380. value: function addSourceBuffer(type) {
  14381. var buffer = undefined;
  14382. var parsedType = (0, _codecUtils.parseContentType)(type);
  14383. // Create a VirtualSourceBuffer to transmux MPEG-2 transport
  14384. // stream segments into fragmented MP4s
  14385. if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
  14386. var codecs = [];
  14387. if (parsedType.parameters && parsedType.parameters.codecs) {
  14388. codecs = parsedType.parameters.codecs.split(',');
  14389. codecs = (0, _codecUtils.translateLegacyCodecs)(codecs);
  14390. codecs = codecs.filter(function (codec) {
  14391. return (0, _codecUtils.isAudioCodec)(codec) || (0, _codecUtils.isVideoCodec)(codec);
  14392. });
  14393. }
  14394. if (codecs.length === 0) {
  14395. codecs = ['avc1.4d400d', 'mp4a.40.2'];
  14396. }
  14397. buffer = new _virtualSourceBuffer2['default'](this, codecs);
  14398. if (this.sourceBuffers.length !== 0) {
  14399. // If another VirtualSourceBuffer already exists, then we are creating a
  14400. // SourceBuffer for an alternate audio track and therefore we know that
  14401. // the source has both an audio and video track.
  14402. // That means we should trigger the manual creation of the real
  14403. // SourceBuffers instead of waiting for the transmuxer to return data
  14404. this.sourceBuffers[0].createRealSourceBuffers_();
  14405. buffer.createRealSourceBuffers_();
  14406. // Automatically disable the audio on the first source buffer if
  14407. // a second source buffer is ever created
  14408. this.sourceBuffers[0].audioDisabled_ = true;
  14409. }
  14410. } else {
  14411. // delegate to the native implementation
  14412. buffer = this.nativeMediaSource_.addSourceBuffer(type);
  14413. }
  14414. this.sourceBuffers.push(buffer);
  14415. return buffer;
  14416. }
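/*
 * Routing sketch: an MPEG-2 transport stream content type gets a
 * VirtualSourceBuffer; anything else falls through to the native
 * implementation. Illustrative calls:
 *
 *   mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d, mp4a.40.2"');
 *   // => VirtualSourceBuffer (transmuxed)
 *   mediaSource.addSourceBuffer('video/mp4; codecs="avc1.4d400d"');
 *   // => native SourceBuffer
 */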
  14417. }]);
  14418. return HtmlMediaSource;
  14419. })(_videoJs2['default'].EventTarget);
  14420. exports['default'] = HtmlMediaSource;
  14421. module.exports = exports['default'];
  14422. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  14423. },{"./add-text-track-data":62,"./cleanup-text-tracks":63,"./codec-utils":64,"./virtual-source-buffer":74,"global/document":29,"global/window":30}],71:[function(require,module,exports){
  14424. /**
  14425. * @file remove-cues-from-track.js
  14426. */
  14427. /**
  14428. * Remove cues from a track on video.js.
  14429. *
  14430. * @param {Double} start start of where we should remove the cue
14431. * @param {Double} end end of where we should remove the cue
  14432. * @param {Object} track the text track to remove the cues from
  14433. * @private
  14434. */
  14435. "use strict";
  14436. Object.defineProperty(exports, "__esModule", {
  14437. value: true
  14438. });
  14439. var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
  14440. var i = undefined;
  14441. var cue = undefined;
  14442. if (!track) {
  14443. return;
  14444. }
  14445. if (!track.cues) {
  14446. return;
  14447. }
  14448. i = track.cues.length;
  14449. while (i--) {
  14450. cue = track.cues[i];
  14451. // Remove any overlapping cue
  14452. if (cue.startTime <= end && cue.endTime >= start) {
  14453. track.removeCue(cue);
  14454. }
  14455. }
  14456. };
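/*
 * Two closed ranges overlap exactly when cue.startTime <= end and
 * cue.endTime >= start. Illustrative effect:
 *
 *   removeCuesFromTrack(10, 20, track);
 *   // removes cues spanning [15, 25] or [5, 12]; keeps one at [21, 30]
 */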
  14457. exports["default"] = removeCuesFromTrack;
  14458. module.exports = exports["default"];
  14459. },{}],72:[function(require,module,exports){
  14460. /**
  14461. * @file transmuxer-worker.js
  14462. */
  14463. /**
  14464. * videojs-contrib-media-sources
  14465. *
  14466. * Copyright (c) 2015 Brightcove
  14467. * All rights reserved.
  14468. *
  14469. * Handles communication between the browser-world and the mux.js
  14470. * transmuxer running inside of a WebWorker by exposing a simple
  14471. * message-based interface to a Transmuxer object.
  14472. */
  14473. 'use strict';
  14474. Object.defineProperty(exports, '__esModule', {
  14475. value: true
  14476. });
  14477. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  14478. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  14479. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  14480. var _globalWindow = require('global/window');
  14481. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  14482. var _muxJsLibMp4 = require('mux.js/lib/mp4');
  14483. var _muxJsLibMp42 = _interopRequireDefault(_muxJsLibMp4);
  14484. /**
  14485. * Re-emits transmuxer events by converting them into messages to the
  14486. * world outside the worker.
  14487. *
  14488. * @param {Object} transmuxer the transmuxer to wire events on
  14489. * @private
  14490. */
  14491. var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
  14492. transmuxer.on('data', function (segment) {
  14493. // transfer ownership of the underlying ArrayBuffer
  14494. // instead of doing a copy to save memory
  14495. // ArrayBuffers are transferable but generic TypedArrays are not
  14496. // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
  14497. var initArray = segment.initSegment;
  14498. segment.initSegment = {
  14499. data: initArray.buffer,
  14500. byteOffset: initArray.byteOffset,
  14501. byteLength: initArray.byteLength
  14502. };
  14503. var typedArray = segment.data;
  14504. segment.data = typedArray.buffer;
  14505. _globalWindow2['default'].postMessage({
  14506. action: 'data',
  14507. segment: segment,
  14508. byteOffset: typedArray.byteOffset,
  14509. byteLength: typedArray.byteLength
  14510. }, [segment.data]);
  14511. });
  14512. if (transmuxer.captionStream) {
  14513. transmuxer.captionStream.on('data', function (caption) {
  14514. _globalWindow2['default'].postMessage({
  14515. action: 'caption',
  14516. data: caption
  14517. });
  14518. });
  14519. }
  14520. transmuxer.on('done', function (data) {
  14521. _globalWindow2['default'].postMessage({ action: 'done' });
  14522. });
  14523. };
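/*
 * The second argument to postMessage lists Transferables: the ArrayBuffer is
 * moved rather than copied, and is detached on the sending side afterwards.
 * A minimal sketch with `worker` as an illustrative Worker instance:
 *
 *   var bytes = new Uint8Array([1, 2, 3]);
 *   worker.postMessage({ action: 'data', data: bytes.buffer }, [bytes.buffer]);
 *   bytes.buffer.byteLength;  // 0 - the buffer was transferred away
 */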
  14524. /**
  14525. * All incoming messages route through this hash. If no function exists
  14526. * to handle an incoming message, then we ignore the message.
  14527. *
  14528. * @class MessageHandlers
  14529. * @param {Object} options the options to initialize with
  14530. */
  14531. var MessageHandlers = (function () {
  14532. function MessageHandlers(options) {
  14533. _classCallCheck(this, MessageHandlers);
  14534. this.options = options || {};
  14535. this.init();
  14536. }
  14537. /**
14538. * Our web worker interface so that things can talk to mux.js
14539. * that will be running in a web worker. The scope is passed to this by
  14540. * webworkify.
  14541. *
  14542. * @param {Object} self the scope for the web worker
  14543. */
  14544. /**
  14545. * initialize our web worker and wire all the events.
  14546. */
  14547. _createClass(MessageHandlers, [{
  14548. key: 'init',
  14549. value: function init() {
  14550. if (this.transmuxer) {
  14551. this.transmuxer.dispose();
  14552. }
  14553. this.transmuxer = new _muxJsLibMp42['default'].Transmuxer(this.options);
  14554. wireTransmuxerEvents(this.transmuxer);
  14555. }
  14556. /**
  14557. * Adds data (a ts segment) to the start of the transmuxer pipeline for
  14558. * processing.
  14559. *
  14560. * @param {ArrayBuffer} data data to push into the muxer
  14561. */
  14562. }, {
  14563. key: 'push',
  14564. value: function push(data) {
  14565. // Cast array buffer to correct type for transmuxer
  14566. var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
  14567. this.transmuxer.push(segment);
  14568. }
  14569. /**
  14570. * Recreate the transmuxer so that the next segment added via `push`
14571. * starts with a fresh transmuxer.
  14572. */
  14573. }, {
  14574. key: 'reset',
  14575. value: function reset() {
  14576. this.init();
  14577. }
  14578. /**
14579. * Set the value that will be used as the `baseMediaDecodeTime` for the
  14580. * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
  14581. * set relative to the first based on the PTS values.
  14582. *
  14583. * @param {Object} data used to set the timestamp offset in the muxer
  14584. */
  14585. }, {
  14586. key: 'setTimestampOffset',
  14587. value: function setTimestampOffset(data) {
  14588. var timestampOffset = data.timestampOffset || 0;
  14589. this.transmuxer.setBaseMediaDecodeTime(Math.round(timestampOffset * 90000));
  14590. }
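/*
 * MPEG-TS timestamps tick at 90kHz, so an offset given in seconds is scaled
 * by 90000 before it reaches the transmuxer, e.g.:
 *
 *   Math.round(10.5 * 90000);  // => 945000 ticks of baseMediaDecodeTime
 */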
  14591. }, {
  14592. key: 'setAudioAppendStart',
  14593. value: function setAudioAppendStart(data) {
  14594. this.transmuxer.setAudioAppendStart(Math.ceil(data.appendStart * 90000));
  14595. }
  14596. /**
14597. * Forces the pipeline to finish processing the last segment and emit its
  14598. * results.
  14599. *
  14600. * @param {Object} data event data, not really used
  14601. */
  14602. }, {
  14603. key: 'flush',
  14604. value: function flush(data) {
  14605. this.transmuxer.flush();
  14606. }
  14607. }]);
  14608. return MessageHandlers;
  14609. })();
  14610. var TransmuxerWorker = function TransmuxerWorker(self) {
  14611. self.onmessage = function (event) {
  14612. if (event.data.action === 'init' && event.data.options) {
  14613. this.messageHandlers = new MessageHandlers(event.data.options);
  14614. return;
  14615. }
  14616. if (!this.messageHandlers) {
  14617. this.messageHandlers = new MessageHandlers();
  14618. }
  14619. if (event.data && event.data.action && event.data.action !== 'init') {
  14620. if (this.messageHandlers[event.data.action]) {
  14621. this.messageHandlers[event.data.action](event.data);
  14622. }
  14623. }
  14624. };
  14625. };
  14626. exports['default'] = function (self) {
  14627. return new TransmuxerWorker(self);
  14628. };
  14629. module.exports = exports['default'];
  14630. },{"global/window":30,"mux.js/lib/mp4":53}],73:[function(require,module,exports){
  14631. (function (global){
  14632. /**
  14633. * @file videojs-contrib-media-sources.js
  14634. */
  14635. 'use strict';
  14636. Object.defineProperty(exports, '__esModule', {
  14637. value: true
  14638. });
  14639. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  14640. var _globalWindow = require('global/window');
  14641. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  14642. var _flashMediaSource = require('./flash-media-source');
  14643. var _flashMediaSource2 = _interopRequireDefault(_flashMediaSource);
  14644. var _htmlMediaSource = require('./html-media-source');
  14645. var _htmlMediaSource2 = _interopRequireDefault(_htmlMediaSource);
  14646. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  14647. var _videoJs2 = _interopRequireDefault(_videoJs);
  14648. var urlCount = 0;
  14649. // ------------
  14650. // Media Source
  14651. // ------------
  14652. var defaults = {
  14653. // how to determine the MediaSource implementation to use. There
  14654. // are three available modes:
  14655. // - auto: use native MediaSources where available and Flash
  14656. // everywhere else
  14657. // - html5: always use native MediaSources
  14658. // - flash: always use the Flash MediaSource polyfill
  14659. mode: 'auto'
  14660. };
  14661. // store references to the media sources so they can be connected
  14662. // to a video element (a swf object)
  14663. // TODO: can we store this somewhere local to this module?
  14664. _videoJs2['default'].mediaSources = {};
  14665. /**
  14666. * Provide a method for a swf object to notify JS that a
  14667. * media source is now open.
  14668. *
  14669. * @param {String} msObjectURL string referencing the MSE Object URL
  14670. * @param {String} swfId the swf id
  14671. */
  14672. var open = function open(msObjectURL, swfId) {
  14673. var mediaSource = _videoJs2['default'].mediaSources[msObjectURL];
  14674. if (mediaSource) {
  14675. mediaSource.trigger({ type: 'sourceopen', swfId: swfId });
  14676. } else {
  14677. throw new Error('Media Source not found (Video.js)');
  14678. }
  14679. };
  14680. /**
  14681. * Check to see if the native MediaSource object exists and supports
  14682. * an MP4 container with both H.264 video and AAC-LC audio.
  14683. *
  14684. * @return {Boolean} if native media sources are supported
  14685. */
  14686. var supportsNativeMediaSources = function supportsNativeMediaSources() {
  14687. return !!_globalWindow2['default'].MediaSource && !!_globalWindow2['default'].MediaSource.isTypeSupported && _globalWindow2['default'].MediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
  14688. };
  14689. /**
  14690. * An emulation of the MediaSource API so that we can support
  14691. * native and non-native functionality such as flash and
14692. * video/mp2t videos. Returns an instance of HtmlMediaSource or
  14693. * FlashMediaSource depending on what is supported and what options
  14694. * are passed in.
  14695. *
  14696. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/MediaSource
  14697. * @param {Object} options options to use during setup.
  14698. */
  14699. var MediaSource = function MediaSource(options) {
  14700. var settings = _videoJs2['default'].mergeOptions(defaults, options);
  14701. this.MediaSource = {
  14702. open: open,
  14703. supportsNativeMediaSources: supportsNativeMediaSources
  14704. };
  14705. // determine whether HTML MediaSources should be used
  14706. if (settings.mode === 'html5' || settings.mode === 'auto' && supportsNativeMediaSources()) {
  14707. return new _htmlMediaSource2['default']();
  14708. } else if (_videoJs2['default'].getTech('Flash')) {
  14709. return new _flashMediaSource2['default']();
  14710. }
  14711. throw new Error('Cannot use Flash or Html5 to create a MediaSource for this video');
  14712. };
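/*
 * Mode selection sketch: 'auto' prefers native Media Source Extensions and
 * falls back to the Flash polyfill only when the Flash tech is registered.
 * Illustrative calls:
 *
 *   new MediaSource({ mode: 'auto' });   // HtmlMediaSource where supported
 *   new MediaSource({ mode: 'flash' });  // FlashMediaSource polyfill
 */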
  14713. exports.MediaSource = MediaSource;
  14714. MediaSource.open = open;
  14715. MediaSource.supportsNativeMediaSources = supportsNativeMediaSources;
  14716. /**
  14717. * A wrapper around the native URL for our MSE object
  14718. * implementation, this object is exposed under videojs.URL
  14719. *
  14720. * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/URL
  14721. */
  14722. var URL = {
  14723. /**
  14724. * A wrapper around the native createObjectURL for our objects.
  14725. * This function maps a native or emulated mediaSource to a blob
  14726. * url so that it can be loaded into video.js
  14727. *
  14728. * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
  14729. * @param {MediaSource} object the object to create a blob url to
  14730. */
  14731. createObjectURL: function createObjectURL(object) {
  14732. var objectUrlPrefix = 'blob:vjs-media-source/';
  14733. var url = undefined;
  14734. // use the native MediaSource to generate an object URL
  14735. if (object instanceof _htmlMediaSource2['default']) {
  14736. url = _globalWindow2['default'].URL.createObjectURL(object.nativeMediaSource_);
  14737. object.url_ = url;
  14738. return url;
  14739. }
  14740. // if the object isn't an emulated MediaSource, delegate to the
  14741. // native implementation
  14742. if (!(object instanceof _flashMediaSource2['default'])) {
  14743. url = _globalWindow2['default'].URL.createObjectURL(object);
  14744. object.url_ = url;
  14745. return url;
  14746. }
  14747. // build a URL that can be used to map back to the emulated
  14748. // MediaSource
  14749. url = objectUrlPrefix + urlCount;
  14750. urlCount++;
  14751. // setup the mapping back to object
  14752. _videoJs2['default'].mediaSources[url] = object;
  14753. return url;
  14754. }
  14755. };
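/*
 * For the Flash polyfill there is no real blob, so a synthetic
 * 'blob:vjs-media-source/N' URL is minted and mapped back through
 * videojs.mediaSources. Illustrative effect for the first such URL:
 *
 *   var url = videojs.URL.createObjectURL(flashMediaSource);
 *   // url === 'blob:vjs-media-source/0'
 *   videojs.mediaSources[url] === flashMediaSource;  // true
 */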
  14756. exports.URL = URL;
  14757. _videoJs2['default'].MediaSource = MediaSource;
  14758. _videoJs2['default'].URL = URL;
  14759. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  14760. },{"./flash-media-source":67,"./html-media-source":70,"global/window":30}],74:[function(require,module,exports){
  14761. (function (global){
  14762. /**
  14763. * @file virtual-source-buffer.js
  14764. */
  14765. 'use strict';
  14766. Object.defineProperty(exports, '__esModule', {
  14767. value: true
  14768. });
  14769. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  14770. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  14771. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  14772. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  14773. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  14774. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  14775. var _videoJs2 = _interopRequireDefault(_videoJs);
  14776. var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
  14777. var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
  14778. var _removeCuesFromTrack = require('./remove-cues-from-track');
  14779. var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
  14780. var _addTextTrackData = require('./add-text-track-data');
  14781. var _webworkify = require('webworkify');
  14782. var _webworkify2 = _interopRequireDefault(_webworkify);
  14783. var _transmuxerWorker = require('./transmuxer-worker');
  14784. var _transmuxerWorker2 = _interopRequireDefault(_transmuxerWorker);
  14785. var _codecUtils = require('./codec-utils');
  14786. /**
14787. * VirtualSourceBuffers exist so that we can transmux non-native formats
14788. * into a native format, but keep the same api as a native source buffer.
14789. * It creates a transmuxer that works in its own thread (a web worker) and
14790. * that transmuxer muxes the data into a native format. VirtualSourceBuffer will
14791. * then send all of that data to the native source buffer so that it is
14792. * indistinguishable from a natively supported format.
  14793. *
  14794. * @param {HtmlMediaSource} mediaSource the parent mediaSource
  14795. * @param {Array} codecs array of codecs that we will be dealing with
  14796. * @class VirtualSourceBuffer
  14797. * @extends video.js.EventTarget
  14798. */
  14799. var VirtualSourceBuffer = (function (_videojs$EventTarget) {
  14800. _inherits(VirtualSourceBuffer, _videojs$EventTarget);
  14801. function VirtualSourceBuffer(mediaSource, codecs) {
  14802. var _this = this;
  14803. _classCallCheck(this, VirtualSourceBuffer);
  14804. _get(Object.getPrototypeOf(VirtualSourceBuffer.prototype), 'constructor', this).call(this, _videoJs2['default'].EventTarget);
  14805. this.timestampOffset_ = 0;
  14806. this.pendingBuffers_ = [];
  14807. this.bufferUpdating_ = false;
  14808. this.mediaSource_ = mediaSource;
  14809. this.codecs_ = codecs;
  14810. this.audioCodec_ = null;
  14811. this.videoCodec_ = null;
  14812. this.audioDisabled_ = false;
  14813. this.appendAudioInitSegment_ = true;
  14814. var options = {
  14815. remux: false
  14816. };
  14817. this.codecs_.forEach(function (codec) {
  14818. if ((0, _codecUtils.isAudioCodec)(codec)) {
  14819. _this.audioCodec_ = codec;
  14820. } else if ((0, _codecUtils.isVideoCodec)(codec)) {
  14821. _this.videoCodec_ = codec;
  14822. }
  14823. });
  14824. // append muxed segments to their respective native buffers as
  14825. // soon as they are available
  14826. this.transmuxer_ = (0, _webworkify2['default'])(_transmuxerWorker2['default']);
  14827. this.transmuxer_.postMessage({ action: 'init', options: options });
  14828. this.transmuxer_.onmessage = function (event) {
  14829. if (event.data.action === 'data') {
  14830. return _this.data_(event);
  14831. }
  14832. if (event.data.action === 'done') {
  14833. return _this.done_(event);
  14834. }
  14835. };
  14836. // this timestampOffset is a property with the side-effect of resetting
  14837. // baseMediaDecodeTime in the transmuxer on the setter
  14838. Object.defineProperty(this, 'timestampOffset', {
  14839. get: function get() {
  14840. return this.timestampOffset_;
  14841. },
  14842. set: function set(val) {
  14843. if (typeof val === 'number' && val >= 0) {
  14844. this.timestampOffset_ = val;
  14845. this.appendAudioInitSegment_ = true;
  14846. // We have to tell the transmuxer to set the baseMediaDecodeTime to
  14847. // the desired timestampOffset for the next segment
  14848. this.transmuxer_.postMessage({
  14849. action: 'setTimestampOffset',
  14850. timestampOffset: val
  14851. });
  14852. }
  14853. }
  14854. });
  14855. // setting the append window affects both source buffers
  14856. Object.defineProperty(this, 'appendWindowStart', {
  14857. get: function get() {
  14858. return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
  14859. },
  14860. set: function set(start) {
  14861. if (this.videoBuffer_) {
  14862. this.videoBuffer_.appendWindowStart = start;
  14863. }
  14864. if (this.audioBuffer_) {
  14865. this.audioBuffer_.appendWindowStart = start;
  14866. }
  14867. }
  14868. });
  14869. // this buffer is "updating" if either of its native buffers are
  14870. Object.defineProperty(this, 'updating', {
  14871. get: function get() {
  14872. return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
  14873. }
  14874. });
  14875. // the buffered property is the intersection of the buffered
  14876. // ranges of the native source buffers
  14877. Object.defineProperty(this, 'buffered', {
  14878. get: function get() {
  14879. var start = null;
  14880. var end = null;
  14881. var arity = 0;
  14882. var extents = [];
  14883. var ranges = [];
  14884. // neither buffer has been created yet
  14885. if (!this.videoBuffer_ && !this.audioBuffer_) {
  14886. return _videoJs2['default'].createTimeRange();
  14887. }
  14888. // only one buffer is configured
  14889. if (!this.videoBuffer_) {
  14890. return this.audioBuffer_.buffered;
  14891. }
  14892. if (!this.audioBuffer_) {
  14893. return this.videoBuffer_.buffered;
  14894. }
  14895. // both buffers are configured
  14896. if (this.audioDisabled_) {
  14897. return this.videoBuffer_.buffered;
  14898. }
  14899. // both buffers are empty
  14900. if (this.videoBuffer_.buffered.length === 0 && this.audioBuffer_.buffered.length === 0) {
  14901. return _videoJs2['default'].createTimeRange();
  14902. }
  14903. // Handle the case where we have both buffers and create an
  14904. // intersection of the two
  14905. var videoBuffered = this.videoBuffer_.buffered;
  14906. var audioBuffered = this.audioBuffer_.buffered;
  14907. var count = videoBuffered.length;
  14908. // A) Gather up all start and end times
  14909. while (count--) {
  14910. extents.push({ time: videoBuffered.start(count), type: 'start' });
  14911. extents.push({ time: videoBuffered.end(count), type: 'end' });
  14912. }
  14913. count = audioBuffered.length;
  14914. while (count--) {
  14915. extents.push({ time: audioBuffered.start(count), type: 'start' });
  14916. extents.push({ time: audioBuffered.end(count), type: 'end' });
  14917. }
  14918. // B) Sort them by time
  14919. extents.sort(function (a, b) {
  14920. return a.time - b.time;
  14921. });
  14922. // C) Go along one by one incrementing arity for start and decrementing
  14923. // arity for ends
  14924. for (count = 0; count < extents.length; count++) {
  14925. if (extents[count].type === 'start') {
  14926. arity++;
  14927. // D) If arity is ever incremented to 2 we are entering an
  14928. // overlapping range
  14929. if (arity === 2) {
  14930. start = extents[count].time;
  14931. }
  14932. } else if (extents[count].type === 'end') {
  14933. arity--;
14934. // E) If arity is ever decremented to 1 we are leaving an
  14935. // overlapping range
  14936. if (arity === 1) {
  14937. end = extents[count].time;
  14938. }
  14939. }
  14940. // F) Record overlapping ranges
  14941. if (start !== null && end !== null) {
  14942. ranges.push([start, end]);
  14943. start = null;
  14944. end = null;
  14945. }
  14946. }
  14947. return _videoJs2['default'].createTimeRanges(ranges);
  14948. }
  14949. });
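/*
 * The buffered getter above is a sweep over range endpoints: arity counts how
 * many of the two TimeRanges cover a point, and stretches at arity 2 form the
 * intersection. A standalone sketch over plain [start, end] arrays (endpoint
 * ties glossed over for brevity; names illustrative):
 *
 *   function intersectRanges(a, b) {
 *     var pts = [];
 *     a.concat(b).forEach(function (r) {
 *       pts.push({ t: r[0], d: 1 }, { t: r[1], d: -1 });
 *     });
 *     pts.sort(function (x, y) { return x.t - y.t; });
 *     var arity = 0;
 *     var start = null;
 *     var out = [];
 *     pts.forEach(function (p) {
 *       arity += p.d;
 *       if (p.d === 1 && arity === 2) { start = p.t; }
 *       if (p.d === -1 && arity === 1) { out.push([start, p.t]); }
 *     });
 *     return out;
 *   }
 *   intersectRanges([[0, 10], [20, 30]], [[5, 25]]);
 *   // => [[5, 10], [20, 25]]
 */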
  14950. }
  14951. /**
  14952. * When we get a data event from the transmuxer
  14953. * we call this function and handle the data that
  14954. * was sent to us
  14955. *
  14956. * @private
  14957. * @param {Event} event the data event from the transmuxer
  14958. */
  14959. _createClass(VirtualSourceBuffer, [{
  14960. key: 'data_',
  14961. value: function data_(event) {
  14962. var segment = event.data.segment;
  14963. // Cast ArrayBuffer to TypedArray
  14964. segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);
  14965. segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);
  14966. (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
  14967. // Add the segments to the pendingBuffers array
  14968. this.pendingBuffers_.push(segment);
  14969. return;
  14970. }
  14971. /**
  14972. * When we get a done event from the transmuxer
  14973. * we call this function and we process all
  14974. * of the pending data that we have been saving in the
  14975. * data_ function
  14976. *
  14977. * @private
  14978. * @param {Event} event the done event from the transmuxer
  14979. */
  14980. }, {
  14981. key: 'done_',
  14982. value: function done_(event) {
  14983. // All buffers should have been flushed from the muxer
  14984. // start processing anything we have received
  14985. this.processPendingSegments_();
  14986. return;
  14987. }
  14988. /**
  14989. * Create our internal native audio/video source buffers and add
  14990. * event handlers to them with the following conditions:
  14991. * 1. they do not already exist on the mediaSource
  14992. * 2. this VSB has a codec for them
  14993. *
  14994. * @private
  14995. */
  14996. }, {
  14997. key: 'createRealSourceBuffers_',
  14998. value: function createRealSourceBuffers_() {
  14999. var _this2 = this;
  15000. var types = ['audio', 'video'];
  15001. types.forEach(function (type) {
  15002. // Don't create a SourceBuffer of this type if we don't have a
  15003. // codec for it
  15004. if (!_this2[type + 'Codec_']) {
  15005. return;
  15006. }
  15007. // Do nothing if a SourceBuffer of this type already exists
  15008. if (_this2[type + 'Buffer_']) {
  15009. return;
  15010. }
  15011. var buffer = null;
  15012. // If the mediasource already has a SourceBuffer for the codec
  15013. // use that
  15014. if (_this2.mediaSource_[type + 'Buffer_']) {
  15015. buffer = _this2.mediaSource_[type + 'Buffer_'];
  15016. } else {
  15017. buffer = _this2.mediaSource_.nativeMediaSource_.addSourceBuffer(type + '/mp4;codecs="' + _this2[type + 'Codec_'] + '"');
  15018. _this2.mediaSource_[type + 'Buffer_'] = buffer;
  15019. }
  15020. _this2[type + 'Buffer_'] = buffer;
  15021. // Wire up the events to the SourceBuffer
  15022. ['update', 'updatestart', 'updateend'].forEach(function (event) {
  15023. buffer.addEventListener(event, function () {
  15024. // if audio is disabled
  15025. if (type === 'audio' && _this2.audioDisabled_) {
  15026. return;
  15027. }
  15028. var shouldTrigger = types.every(function (t) {
  15029. // skip checking audio's updating status if audio
  15030. // is not enabled
  15031. if (t === 'audio' && _this2.audioDisabled_) {
  15032. return true;
  15033. }
15034. // if the other type is updating we don't trigger
  15035. if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
  15036. return false;
  15037. }
  15038. return true;
  15039. });
  15040. if (shouldTrigger) {
  15041. return _this2.trigger(event);
  15042. }
  15043. });
  15044. });
  15045. });
  15046. }
  15047. /**
  15048. * Emulate the native mediasource function, but our function will
  15049. * send all of the proposed segments to the transmuxer so that we
  15050. * can transmux them before we append them to our internal
  15051. * native source buffers in the correct format.
  15052. *
  15053. * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
  15054. * @param {Uint8Array} segment the segment to append to the buffer
  15055. */
  15056. }, {
  15057. key: 'appendBuffer',
  15058. value: function appendBuffer(segment) {
  15059. // Start the internal "updating" state
  15060. this.bufferUpdating_ = true;
  15061. if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
  15062. var audioBuffered = this.audioBuffer_.buffered;
  15063. this.transmuxer_.postMessage({
  15064. action: 'setAudioAppendStart',
  15065. appendStart: audioBuffered.end(audioBuffered.length - 1)
  15066. });
  15067. }
  15068. this.transmuxer_.postMessage({
  15069. action: 'push',
  15070. // Send the typed-array of data as an ArrayBuffer so that
  15071. // it can be sent as a "Transferable" and avoid the costly
  15072. // memory copy
  15073. data: segment.buffer,
  15074. // To recreate the original typed-array, we need information
  15075. // about what portion of the ArrayBuffer it was a view into
  15076. byteOffset: segment.byteOffset,
  15077. byteLength: segment.byteLength
  15078. }, [segment.buffer]);
  15079. this.transmuxer_.postMessage({ action: 'flush' });
  15080. }
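// Illustrative sketch (not part of the library): a caller such as a segment
// loader is assumed to hand fetched bytes to this emulated buffer as a
// Uint8Array; the transferable postMessage above then moves, rather than
// copies, the underlying ArrayBuffer into the transmuxer worker.
//
//   fetch(segmentUrl)
//     .then(function (response) { return response.arrayBuffer(); })
//     .then(function (bytes) {
//       virtualBuffer.appendBuffer(new Uint8Array(bytes)); // `virtualBuffer` is hypothetical
//     });
//
// After the transfer the caller's `bytes` view is detached, which is why
// byteOffset/byteLength are sent alongside the buffer so the worker can
// rebuild the original view.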
  15081. /**
  15082. * Emulate the native mediasource function and remove parts
  15083. * of the buffer from any of our internal buffers that exist
  15084. *
  15085. * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
  15086. * @param {Double} start position to start the remove at
  15087. * @param {Double} end position to end the remove at
  15088. */
  15089. }, {
  15090. key: 'remove',
  15091. value: function remove(start, end) {
  15092. if (this.videoBuffer_) {
  15093. this.videoBuffer_.remove(start, end);
  15094. }
  15095. if (this.audioBuffer_) {
  15096. this.audioBuffer_.remove(start, end);
  15097. }
  15098. // Remove Metadata Cues (id3)
  15099. (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
  15100. // Remove Any Captions
  15101. (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTrack_);
  15102. }
  15103. /**
  15104. * Process any segments that the muxer has output
  15105. * Concatenate segments together based on type and append them into
  15106. * their respective sourceBuffers
  15107. *
  15108. * @private
  15109. */
  15110. }, {
  15111. key: 'processPendingSegments_',
  15112. value: function processPendingSegments_() {
  15113. var sortedSegments = {
  15114. video: {
  15115. segments: [],
  15116. bytes: 0
  15117. },
  15118. audio: {
  15119. segments: [],
  15120. bytes: 0
  15121. },
  15122. captions: [],
  15123. metadata: []
  15124. };
  15125. // Sort segments into separate video/audio arrays and
  15126. // keep track of their total byte lengths
  15127. sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
  15128. var type = segment.type;
  15129. var data = segment.data;
  15130. var initSegment = segment.initSegment;
  15131. segmentObj[type].segments.push(data);
  15132. segmentObj[type].bytes += data.byteLength;
  15133. segmentObj[type].initSegment = initSegment;
  15134. // Gather any captions into a single array
  15135. if (segment.captions) {
  15136. segmentObj.captions = segmentObj.captions.concat(segment.captions);
  15137. }
  15138. if (segment.info) {
  15139. segmentObj[type].info = segment.info;
  15140. }
  15141. // Gather any metadata into a single array
  15142. if (segment.metadata) {
  15143. segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
  15144. }
  15145. return segmentObj;
  15146. }, sortedSegments);
  15147. // Create the real source buffers if they don't exist by now since we
  15148. // finally are sure what tracks are contained in the source
  15149. if (!this.videoBuffer_ && !this.audioBuffer_) {
  15150. // Remove any codecs that may have been specified by default but
  15151. // are no longer applicable now
  15152. if (sortedSegments.video.bytes === 0) {
  15153. this.videoCodec_ = null;
  15154. }
  15155. if (sortedSegments.audio.bytes === 0) {
  15156. this.audioCodec_ = null;
  15157. }
  15158. this.createRealSourceBuffers_();
  15159. }
  15160. if (sortedSegments.audio.info) {
  15161. this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
  15162. }
  15163. if (sortedSegments.video.info) {
  15164. this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
  15165. }
  15166. if (this.appendAudioInitSegment_) {
  15167. if (!this.audioDisabled_ && this.audioBuffer_) {
  15168. sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
  15169. sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
  15170. }
  15171. this.appendAudioInitSegment_ = false;
  15172. }
  15173. // Merge multiple video and audio segments into one and append
  15174. if (this.videoBuffer_) {
  15175. sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
  15176. sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
  15177. this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
  15178. // TODO: are video tracks the only ones with text tracks?
  15179. (0, _addTextTrackData.addTextTrackData)(this, sortedSegments.captions, sortedSegments.metadata);
  15180. }
  15181. if (!this.audioDisabled_ && this.audioBuffer_) {
  15182. this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
  15183. }
  15184. this.pendingBuffers_.length = 0;
  15185. // We are no longer in the internal "updating" state
  15186. this.bufferUpdating_ = false;
  15187. }
  15188. /**
  15189. * Combine all segments into a single Uint8Array and then append them
  15190. * to the destination buffer
  15191. *
  15192. * @param {Object} segmentObj
  15193. * @param {SourceBuffer} destinationBuffer native source buffer to append data to
  15194. * @private
  15195. */
  15196. }, {
  15197. key: 'concatAndAppendSegments_',
  15198. value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
  15199. var offset = 0;
  15200. var tempBuffer = undefined;
  15201. if (segmentObj.bytes) {
  15202. tempBuffer = new Uint8Array(segmentObj.bytes);
  15203. // Combine the individual segments into one large typed-array
  15204. segmentObj.segments.forEach(function (segment) {
  15205. tempBuffer.set(segment, offset);
  15206. offset += segment.byteLength;
  15207. });
  15208. try {
  15209. destinationBuffer.appendBuffer(tempBuffer);
  15210. } catch (error) {
  15211. if (this.mediaSource_.player_) {
  15212. this.mediaSource_.player_.error({
  15213. code: -3,
  15214. type: 'APPEND_BUFFER_ERR',
  15215. message: error.message,
  15216. originalError: error
  15217. });
  15218. }
  15219. }
  15220. }
  15221. }
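// A minimal standalone sketch of the concatenation technique used above
// (names are illustrative): size one Uint8Array to the total byte length,
// then copy each segment in with set() at a running offset.
//
//   var concatSegments = function (segments) {
//     var total = segments.reduce(function (sum, s) { return sum + s.byteLength; }, 0);
//     var out = new Uint8Array(total);
//     var offset = 0;
//     segments.forEach(function (s) {
//       out.set(s, offset);
//       offset += s.byteLength;
//     });
//     return out;
//   };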
  15222. /**
15223. * Emulate the native mediasource function. Abort any SourceBuffer
  15224. * actions and throw out any un-appended data.
  15225. *
  15226. * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
  15227. */
  15228. }, {
  15229. key: 'abort',
  15230. value: function abort() {
  15231. if (this.videoBuffer_) {
  15232. this.videoBuffer_.abort();
  15233. }
  15234. if (this.audioBuffer_) {
  15235. this.audioBuffer_.abort();
  15236. }
  15237. if (this.transmuxer_) {
  15238. this.transmuxer_.postMessage({ action: 'reset' });
  15239. }
  15240. this.pendingBuffers_.length = 0;
  15241. this.bufferUpdating_ = false;
  15242. }
  15243. }]);
  15244. return VirtualSourceBuffer;
  15245. })(_videoJs2['default'].EventTarget);
  15246. exports['default'] = VirtualSourceBuffer;
  15247. module.exports = exports['default'];
  15248. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  15249. },{"./add-text-track-data":62,"./codec-utils":64,"./create-text-tracks-if-necessary":65,"./remove-cues-from-track":71,"./transmuxer-worker":72,"webworkify":75}],75:[function(require,module,exports){
  15250. var bundleFn = arguments[3];
  15251. var sources = arguments[4];
  15252. var cache = arguments[5];
  15253. var stringify = JSON.stringify;
  15254. module.exports = function (fn) {
  15255. var keys = [];
  15256. var wkey;
  15257. var cacheKeys = Object.keys(cache);
  15258. for (var i = 0, l = cacheKeys.length; i < l; i++) {
  15259. var key = cacheKeys[i];
  15260. if (cache[key].exports === fn) {
  15261. wkey = key;
  15262. break;
  15263. }
  15264. }
  15265. if (!wkey) {
  15266. wkey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
  15267. var wcache = {};
  15268. for (var i = 0, l = cacheKeys.length; i < l; i++) {
  15269. var key = cacheKeys[i];
  15270. wcache[key] = key;
  15271. }
  15272. sources[wkey] = [
  15273. Function(['require','module','exports'], '(' + fn + ')(self)'),
  15274. wcache
  15275. ];
  15276. }
  15277. var skey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
  15278. var scache = {}; scache[wkey] = wkey;
  15279. sources[skey] = [
  15280. Function(['require'],'require(' + stringify(wkey) + ')(self)'),
  15281. scache
  15282. ];
  15283. var src = '(' + bundleFn + ')({'
  15284. + Object.keys(sources).map(function (key) {
  15285. return stringify(key) + ':['
  15286. + sources[key][0]
  15287. + ',' + stringify(sources[key][1]) + ']'
  15288. ;
  15289. }).join(',')
  15290. + '},{},[' + stringify(skey) + '])'
  15291. ;
  15292. var URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
  15293. return new Worker(URL.createObjectURL(
  15294. new Blob([src], { type: 'text/javascript' })
  15295. ));
  15296. };
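// In short, webworkify turns a function from this bundle into a Web Worker by
// serializing it into a tiny browserify-style bundle string and loading that
// string through a Blob URL. A minimal sketch of the same Blob-URL technique,
// independent of the bundler plumbing shown above:
//
//   var src = 'self.onmessage = function (e) { self.postMessage(e.data * 2); };';
//   var worker = new Worker(URL.createObjectURL(new Blob([src], { type: 'text/javascript' })));
//   worker.onmessage = function (e) { console.log(e.data); }; // logs 42
//   worker.postMessage(21);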
  15297. },{}],76:[function(require,module,exports){
  15298. (function (global){
  15299. /**
  15300. * @file videojs-contrib-hls.js
  15301. *
  15302. * The main file for the HLS project.
  15303. * License: https://github.com/videojs/videojs-contrib-hls/blob/master/LICENSE
  15304. */
  15305. 'use strict';
  15306. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  15307. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  15308. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  15309. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  15310. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  15311. var _globalDocument = require('global/document');
  15312. var _globalDocument2 = _interopRequireDefault(_globalDocument);
  15313. var _playlistLoader = require('./playlist-loader');
  15314. var _playlistLoader2 = _interopRequireDefault(_playlistLoader);
  15315. var _playlist = require('./playlist');
  15316. var _playlist2 = _interopRequireDefault(_playlist);
  15317. var _xhr = require('./xhr');
  15318. var _xhr2 = _interopRequireDefault(_xhr);
  15319. var _aesDecrypter = require('aes-decrypter');
  15320. var _binUtils = require('./bin-utils');
  15321. var _binUtils2 = _interopRequireDefault(_binUtils);
  15322. var _videojsContribMediaSources = require('videojs-contrib-media-sources');
  15323. var _m3u8Parser = require('m3u8-parser');
  15324. var _m3u8Parser2 = _interopRequireDefault(_m3u8Parser);
  15325. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  15326. var _videoJs2 = _interopRequireDefault(_videoJs);
  15327. var _masterPlaylistController = require('./master-playlist-controller');
  15328. var _config = require('./config');
  15329. var _config2 = _interopRequireDefault(_config);
  15330. var _renditionMixin = require('./rendition-mixin');
  15331. var _renditionMixin2 = _interopRequireDefault(_renditionMixin);
  15332. var _globalWindow = require('global/window');
  15333. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  15334. var _playbackWatcher = require('./playback-watcher');
  15335. var _playbackWatcher2 = _interopRequireDefault(_playbackWatcher);
  15336. var _reloadSourceOnError = require('./reload-source-on-error');
  15337. var _reloadSourceOnError2 = _interopRequireDefault(_reloadSourceOnError);
  15338. var Hls = {
  15339. PlaylistLoader: _playlistLoader2['default'],
  15340. Playlist: _playlist2['default'],
  15341. Decrypter: _aesDecrypter.Decrypter,
  15342. AsyncStream: _aesDecrypter.AsyncStream,
  15343. decrypt: _aesDecrypter.decrypt,
  15344. utils: _binUtils2['default'],
  15345. xhr: (0, _xhr2['default'])()
  15346. };
  15347. Object.defineProperty(Hls, 'GOAL_BUFFER_LENGTH', {
  15348. get: function get() {
  15349. _videoJs2['default'].log.warn('using Hls.GOAL_BUFFER_LENGTH is UNSAFE be sure ' + 'you know what you are doing');
  15350. return _config2['default'].GOAL_BUFFER_LENGTH;
  15351. },
  15352. set: function set(v) {
  15353. _videoJs2['default'].log.warn('using Hls.GOAL_BUFFER_LENGTH is UNSAFE be sure ' + 'you know what you are doing');
  15354. if (typeof v !== 'number' || v <= 0) {
  15355. _videoJs2['default'].log.warn('value passed to Hls.GOAL_BUFFER_LENGTH ' + 'must be a number and greater than 0');
  15356. return;
  15357. }
  15358. _config2['default'].GOAL_BUFFER_LENGTH = v;
  15359. }
  15360. });
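// Hedged usage note: the accessor above lets integrators tune the forward
// buffer goal globally, e.g. `videojs.Hls.GOAL_BUFFER_LENGTH = 60;` to aim
// for roughly a minute of buffered media. Both the getter and setter warn
// because the change affects every player on the page.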
  15361. // A fudge factor to apply to advertised playlist bitrates to account for
15362. // temporary fluctuations in client bandwidth
  15363. var BANDWIDTH_VARIANCE = 1.2;
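// Worked example: with a variance of 1.2, a rendition advertising
// BANDWIDTH = 4,000,000 bps is only eligible once the measured
// systemBandwidth exceeds 4,000,000 * 1.2 = 4,800,000 bps, leaving
// roughly 20% headroom for short-lived dips in client throughput.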
  15364. /**
  15365. * Returns the CSS value for the specified property on an element
  15366. * using `getComputedStyle`. Firefox has a long-standing issue where
  15367. * getComputedStyle() may return null when running in an iframe with
  15368. * `display: none`.
  15369. *
  15370. * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
15371. * @param {HTMLElement} el the HTMLElement to work on
15372. * @param {String} property the property to get the style for
  15373. */
  15374. var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
  15375. var result = undefined;
  15376. if (!el) {
  15377. return '';
  15378. }
  15379. result = _globalWindow2['default'].getComputedStyle(el);
  15380. if (!result) {
  15381. return '';
  15382. }
  15383. return result[property];
  15384. };
  15385. /**
  15386. * Updates the selectedIndex of the QualityLevelList when a mediachange happens in hls.
  15387. *
  15388. * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
  15389. * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
  15390. * @function handleHlsMediaChange
  15391. */
  15392. var handleHlsMediaChange = function handleHlsMediaChange(qualityLevels, playlistLoader) {
  15393. var newPlaylist = playlistLoader.media();
  15394. var selectedIndex = -1;
  15395. for (var i = 0; i < qualityLevels.length; i++) {
  15396. if (qualityLevels[i].id === newPlaylist.uri) {
  15397. selectedIndex = i;
  15398. break;
  15399. }
  15400. }
  15401. qualityLevels.selectedIndex_ = selectedIndex;
  15402. qualityLevels.trigger({
  15403. selectedIndex: selectedIndex,
  15404. type: 'change'
  15405. });
  15406. };
  15407. /**
  15408. * Adds quality levels to list once playlist metadata is available
  15409. *
  15410. * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
  15411. * @param {Object} hls Hls object to listen to for media events.
  15412. * @function handleHlsLoadedMetadata
  15413. */
  15414. var handleHlsLoadedMetadata = function handleHlsLoadedMetadata(qualityLevels, hls) {
  15415. hls.representations().forEach(function (rep) {
  15416. qualityLevels.addQualityLevel(rep);
  15417. });
  15418. handleHlsMediaChange(qualityLevels, hls.playlists);
  15419. };
  15420. /**
15421. * Reusable stable sort function
  15422. *
  15423. * @param {Playlists} array
  15424. * @param {Function} sortFn Different comparators
  15425. * @function stableSort
  15426. */
  15427. var stableSort = function stableSort(array, sortFn) {
  15428. var newArray = array.slice();
  15429. array.sort(function (left, right) {
  15430. var cmp = sortFn(left, right);
  15431. if (cmp === 0) {
  15432. return newArray.indexOf(left) - newArray.indexOf(right);
  15433. }
  15434. return cmp;
  15435. });
  15436. };
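// Illustrative sketch (not part of the library): because ties fall back to
// positions in the pre-sort copy, entries that compare equal keep their
// original relative order -- unlike bare Array.prototype.sort, whose
// stability is engine-dependent in pre-ES2019 environments.
//
//   var demo = [{ id: 'a', v: 1 }, { id: 'b', v: 1 }, { id: 'c', v: 0 }];
//   stableSort(demo, function (left, right) { return left.v - right.v; });
//   // demo is now c, a, b; 'a' still precedes 'b' because their values tie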
  15437. /**
  15438. * Chooses the appropriate media playlist based on the current
  15439. * bandwidth estimate and the player size.
  15440. *
  15441. * @return {Playlist} the highest bitrate playlist less than the currently detected
  15442. * bandwidth, accounting for some amount of bandwidth variance
  15443. */
  15444. Hls.STANDARD_PLAYLIST_SELECTOR = function () {
  15445. var sortedPlaylists = this.playlists.master.playlists.slice();
  15446. var bandwidthPlaylists = [];
  15447. var bandwidthBestVariant = undefined;
  15448. var resolutionPlusOne = undefined;
  15449. var resolutionBestVariant = undefined;
  15450. var width = undefined;
  15451. var height = undefined;
  15452. var systemBandwidth = undefined;
  15453. var haveResolution = undefined;
  15454. var resolutionPlusOneList = [];
  15455. var resolutionPlusOneSmallest = [];
  15456. var resolutionBestVariantList = [];
  15457. stableSort(sortedPlaylists, Hls.comparePlaylistBandwidth);
  15458. // filter out any playlists that have been excluded due to
  15459. // incompatible configurations or playback errors
  15460. sortedPlaylists = sortedPlaylists.filter(_playlist2['default'].isEnabled);
  15461. // filter out any variant that has greater effective bitrate
  15462. // than the current estimated bandwidth
  15463. systemBandwidth = this.systemBandwidth;
  15464. bandwidthPlaylists = sortedPlaylists.filter(function (elem) {
  15465. return elem.attributes && elem.attributes.BANDWIDTH && elem.attributes.BANDWIDTH * BANDWIDTH_VARIANCE < systemBandwidth;
  15466. });
  15467. // get all of the renditions with the same (highest) bandwidth
15468. // and then take the very first element
  15469. bandwidthBestVariant = bandwidthPlaylists.filter(function (elem) {
  15470. return elem.attributes.BANDWIDTH === bandwidthPlaylists[bandwidthPlaylists.length - 1].attributes.BANDWIDTH;
  15471. })[0];
  15472. // sort variants by resolution
  15473. stableSort(bandwidthPlaylists, Hls.comparePlaylistResolution);
  15474. width = parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10);
  15475. height = parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10);
  15476. // filter out playlists without resolution information
  15477. haveResolution = bandwidthPlaylists.filter(function (elem) {
  15478. return elem.attributes && elem.attributes.RESOLUTION && elem.attributes.RESOLUTION.width && elem.attributes.RESOLUTION.height;
  15479. });
  15480. // if we have the exact resolution as the player use it
  15481. resolutionBestVariantList = haveResolution.filter(function (elem) {
  15482. return elem.attributes.RESOLUTION.width === width && elem.attributes.RESOLUTION.height === height;
  15483. });
15484. // ensure that we pick the highest bandwidth variant that has the exact resolution
  15485. resolutionBestVariant = resolutionBestVariantList.filter(function (elem) {
  15486. return elem.attributes.BANDWIDTH === resolutionBestVariantList[resolutionBestVariantList.length - 1].attributes.BANDWIDTH;
  15487. })[0];
  15488. // find the smallest variant that is larger than the player
  15489. // if there is no match of exact resolution
  15490. if (!resolutionBestVariant) {
  15491. resolutionPlusOneList = haveResolution.filter(function (elem) {
  15492. return elem.attributes.RESOLUTION.width > width || elem.attributes.RESOLUTION.height > height;
  15493. });
15494. // find all the variants that have the same smallest resolution
  15495. resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (elem) {
  15496. return elem.attributes.RESOLUTION.width === resolutionPlusOneList[0].attributes.RESOLUTION.width && elem.attributes.RESOLUTION.height === resolutionPlusOneList[0].attributes.RESOLUTION.height;
  15497. });
  15498. // ensure that we also pick the highest bandwidth variant that
  15499. // is just-larger-than the video player
  15500. resolutionPlusOne = resolutionPlusOneSmallest.filter(function (elem) {
  15501. return elem.attributes.BANDWIDTH === resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1].attributes.BANDWIDTH;
  15502. })[0];
  15503. }
  15504. // fallback chain of variants
  15505. return resolutionPlusOne || resolutionBestVariant || bandwidthBestVariant || sortedPlaylists[0];
  15506. };
  15507. // HLS is a source handler, not a tech. Make sure attempts to use it
  15508. // as one do not cause exceptions.
  15509. Hls.canPlaySource = function () {
  15510. return _videoJs2['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
  15511. };
  15512. /**
  15513. * Whether the browser has built-in HLS support.
  15514. */
  15515. Hls.supportsNativeHls = (function () {
  15516. var video = _globalDocument2['default'].createElement('video');
  15517. // native HLS is definitely not supported if HTML5 video isn't
  15518. if (!_videoJs2['default'].getTech('Html5').isSupported()) {
  15519. return false;
  15520. }
  15521. // HLS manifests can go by many mime-types
  15522. var canPlay = [
15523. // Apple sanctioned
  15524. 'application/vnd.apple.mpegurl',
  15525. // Apple sanctioned for backwards compatibility
  15526. 'audio/mpegurl',
  15527. // Very common
  15528. 'audio/x-mpegurl',
  15529. // Very common
  15530. 'application/x-mpegurl',
  15531. // Included for completeness
  15532. 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
  15533. return canPlay.some(function (canItPlay) {
  15534. return (/maybe|probably/i.test(video.canPlayType(canItPlay))
  15535. );
  15536. });
  15537. })();
  15538. /**
  15539. * HLS is a source handler, not a tech. Make sure attempts to use it
  15540. * as one do not cause exceptions.
  15541. */
  15542. Hls.isSupported = function () {
  15543. return _videoJs2['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
  15544. };
  15545. var USER_AGENT = _globalWindow2['default'].navigator && _globalWindow2['default'].navigator.userAgent || '';
  15546. /**
  15547. * Determines whether the browser supports a change in the audio configuration
  15548. * during playback. Currently only Firefox 48 and below do not support this.
15549. * window.isSecureContext is a property that was added to window in Firefox 49,
  15550. * so we can use it to detect Firefox 49+.
  15551. *
  15552. * @return {Boolean} Whether the browser supports audio config change during playback
  15553. */
  15554. Hls.supportsAudioInfoChange_ = function () {
  15555. if (_videoJs2['default'].browser.IS_FIREFOX) {
  15556. var firefoxVersionMap = /Firefox\/([\d.]+)/i.exec(USER_AGENT);
  15557. var version = parseInt(firefoxVersionMap[1], 10);
  15558. return version >= 49;
  15559. }
  15560. return true;
  15561. };
  15562. var Component = _videoJs2['default'].getComponent('Component');
  15563. /**
  15564. * The Hls Handler object, where we orchestrate all of the parts
  15565. * of HLS to interact with video.js
  15566. *
  15567. * @class HlsHandler
  15568. * @extends videojs.Component
15569. * @param {Object} source the source object
  15570. * @param {Tech} tech the parent tech object
  15571. * @param {Object} options optional and required options
  15572. */
  15573. var HlsHandler = (function (_Component) {
  15574. _inherits(HlsHandler, _Component);
  15575. function HlsHandler(source, tech, options) {
  15576. var _this = this;
  15577. _classCallCheck(this, HlsHandler);
  15578. _get(Object.getPrototypeOf(HlsHandler.prototype), 'constructor', this).call(this, tech);
  15579. // tech.player() is deprecated but setup a reference to HLS for
  15580. // backwards-compatibility
  15581. if (tech.options_ && tech.options_.playerId) {
  15582. var _player = (0, _videoJs2['default'])(tech.options_.playerId);
  15583. if (!_player.hasOwnProperty('hls')) {
  15584. Object.defineProperty(_player, 'hls', {
  15585. get: function get() {
  15586. _videoJs2['default'].log.warn('player.hls is deprecated. Use player.tech_.hls instead.');
  15587. return _this;
  15588. }
  15589. });
  15590. }
  15591. }
15592. // overriding native HLS only works if audio tracks have been emulated;
  15593. // error early if we're misconfigured:
  15594. if (_videoJs2['default'].options.hls.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
  15595. throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
  15596. }
  15597. this.tech_ = tech;
  15598. this.source_ = source;
  15599. this.stats = {};
  15600. this.ignoreNextSeekingEvent_ = false;
  15601. // handle global & Source Handler level options
  15602. this.options_ = _videoJs2['default'].mergeOptions(_videoJs2['default'].options.hls || {}, options.hls);
  15603. this.setOptions_();
  15604. // listen for fullscreenchange events for this player so that we
  15605. // can adjust our quality selection quickly
  15606. this.on(_globalDocument2['default'], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
  15607. var fullscreenElement = _globalDocument2['default'].fullscreenElement || _globalDocument2['default'].webkitFullscreenElement || _globalDocument2['default'].mozFullScreenElement || _globalDocument2['default'].msFullscreenElement;
  15608. if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
  15609. _this.masterPlaylistController_.fastQualityChange_();
  15610. }
  15611. });
  15612. this.on(this.tech_, 'seeking', function () {
  15613. if (this.ignoreNextSeekingEvent_) {
  15614. this.ignoreNextSeekingEvent_ = false;
  15615. return;
  15616. }
  15617. this.setCurrentTime(this.tech_.currentTime());
  15618. });
  15619. this.on(this.tech_, 'error', function () {
  15620. if (this.masterPlaylistController_) {
  15621. this.masterPlaylistController_.pauseLoading();
  15622. }
  15623. });
  15624. this.audioTrackChange_ = function () {
  15625. _this.masterPlaylistController_.setupAudio();
  15626. };
  15627. this.textTrackChange_ = function () {
  15628. _this.masterPlaylistController_.setupSubtitles();
  15629. };
  15630. this.on(this.tech_, 'play', this.play);
  15631. }
  15632. /**
  15633. * The Source Handler object, which informs video.js what additional
  15634. * MIME types are supported and sets up playback. It is registered
  15635. * automatically to the appropriate tech based on the capabilities of
  15636. * the browser it is running in. It is not necessary to use or modify
  15637. * this object in normal usage.
  15638. */
  15639. _createClass(HlsHandler, [{
  15640. key: 'setOptions_',
  15641. value: function setOptions_() {
  15642. var _this2 = this;
  15643. // defaults
  15644. this.options_.withCredentials = this.options_.withCredentials || false;
  15645. if (typeof this.options_.blacklistDuration !== 'number') {
  15646. this.options_.blacklistDuration = 5 * 60;
  15647. }
  15648. // start playlist selection at a reasonable bandwidth for
  15649. // broadband internet
  15650. // 0.5 MB/s
  15651. if (typeof this.options_.bandwidth !== 'number') {
  15652. this.options_.bandwidth = 4194304;
  15653. }
  15654. // grab options passed to player.src
  15655. ['withCredentials', 'bandwidth'].forEach(function (option) {
  15656. if (typeof _this2.source_[option] !== 'undefined') {
  15657. _this2.options_[option] = _this2.source_[option];
  15658. }
  15659. });
  15660. this.bandwidth = this.options_.bandwidth;
  15661. }
  15662. /**
  15663. * called when player.src gets called, handle a new source
  15664. *
  15665. * @param {Object} src the source object to handle
  15666. */
  15667. }, {
  15668. key: 'src',
  15669. value: function src(_src) {
  15670. var _this3 = this;
  15671. // do nothing if the src is falsey
  15672. if (!_src) {
  15673. return;
  15674. }
  15675. this.setOptions_();
  15676. // add master playlist controller options
  15677. this.options_.url = this.source_.src;
  15678. this.options_.tech = this.tech_;
  15679. this.options_.externHls = Hls;
  15680. this.masterPlaylistController_ = new _masterPlaylistController.MasterPlaylistController(this.options_);
  15681. this.playbackWatcher_ = new _playbackWatcher2['default'](_videoJs2['default'].mergeOptions(this.options_, {
  15682. seekable: function seekable() {
  15683. return _this3.seekable();
  15684. }
  15685. }));
  15686. this.masterPlaylistController_.on('error', function () {
  15687. var player = _videoJs2['default'].players[_this3.tech_.options_.playerId];
  15688. player.error(_this3.masterPlaylistController_.error);
  15689. });
  15690. // `this` in selectPlaylist should be the HlsHandler for backwards
  15691. // compatibility with < v2
  15692. this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : Hls.STANDARD_PLAYLIST_SELECTOR.bind(this);
  15693. // re-expose some internal objects for backwards compatibility with < v2
  15694. this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
  15695. this.mediaSource = this.masterPlaylistController_.mediaSource;
  15696. // Proxy assignment of some properties to the master playlist
  15697. // controller. Using a custom property for backwards compatibility
  15698. // with < v2
  15699. Object.defineProperties(this, {
  15700. selectPlaylist: {
  15701. get: function get() {
  15702. return this.masterPlaylistController_.selectPlaylist;
  15703. },
  15704. set: function set(selectPlaylist) {
  15705. this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
  15706. }
  15707. },
  15708. throughput: {
  15709. get: function get() {
  15710. return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
  15711. },
  15712. set: function set(throughput) {
  15713. this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput;
  15714. // By setting `count` to 1 the throughput value becomes the starting value
  15715. // for the cumulative average
  15716. this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
  15717. }
  15718. },
  15719. bandwidth: {
  15720. get: function get() {
  15721. return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
  15722. },
  15723. set: function set(bandwidth) {
  15724. this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth;
  15725. // setting the bandwidth manually resets the throughput counter
15726. // `count` is set to zero so that the current value of `rate` isn't included
  15727. // in the cumulative average
  15728. this.masterPlaylistController_.mainSegmentLoader_.throughput = { rate: 0, count: 0 };
  15729. }
  15730. },
  15731. /**
  15732. * `systemBandwidth` is a combination of two serial processes bit-rates. The first
  15733. * is the network bitrate provided by `bandwidth` and the second is the bitrate of
  15734. * the entire process after that - decryption, transmuxing, and appending - provided
  15735. * by `throughput`.
  15736. *
  15737. * Since the two process are serial, the overall system bandwidth is given by:
  15738. * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
  15739. */
  15740. systemBandwidth: {
  15741. get: function get() {
  15742. var invBandwidth = 1 / (this.bandwidth || 1);
  15743. var invThroughput = undefined;
  15744. if (this.throughput > 0) {
  15745. invThroughput = 1 / this.throughput;
  15746. } else {
  15747. invThroughput = 0;
  15748. }
  15749. var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
  15750. return systemBitrate;
  15751. },
  15752. set: function set() {
  15753. _videoJs2['default'].log.error('The "systemBandwidth" property is read-only');
  15754. }
  15755. }
  15756. });
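// Worked example for the harmonic combination documented above: with a
// measured network bandwidth of 5,000,000 bps and a decrypt/transmux/append
// throughput of 20,000,000 bps,
//   1 / (1 / 5e6 + 1 / 20e6) = 4,000,000 bps
// so the effective rate always comes out below the slower of the two serial
// stages (here the 5 Mbps network leg).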
  15757. Object.defineProperties(this.stats, {
  15758. bandwidth: {
  15759. get: function get() {
  15760. return _this3.bandwidth || 0;
  15761. },
  15762. enumerable: true
  15763. },
  15764. mediaRequests: {
  15765. get: function get() {
  15766. return _this3.masterPlaylistController_.mediaRequests_() || 0;
  15767. },
  15768. enumerable: true
  15769. },
  15770. mediaRequestsAborted: {
  15771. get: function get() {
  15772. return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
  15773. },
  15774. enumerable: true
  15775. },
  15776. mediaRequestsTimedout: {
  15777. get: function get() {
  15778. return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
  15779. },
  15780. enumerable: true
  15781. },
  15782. mediaRequestsErrored: {
  15783. get: function get() {
  15784. return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
  15785. },
  15786. enumerable: true
  15787. },
  15788. mediaTransferDuration: {
  15789. get: function get() {
  15790. return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
  15791. },
  15792. enumerable: true
  15793. },
  15794. mediaBytesTransferred: {
  15795. get: function get() {
  15796. return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
  15797. },
  15798. enumerable: true
  15799. },
  15800. mediaSecondsLoaded: {
  15801. get: function get() {
  15802. return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
  15803. },
  15804. enumerable: true
  15805. }
  15806. });
  15807. this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
  15808. this.masterPlaylistController_.on('sourceopen', function () {
  15809. _this3.tech_.audioTracks().addEventListener('change', _this3.audioTrackChange_);
  15810. _this3.tech_.remoteTextTracks().addEventListener('change', _this3.textTrackChange_);
  15811. });
  15812. this.masterPlaylistController_.on('selectedinitialmedia', function () {
  15813. // Add the manual rendition mix-in to HlsHandler
  15814. (0, _renditionMixin2['default'])(_this3);
  15815. });
  15816. this.masterPlaylistController_.on('audioupdate', function () {
  15817. // clear current audioTracks
  15818. _this3.tech_.clearTracks('audio');
  15819. _this3.masterPlaylistController_.activeAudioGroup().forEach(function (audioTrack) {
  15820. _this3.tech_.audioTracks().addTrack(audioTrack);
  15821. });
  15822. });
  15823. // the bandwidth of the primary segment loader is our best
  15824. // estimate of overall bandwidth
  15825. this.on(this.masterPlaylistController_, 'progress', function () {
  15826. this.tech_.trigger('progress');
  15827. });
  15828. // In the live case, we need to ignore the very first `seeking` event since
  15829. // that will be the result of the seek-to-live behavior
  15830. this.on(this.masterPlaylistController_, 'firstplay', function () {
  15831. this.ignoreNextSeekingEvent_ = true;
  15832. });
  15833. this.tech_.ready(function () {
  15834. return _this3.setupQualityLevels_();
  15835. });
  15836. // do nothing if the tech has been disposed already
  15837. // this can occur if someone sets the src in player.ready(), for instance
  15838. if (!this.tech_.el()) {
  15839. return;
  15840. }
  15841. this.tech_.src(_videoJs2['default'].URL.createObjectURL(this.masterPlaylistController_.mediaSource));
  15842. }
  15843. /**
  15844. * Initializes the quality levels and sets listeners to update them.
  15845. *
  15846. * @method setupQualityLevels_
  15847. * @private
  15848. */
  15849. }, {
  15850. key: 'setupQualityLevels_',
  15851. value: function setupQualityLevels_() {
  15852. var _this4 = this;
  15853. var player = _videoJs2['default'].players[this.tech_.options_.playerId];
  15854. if (player && player.qualityLevels) {
  15855. this.qualityLevels_ = player.qualityLevels();
  15856. this.masterPlaylistController_.on('selectedinitialmedia', function () {
  15857. handleHlsLoadedMetadata(_this4.qualityLevels_, _this4);
  15858. });
  15859. this.playlists.on('mediachange', function () {
  15860. handleHlsMediaChange(_this4.qualityLevels_, _this4.playlists);
  15861. });
  15862. }
  15863. }
  15864. /**
  15865. * a helper for grabbing the active audio group from MasterPlaylistController
  15866. *
  15867. * @private
  15868. */
  15869. }, {
  15870. key: 'activeAudioGroup_',
  15871. value: function activeAudioGroup_() {
  15872. return this.masterPlaylistController_.activeAudioGroup();
  15873. }
  15874. /**
  15875. * Begin playing the video.
  15876. */
  15877. }, {
  15878. key: 'play',
  15879. value: function play() {
  15880. this.masterPlaylistController_.play();
  15881. }
  15882. /**
  15883. * a wrapper around the function in MasterPlaylistController
  15884. */
  15885. }, {
  15886. key: 'setCurrentTime',
  15887. value: function setCurrentTime(currentTime) {
  15888. this.masterPlaylistController_.setCurrentTime(currentTime);
  15889. }
  15890. /**
  15891. * a wrapper around the function in MasterPlaylistController
  15892. */
  15893. }, {
  15894. key: 'duration',
  15895. value: function duration() {
  15896. return this.masterPlaylistController_.duration();
  15897. }
  15898. /**
  15899. * a wrapper around the function in MasterPlaylistController
  15900. */
  15901. }, {
  15902. key: 'seekable',
  15903. value: function seekable() {
  15904. return this.masterPlaylistController_.seekable();
  15905. }
  15906. /**
  15907. * Abort all outstanding work and cleanup.
  15908. */
  15909. }, {
  15910. key: 'dispose',
  15911. value: function dispose() {
  15912. if (this.playbackWatcher_) {
  15913. this.playbackWatcher_.dispose();
  15914. }
  15915. if (this.masterPlaylistController_) {
  15916. this.masterPlaylistController_.dispose();
  15917. }
  15918. if (this.qualityLevels_) {
  15919. this.qualityLevels_.dispose();
  15920. }
  15921. this.tech_.audioTracks().removeEventListener('change', this.audioTrackChange_);
  15922. this.tech_.remoteTextTracks().removeEventListener('change', this.textTrackChange_);
  15923. _get(Object.getPrototypeOf(HlsHandler.prototype), 'dispose', this).call(this);
  15924. }
  15925. }]);
  15926. return HlsHandler;
  15927. })(Component);
  15928. var HlsSourceHandler = function HlsSourceHandler(mode) {
  15929. return {
  15930. canHandleSource: function canHandleSource(srcObj) {
15931. // this forces video.js to skip this tech/mode if it's not the one we have been
15932. // overridden to use, by returning that we cannot handle the source.
  15933. if (_videoJs2['default'].options.hls && _videoJs2['default'].options.hls.mode && _videoJs2['default'].options.hls.mode !== mode) {
  15934. return false;
  15935. }
  15936. return HlsSourceHandler.canPlayType(srcObj.type);
  15937. },
  15938. handleSource: function handleSource(source, tech, options) {
  15939. if (mode === 'flash') {
  15940. // We need to trigger this asynchronously to give others the chance
  15941. // to bind to the event when a source is set at player creation
  15942. tech.setTimeout(function () {
  15943. tech.trigger('loadstart');
  15944. }, 1);
  15945. }
  15946. var settings = _videoJs2['default'].mergeOptions(options, { hls: { mode: mode } });
  15947. tech.hls = new HlsHandler(source, tech, settings);
  15948. tech.hls.xhr = (0, _xhr2['default'])();
  15949. tech.hls.src(source.src);
  15950. return tech.hls;
  15951. },
  15952. canPlayType: function canPlayType(type) {
  15953. if (HlsSourceHandler.canPlayType(type)) {
  15954. return 'maybe';
  15955. }
  15956. return '';
  15957. }
  15958. };
  15959. };
  15960. /**
  15961. * A comparator function to sort two playlist object by bandwidth.
  15962. *
  15963. * @param {Object} left a media playlist object
  15964. * @param {Object} right a media playlist object
  15965. * @return {Number} Greater than zero if the bandwidth attribute of
  15966. * left is greater than the corresponding attribute of right. Less
  15967. * than zero if the bandwidth of right is greater than left and
  15968. * exactly zero if the two are equal.
  15969. */
  15970. Hls.comparePlaylistBandwidth = function (left, right) {
  15971. var leftBandwidth = undefined;
  15972. var rightBandwidth = undefined;
  15973. if (left.attributes && left.attributes.BANDWIDTH) {
  15974. leftBandwidth = left.attributes.BANDWIDTH;
  15975. }
  15976. leftBandwidth = leftBandwidth || _globalWindow2['default'].Number.MAX_VALUE;
  15977. if (right.attributes && right.attributes.BANDWIDTH) {
  15978. rightBandwidth = right.attributes.BANDWIDTH;
  15979. }
  15980. rightBandwidth = rightBandwidth || _globalWindow2['default'].Number.MAX_VALUE;
  15981. return leftBandwidth - rightBandwidth;
  15982. };
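// Illustrative usage with hypothetical data: together with stableSort above,
// this comparator orders variants from lowest to highest advertised bitrate,
// and playlists missing a BANDWIDTH attribute sort last via Number.MAX_VALUE.
//
//   var variants = [
//     { attributes: { BANDWIDTH: 2e6 } },
//     { attributes: {} },
//     { attributes: { BANDWIDTH: 8e5 } }
//   ];
//   variants.slice().sort(Hls.comparePlaylistBandwidth);
//   // -> 8e5, then 2e6, then the attribute-less playlist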
  15983. /**
  15984. * A comparator function to sort two playlist object by resolution (width).
  15985. * @param {Object} left a media playlist object
  15986. * @param {Object} right a media playlist object
  15987. * @return {Number} Greater than zero if the resolution.width attribute of
  15988. * left is greater than the corresponding attribute of right. Less
  15989. * than zero if the resolution.width of right is greater than left and
  15990. * exactly zero if the two are equal.
  15991. */
  15992. Hls.comparePlaylistResolution = function (left, right) {
  15993. var leftWidth = undefined;
  15994. var rightWidth = undefined;
  15995. if (left.attributes && left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
  15996. leftWidth = left.attributes.RESOLUTION.width;
  15997. }
  15998. leftWidth = leftWidth || _globalWindow2['default'].Number.MAX_VALUE;
  15999. if (right.attributes && right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
  16000. rightWidth = right.attributes.RESOLUTION.width;
  16001. }
  16002. rightWidth = rightWidth || _globalWindow2['default'].Number.MAX_VALUE;
  16003. // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
16004. // have the same media dimensions/resolution
  16005. if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
  16006. return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
  16007. }
  16008. return leftWidth - rightWidth;
  16009. };
  16010. HlsSourceHandler.canPlayType = function (type) {
  16011. // No support for IE 10 or below
  16012. if (_videoJs2['default'].browser.IE_VERSION && _videoJs2['default'].browser.IE_VERSION <= 10) {
  16013. return false;
  16014. }
  16015. var mpegurlRE = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
  16016. // favor native HLS support if it's available
  16017. if (!_videoJs2['default'].options.hls.overrideNative && Hls.supportsNativeHls) {
  16018. return false;
  16019. }
  16020. return mpegurlRE.test(type);
  16021. };
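// Examples of MIME types matched by mpegurlRE above (assuming native HLS is
// not being favored): 'application/x-mpegurl', 'application/vnd.apple.mpegurl',
// 'audio/mpegurl' and 'video/x-mpegurl' all match, while a type such as
// 'application/dash+xml' does not and falls through to other source handlers.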
  16022. if (typeof _videoJs2['default'].MediaSource === 'undefined' || typeof _videoJs2['default'].URL === 'undefined') {
  16023. _videoJs2['default'].MediaSource = _videojsContribMediaSources.MediaSource;
  16024. _videoJs2['default'].URL = _videojsContribMediaSources.URL;
  16025. }
  16026. var flashTech = _videoJs2['default'].getTech('Flash');
  16027. // register source handlers with the appropriate techs
  16028. if (_videojsContribMediaSources.MediaSource.supportsNativeMediaSources()) {
  16029. _videoJs2['default'].getTech('Html5').registerSourceHandler(HlsSourceHandler('html5'), 0);
  16030. }
  16031. if (_globalWindow2['default'].Uint8Array && flashTech) {
  16032. flashTech.registerSourceHandler(HlsSourceHandler('flash'));
  16033. }
  16034. _videoJs2['default'].HlsHandler = HlsHandler;
  16035. _videoJs2['default'].HlsSourceHandler = HlsSourceHandler;
  16036. _videoJs2['default'].Hls = Hls;
  16037. if (!_videoJs2['default'].use) {
  16038. _videoJs2['default'].registerComponent('Hls', Hls);
  16039. }
  16040. _videoJs2['default'].m3u8 = _m3u8Parser2['default'];
  16041. _videoJs2['default'].options.hls = _videoJs2['default'].options.hls || {};
  16042. if (_videoJs2['default'].registerPlugin) {
  16043. _videoJs2['default'].registerPlugin('reloadSourceOnError', _reloadSourceOnError2['default']);
  16044. } else {
  16045. _videoJs2['default'].plugin('reloadSourceOnError', _reloadSourceOnError2['default']);
  16046. }
  16047. module.exports = {
  16048. Hls: Hls,
  16049. HlsHandler: HlsHandler,
  16050. HlsSourceHandler: HlsSourceHandler
  16051. };
  16052. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  16053. },{"./bin-utils":2,"./config":3,"./master-playlist-controller":5,"./playback-watcher":7,"./playlist":9,"./playlist-loader":8,"./reload-source-on-error":11,"./rendition-mixin":12,"./xhr":19,"aes-decrypter":23,"global/document":29,"global/window":30,"m3u8-parser":31,"videojs-contrib-media-sources":73}]},{},[76])(76)
  16054. });