UNPKG

655 kBJavaScriptView Raw
1/**
2 * videojs-contrib-hls
3 * @version 5.8.2
4 * @copyright 2017 Brightcove, Inc
5 * @license Apache-2.0
6 */
7(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.videojsContribHls = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
8/**
9 * @file ad-cue-tags.js
10 */
11'use strict';
12
13Object.defineProperty(exports, '__esModule', {
14 value: true
15});
16
17var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })();
18
19function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
20
21var _globalWindow = require('global/window');
22
23var _globalWindow2 = _interopRequireDefault(_globalWindow);
24
25/**
26 * Searches for an ad cue that overlaps with the given mediaTime
27 */
28var findAdCue = function findAdCue(track, mediaTime) {
29 var cues = track.cues;
30
31 for (var i = 0; i < cues.length; i++) {
32 var cue = cues[i];
33
34 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
35 return cue;
36 }
37 }
38 return null;
39};
40
41var updateAdCues = function updateAdCues(media, track) {
42 var offset = arguments.length <= 2 || arguments[2] === undefined ? 0 : arguments[2];
43
44 if (!media.segments) {
45 return;
46 }
47
48 var mediaTime = offset;
49 var cue = undefined;
50
51 for (var i = 0; i < media.segments.length; i++) {
52 var segment = media.segments[i];
53
54 if (!cue) {
55 // Since the cues will span for at least the segment duration, adding a fudge
56 // factor of half segment duration will prevent duplicate cues from being
57 // created when timing info is not exact (e.g. cue start time initialized
58 // at 10.006677, but next call mediaTime is 10.003332 )
59 cue = findAdCue(track, mediaTime + segment.duration / 2);
60 }
61
62 if (cue) {
63 if ('cueIn' in segment) {
64 // Found a CUE-IN so end the cue
65 cue.endTime = mediaTime;
66 cue.adEndTime = mediaTime;
67 mediaTime += segment.duration;
68 cue = null;
69 continue;
70 }
71
72 if (mediaTime < cue.endTime) {
73 // Already processed this mediaTime for this cue
74 mediaTime += segment.duration;
75 continue;
76 }
77
78 // otherwise extend cue until a CUE-IN is found
79 cue.endTime += segment.duration;
80 } else {
81 if ('cueOut' in segment) {
82 cue = new _globalWindow2['default'].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
83 cue.adStartTime = mediaTime;
84 // Assumes tag format to be
85 // #EXT-X-CUE-OUT:30
86 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
87 track.addCue(cue);
88 }
89
90 if ('cueOutCont' in segment) {
91 // Entered into the middle of an ad cue
92 var adOffset = undefined;
93 var adTotal = undefined;
94
95 // Assumes tag formate to be
96 // #EXT-X-CUE-OUT-CONT:10/30
97
98 var _segment$cueOutCont$split$map = segment.cueOutCont.split('/').map(parseFloat);
99
100 var _segment$cueOutCont$split$map2 = _slicedToArray(_segment$cueOutCont$split$map, 2);
101
102 adOffset = _segment$cueOutCont$split$map2[0];
103 adTotal = _segment$cueOutCont$split$map2[1];
104
105 cue = new _globalWindow2['default'].VTTCue(mediaTime, mediaTime + segment.duration, '');
106 cue.adStartTime = mediaTime - adOffset;
107 cue.adEndTime = cue.adStartTime + adTotal;
108 track.addCue(cue);
109 }
110 }
111 mediaTime += segment.duration;
112 }
113};
114
115exports['default'] = {
116 updateAdCues: updateAdCues,
117 findAdCue: findAdCue
118};
119module.exports = exports['default'];
120},{"global/window":30}],2:[function(require,module,exports){
121/**
122 * @file bin-utils.js
123 */
124
125/**
126 * convert a TimeRange to text
127 *
128 * @param {TimeRange} range the timerange to use for conversion
129 * @param {Number} i the iterator on the range to convert
130 */
131'use strict';
132
133Object.defineProperty(exports, '__esModule', {
134 value: true
135});
136var textRange = function textRange(range, i) {
137 return range.start(i) + '-' + range.end(i);
138};
139
140/**
141 * format a number as hex string
142 *
143 * @param {Number} e The number
144 * @param {Number} i the iterator
145 */
146var formatHexString = function formatHexString(e, i) {
147 var value = e.toString(16);
148
149 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
150};
151var formatAsciiString = function formatAsciiString(e) {
152 if (e >= 0x20 && e < 0x7e) {
153 return String.fromCharCode(e);
154 }
155 return '.';
156};
157
158/**
159 * Creates an object for sending to a web worker modifying properties that are TypedArrays
160 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
161 *
162 * @param {Object} message
163 * Object of properties and values to send to the web worker
164 * @return {Object}
165 * Modified message with TypedArray values expanded
166 * @function createTransferableMessage
167 */
168var createTransferableMessage = function createTransferableMessage(message) {
169 var transferable = {};
170
171 Object.keys(message).forEach(function (key) {
172 var value = message[key];
173
174 if (ArrayBuffer.isView(value)) {
175 transferable[key] = {
176 bytes: value.buffer,
177 byteOffset: value.byteOffset,
178 byteLength: value.byteLength
179 };
180 } else {
181 transferable[key] = value;
182 }
183 });
184
185 return transferable;
186};
187
188/**
189 * Returns a unique string identifier for a media initialization
190 * segment.
191 */
192var initSegmentId = function initSegmentId(initSegment) {
193 var byterange = initSegment.byterange || {
194 length: Infinity,
195 offset: 0
196 };
197
198 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
199};
200
201/**
202 * utils to help dump binary data to the console
203 */
204var utils = {
205 hexDump: function hexDump(data) {
206 var bytes = Array.prototype.slice.call(data);
207 var step = 16;
208 var result = '';
209 var hex = undefined;
210 var ascii = undefined;
211
212 for (var j = 0; j < bytes.length / step; j++) {
213 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
214 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
215 result += hex + ' ' + ascii + '\n';
216 }
217 return result;
218 },
219 tagDump: function tagDump(tag) {
220 return utils.hexDump(tag.bytes);
221 },
222 textRanges: function textRanges(ranges) {
223 var result = '';
224 var i = undefined;
225
226 for (i = 0; i < ranges.length; i++) {
227 result += textRange(ranges, i) + ' ';
228 }
229 return result;
230 },
231 createTransferableMessage: createTransferableMessage,
232 initSegmentId: initSegmentId
233};
234
235exports['default'] = utils;
236module.exports = exports['default'];
237},{}],3:[function(require,module,exports){
238"use strict";
239
240Object.defineProperty(exports, "__esModule", {
241 value: true
242});
243exports["default"] = {
244 GOAL_BUFFER_LENGTH: 30,
245 MAX_GOAL_BUFFER_LENGTH: 60,
246 GOAL_BUFFER_LENGTH_RATE: 1,
247 // A fudge factor to apply to advertised playlist bitrates to account for
248 // temporary flucations in client bandwidth
249 BANDWIDTH_VARIANCE: 1.2,
250 // How much of the buffer must be filled before we consider upswitching
251 BUFFER_LOW_WATER_LINE: 0,
252 MAX_BUFFER_LOW_WATER_LINE: 30,
253 BUFFER_LOW_WATER_LINE_RATE: 1
254};
255module.exports = exports["default"];
256},{}],4:[function(require,module,exports){
257'use strict';
258
259Object.defineProperty(exports, '__esModule', {
260 value: true
261});
262
263function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
264
265var _globalWindow = require('global/window');
266
267var _globalWindow2 = _interopRequireDefault(_globalWindow);
268
269var _aesDecrypter = require('aes-decrypter');
270
271var _binUtils = require('./bin-utils');
272
273/**
274 * Our web worker interface so that things can talk to aes-decrypter
275 * that will be running in a web worker. the scope is passed to this by
276 * webworkify.
277 *
278 * @param {Object} self
279 * the scope for the web worker
280 */
281var DecrypterWorker = function DecrypterWorker(self) {
282 self.onmessage = function (event) {
283 var data = event.data;
284 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
285 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
286 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
287
288 /* eslint-disable no-new, handle-callback-err */
289 new _aesDecrypter.Decrypter(encrypted, key, iv, function (err, bytes) {
290 _globalWindow2['default'].postMessage((0, _binUtils.createTransferableMessage)({
291 source: data.source,
292 decrypted: bytes
293 }), [bytes.buffer]);
294 });
295 /* eslint-enable */
296 };
297};
298
299exports['default'] = function (self) {
300 return new DecrypterWorker(self);
301};
302
303module.exports = exports['default'];
304},{"./bin-utils":2,"aes-decrypter":23,"global/window":30}],5:[function(require,module,exports){
305(function (global){
306/**
307 * @file master-playlist-controller.js
308 */
309'use strict';
310
311Object.defineProperty(exports, '__esModule', {
312 value: true
313});
314
315var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
316
317var _get = function get(_x2, _x3, _x4) { var _again = true; _function: while (_again) { var object = _x2, property = _x3, receiver = _x4; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x2 = parent; _x3 = property; _x4 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
318
319function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
320
321function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
322
323function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
324
325var _playlistLoader = require('./playlist-loader');
326
327var _playlistLoader2 = _interopRequireDefault(_playlistLoader);
328
329var _segmentLoader = require('./segment-loader');
330
331var _segmentLoader2 = _interopRequireDefault(_segmentLoader);
332
333var _vttSegmentLoader = require('./vtt-segment-loader');
334
335var _vttSegmentLoader2 = _interopRequireDefault(_vttSegmentLoader);
336
337var _ranges = require('./ranges');
338
339var _ranges2 = _interopRequireDefault(_ranges);
340
341var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
342
343var _videoJs2 = _interopRequireDefault(_videoJs);
344
345var _adCueTags = require('./ad-cue-tags');
346
347var _adCueTags2 = _interopRequireDefault(_adCueTags);
348
349var _syncController = require('./sync-controller');
350
351var _syncController2 = _interopRequireDefault(_syncController);
352
353var _videojsContribMediaSourcesEs5CodecUtils = require('videojs-contrib-media-sources/es5/codec-utils');
354
355var _webworkify = require('webworkify');
356
357var _webworkify2 = _interopRequireDefault(_webworkify);
358
359var _decrypterWorker = require('./decrypter-worker');
360
361var _decrypterWorker2 = _interopRequireDefault(_decrypterWorker);
362
363var _config = require('./config');
364
365var _config2 = _interopRequireDefault(_config);
366
367var Hls = undefined;
368
369// Default codec parameters if none were provided for video and/or audio
370var defaultCodecs = {
371 videoCodec: 'avc1',
372 videoObjectTypeIndicator: '.4d400d',
373 // AAC-LC
374 audioProfile: '2'
375};
376
377// SegmentLoader stats that need to have each loader's
378// values summed to calculate the final value
379var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];
380var sumLoaderStat = function sumLoaderStat(stat) {
381 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
382};
383
384/**
385 * determine if an object a is differnt from
386 * and object b. both only having one dimensional
387 * properties
388 *
389 * @param {Object} a object one
390 * @param {Object} b object two
391 * @return {Boolean} if the object has changed or not
392 */
393var objectChanged = function objectChanged(a, b) {
394 if (typeof a !== typeof b) {
395 return true;
396 }
397 // if we have a different number of elements
398 // something has changed
399 if (Object.keys(a).length !== Object.keys(b).length) {
400 return true;
401 }
402
403 for (var prop in a) {
404 if (a[prop] !== b[prop]) {
405 return true;
406 }
407 }
408 return false;
409};
410
411/**
412 * Parses a codec string to retrieve the number of codecs specified,
413 * the video codec and object type indicator, and the audio profile.
414 *
415 * @private
416 */
417var parseCodecs = function parseCodecs(codecs) {
418 var result = {
419 codecCount: 0
420 };
421 var parsed = undefined;
422
423 result.codecCount = codecs.split(',').length;
424 result.codecCount = result.codecCount || 2;
425
426 // parse the video codec
427 parsed = /(^|\s|,)+(avc1)([^ ,]*)/i.exec(codecs);
428 if (parsed) {
429 result.videoCodec = parsed[2];
430 result.videoObjectTypeIndicator = parsed[3];
431 }
432
433 // parse the last field of the audio codec
434 result.audioProfile = /(^|\s|,)+mp4a.[0-9A-Fa-f]+\.([0-9A-Fa-f]+)/i.exec(codecs);
435 result.audioProfile = result.audioProfile && result.audioProfile[2];
436
437 return result;
438};
439
440/**
441 * Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
442 * standard `avc1.<hhhhhh>`.
443 *
444 * @param codecString {String} the codec string
445 * @return {String} the codec string with old apple-style codecs replaced
446 *
447 * @private
448 */
449var mapLegacyAvcCodecs_ = function mapLegacyAvcCodecs_(codecString) {
450 return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
451 return (0, _videojsContribMediaSourcesEs5CodecUtils.translateLegacyCodecs)([match])[0];
452 });
453};
454
455exports.mapLegacyAvcCodecs_ = mapLegacyAvcCodecs_;
456/**
457 * Build a media mime-type string from a set of parameters
458 * @param {String} type either 'audio' or 'video'
459 * @param {String} container either 'mp2t' or 'mp4'
460 * @param {Array} codecs an array of codec strings to add
461 * @return {String} a valid media mime-type
462 */
463var makeMimeTypeString = function makeMimeTypeString(type, container, codecs) {
464 // The codecs array is filtered so that falsey values are
465 // dropped and don't cause Array#join to create spurious
466 // commas
467 return type + '/' + container + '; codecs="' + codecs.filter(function (c) {
468 return !!c;
469 }).join(', ') + '"';
470};
471
472/**
473 * Returns the type container based on information in the playlist
474 * @param {Playlist} media the current media playlist
475 * @return {String} a valid media container type
476 */
477var getContainerType = function getContainerType(media) {
478 // An initialization segment means the media playlist is an iframe
479 // playlist or is using the mp4 container. We don't currently
480 // support iframe playlists, so assume this is signalling mp4
481 // fragments.
482 if (media.segments && media.segments.length && media.segments[0].map) {
483 return 'mp4';
484 }
485 return 'mp2t';
486};
487
488/**
489 * Returns a set of codec strings parsed from the playlist or the default
490 * codec strings if no codecs were specified in the playlist
491 * @param {Playlist} media the current media playlist
492 * @return {Object} an object with the video and audio codecs
493 */
494var getCodecs = function getCodecs(media) {
495 // if the codecs were explicitly specified, use them instead of the
496 // defaults
497 var mediaAttributes = media.attributes || {};
498
499 if (mediaAttributes.CODECS) {
500 return parseCodecs(mediaAttributes.CODECS);
501 }
502 return defaultCodecs;
503};
504
505/**
506 * Calculates the MIME type strings for a working configuration of
507 * SourceBuffers to play variant streams in a master playlist. If
508 * there is no possible working configuration, an empty array will be
509 * returned.
510 *
511 * @param master {Object} the m3u8 object for the master playlist
512 * @param media {Object} the m3u8 object for the variant playlist
513 * @return {Array} the MIME type strings. If the array has more than
514 * one entry, the first element should be applied to the video
515 * SourceBuffer and the second to the audio SourceBuffer.
516 *
517 * @private
518 */
519var mimeTypesForPlaylist_ = function mimeTypesForPlaylist_(master, media) {
520 var containerType = getContainerType(media);
521 var codecInfo = getCodecs(media);
522 var mediaAttributes = media.attributes || {};
523 // Default condition for a traditional HLS (no demuxed audio/video)
524 var isMuxed = true;
525 var isMaat = false;
526
527 if (!media) {
528 // Not enough information
529 return [];
530 }
531
532 if (master.mediaGroups.AUDIO && mediaAttributes.AUDIO) {
533 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
534
535 // Handle the case where we are in a multiple-audio track scenario
536 if (audioGroup) {
537 isMaat = true;
538 // Start with the everything demuxed then...
539 isMuxed = false;
540 // ...check to see if any audio group tracks are muxed (ie. lacking a uri)
541 for (var groupId in audioGroup) {
542 if (!audioGroup[groupId].uri) {
543 isMuxed = true;
544 break;
545 }
546 }
547 }
548 }
549
550 // HLS with multiple-audio tracks must always get an audio codec.
551 // Put another way, there is no way to have a video-only multiple-audio HLS!
552 if (isMaat && !codecInfo.audioProfile) {
553 _videoJs2['default'].log.warn('Multiple audio tracks present but no audio codec string is specified. ' + 'Attempting to use the default audio codec (mp4a.40.2)');
554 codecInfo.audioProfile = defaultCodecs.audioProfile;
555 }
556
557 // Generate the final codec strings from the codec object generated above
558 var codecStrings = {};
559
560 if (codecInfo.videoCodec) {
561 codecStrings.video = '' + codecInfo.videoCodec + codecInfo.videoObjectTypeIndicator;
562 }
563
564 if (codecInfo.audioProfile) {
565 codecStrings.audio = 'mp4a.40.' + codecInfo.audioProfile;
566 }
567
568 // Finally, make and return an array with proper mime-types depending on
569 // the configuration
570 var justAudio = makeMimeTypeString('audio', containerType, [codecStrings.audio]);
571 var justVideo = makeMimeTypeString('video', containerType, [codecStrings.video]);
572 var bothVideoAudio = makeMimeTypeString('video', containerType, [codecStrings.video, codecStrings.audio]);
573
574 if (isMaat) {
575 if (!isMuxed && codecStrings.video) {
576 return [justVideo, justAudio];
577 }
578 // There exists the possiblity that this will return a `video/container`
579 // mime-type for the first entry in the array even when there is only audio.
580 // This doesn't appear to be a problem and simplifies the code.
581 return [bothVideoAudio, justAudio];
582 }
583
584 // If there is ano video codec at all, always just return a single
585 // audio/<container> mime-type
586 if (!codecStrings.video) {
587 return [justAudio];
588 }
589
590 // When not using separate audio media groups, audio and video is
591 // *always* muxed
592 return [bothVideoAudio];
593};
594
595exports.mimeTypesForPlaylist_ = mimeTypesForPlaylist_;
596/**
597 * the master playlist controller controller all interactons
598 * between playlists and segmentloaders. At this time this mainly
599 * involves a master playlist and a series of audio playlists
600 * if they are available
601 *
602 * @class MasterPlaylistController
603 * @extends videojs.EventTarget
604 */
605
606var MasterPlaylistController = (function (_videojs$EventTarget) {
607 _inherits(MasterPlaylistController, _videojs$EventTarget);
608
609 function MasterPlaylistController(options) {
610 var _this = this;
611
612 _classCallCheck(this, MasterPlaylistController);
613
614 _get(Object.getPrototypeOf(MasterPlaylistController.prototype), 'constructor', this).call(this);
615
616 var url = options.url;
617 var withCredentials = options.withCredentials;
618 var mode = options.mode;
619 var tech = options.tech;
620 var bandwidth = options.bandwidth;
621 var externHls = options.externHls;
622 var useCueTags = options.useCueTags;
623 var blacklistDuration = options.blacklistDuration;
624
625 if (!url) {
626 throw new Error('A non-empty playlist URL is required');
627 }
628
629 Hls = externHls;
630
631 this.withCredentials = withCredentials;
632 this.tech_ = tech;
633 this.hls_ = tech.hls;
634 this.mode_ = mode;
635 this.useCueTags_ = useCueTags;
636 this.blacklistDuration = blacklistDuration;
637 if (this.useCueTags_) {
638 this.cueTagsTrack_ = this.tech_.addTextTrack('metadata', 'ad-cues');
639 this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
640 }
641
642 this.requestOptions_ = {
643 withCredentials: this.withCredentials,
644 timeout: null
645 };
646
647 this.audioGroups_ = {};
648 this.subtitleGroups_ = { groups: {}, tracks: {} };
649
650 this.mediaSource = new _videoJs2['default'].MediaSource({ mode: mode });
651 this.audioinfo_ = null;
652 this.mediaSource.on('audioinfo', this.handleAudioinfoUpdate_.bind(this));
653
654 // load the media source into the player
655 this.mediaSource.addEventListener('sourceopen', this.handleSourceOpen_.bind(this));
656
657 this.seekable_ = _videoJs2['default'].createTimeRanges();
658 this.hasPlayed_ = function () {
659 return false;
660 };
661
662 this.syncController_ = new _syncController2['default'](options);
663 this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
664 kind: 'metadata',
665 label: 'segment-metadata'
666 }, true).track;
667
668 this.decrypter_ = (0, _webworkify2['default'])(_decrypterWorker2['default']);
669
670 var segmentLoaderSettings = {
671 hls: this.hls_,
672 mediaSource: this.mediaSource,
673 currentTime: this.tech_.currentTime.bind(this.tech_),
674 seekable: function seekable() {
675 return _this.seekable();
676 },
677 seeking: function seeking() {
678 return _this.tech_.seeking();
679 },
680 duration: function duration() {
681 return _this.mediaSource.duration;
682 },
683 hasPlayed: function hasPlayed() {
684 return _this.hasPlayed_();
685 },
686 goalBufferLength: function goalBufferLength() {
687 return _this.goalBufferLength();
688 },
689 bandwidth: bandwidth,
690 syncController: this.syncController_,
691 decrypter: this.decrypter_
692 };
693
694 // setup playlist loaders
695 this.masterPlaylistLoader_ = new _playlistLoader2['default'](url, this.hls_, this.withCredentials);
696 this.setupMasterPlaylistLoaderListeners_();
697 this.audioPlaylistLoader_ = null;
698 this.subtitlePlaylistLoader_ = null;
699
700 // setup segment loaders
701 // combined audio/video or just video when alternate audio track is selected
702 this.mainSegmentLoader_ = new _segmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderSettings, {
703 segmentMetadataTrack: this.segmentMetadataTrack_,
704 loaderType: 'main'
705 }), options);
706
707 // alternate audio track
708 this.audioSegmentLoader_ = new _segmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderSettings, {
709 loaderType: 'audio'
710 }), options);
711
712 this.subtitleSegmentLoader_ = new _vttSegmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderSettings, {
713 loaderType: 'vtt'
714 }), options);
715
716 this.setupSegmentLoaderListeners_();
717
718 // Create SegmentLoader stat-getters
719 loaderStats.forEach(function (stat) {
720 _this[stat + '_'] = sumLoaderStat.bind(_this, stat);
721 });
722
723 this.masterPlaylistLoader_.load();
724 }
725
726 /**
727 * Register event handlers on the master playlist loader. A helper
728 * function for construction time.
729 *
730 * @private
731 */
732
733 _createClass(MasterPlaylistController, [{
734 key: 'setupMasterPlaylistLoaderListeners_',
735 value: function setupMasterPlaylistLoaderListeners_() {
736 var _this2 = this;
737
738 this.masterPlaylistLoader_.on('loadedmetadata', function () {
739 var media = _this2.masterPlaylistLoader_.media();
740 var requestTimeout = _this2.masterPlaylistLoader_.targetDuration * 1.5 * 1000;
741
742 // If we don't have any more available playlists, we don't want to
743 // timeout the request.
744 if (_this2.masterPlaylistLoader_.isLowestEnabledRendition_()) {
745 _this2.requestOptions_.timeout = 0;
746 } else {
747 _this2.requestOptions_.timeout = requestTimeout;
748 }
749
750 // if this isn't a live video and preload permits, start
751 // downloading segments
752 if (media.endList && _this2.tech_.preload() !== 'none') {
753 _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
754 _this2.mainSegmentLoader_.load();
755 }
756
757 _this2.fillAudioTracks_();
758 _this2.setupAudio();
759
760 _this2.fillSubtitleTracks_();
761 _this2.setupSubtitles();
762
763 _this2.triggerPresenceUsage_(_this2.master(), media);
764
765 try {
766 _this2.setupSourceBuffers_();
767 } catch (e) {
768 _videoJs2['default'].log.warn('Failed to create SourceBuffers', e);
769 return _this2.mediaSource.endOfStream('decode');
770 }
771 _this2.setupFirstPlay();
772
773 _this2.trigger('audioupdate');
774 _this2.trigger('selectedinitialmedia');
775 });
776
777 this.masterPlaylistLoader_.on('loadedplaylist', function () {
778 var updatedPlaylist = _this2.masterPlaylistLoader_.media();
779
780 if (!updatedPlaylist) {
781 // select the initial variant
782 _this2.initialMedia_ = _this2.selectPlaylist();
783 _this2.masterPlaylistLoader_.media(_this2.initialMedia_);
784 return;
785 }
786
787 if (_this2.useCueTags_) {
788 _this2.updateAdCues_(updatedPlaylist);
789 }
790
791 // TODO: Create a new event on the PlaylistLoader that signals
792 // that the segments have changed in some way and use that to
793 // update the SegmentLoader instead of doing it twice here and
794 // on `mediachange`
795 _this2.mainSegmentLoader_.playlist(updatedPlaylist, _this2.requestOptions_);
796 _this2.updateDuration();
797
798 // If the player isn't paused, ensure that the segment loader is running,
799 // as it is possible that it was temporarily stopped while waiting for
800 // a playlist (e.g., in case the playlist errored and we re-requested it).
801 if (!_this2.tech_.paused()) {
802 _this2.mainSegmentLoader_.load();
803 }
804
805 if (!updatedPlaylist.endList) {
806 (function () {
807 var addSeekableRange = function addSeekableRange() {
808 var seekable = _this2.seekable();
809
810 if (seekable.length !== 0) {
811 _this2.mediaSource.addSeekableRange_(seekable.start(0), seekable.end(0));
812 }
813 };
814
815 if (_this2.duration() !== Infinity) {
816 (function () {
817 var onDurationchange = function onDurationchange() {
818 if (_this2.duration() === Infinity) {
819 addSeekableRange();
820 } else {
821 _this2.tech_.one('durationchange', onDurationchange);
822 }
823 };
824
825 _this2.tech_.one('durationchange', onDurationchange);
826 })();
827 } else {
828 addSeekableRange();
829 }
830 })();
831 }
832 });
833
834 this.masterPlaylistLoader_.on('error', function () {
835 _this2.blacklistCurrentPlaylist(_this2.masterPlaylistLoader_.error);
836 });
837
838 this.masterPlaylistLoader_.on('mediachanging', function () {
839 _this2.mainSegmentLoader_.abort();
840 _this2.mainSegmentLoader_.pause();
841 });
842
843 this.masterPlaylistLoader_.on('mediachange', function () {
844 var media = _this2.masterPlaylistLoader_.media();
845 var requestTimeout = _this2.masterPlaylistLoader_.targetDuration * 1.5 * 1000;
846 var activeAudioGroup = undefined;
847 var activeTrack = undefined;
848
849 // If we don't have any more available playlists, we don't want to
850 // timeout the request.
851 if (_this2.masterPlaylistLoader_.isLowestEnabledRendition_()) {
852 _this2.requestOptions_.timeout = 0;
853 } else {
854 _this2.requestOptions_.timeout = requestTimeout;
855 }
856
857 // TODO: Create a new event on the PlaylistLoader that signals
858 // that the segments have changed in some way and use that to
859 // update the SegmentLoader instead of doing it twice here and
860 // on `loadedplaylist`
861 _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
862 _this2.mainSegmentLoader_.load();
863
864 // if the audio group has changed, a new audio track has to be
865 // enabled
866 activeAudioGroup = _this2.activeAudioGroup();
867 activeTrack = activeAudioGroup.filter(function (track) {
868 return track.enabled;
869 })[0];
870 if (!activeTrack) {
871 _this2.mediaGroupChanged();
872 _this2.trigger('audioupdate');
873 }
874 _this2.setupSubtitles();
875
876 _this2.tech_.trigger({
877 type: 'mediachange',
878 bubbles: true
879 });
880 });
881
882 this.masterPlaylistLoader_.on('playlistunchanged', function () {
883 var updatedPlaylist = _this2.masterPlaylistLoader_.media();
884 var playlistOutdated = _this2.stuckAtPlaylistEnd_(updatedPlaylist);
885
886 if (playlistOutdated) {
887 // Playlist has stopped updating and we're stuck at its end. Try to
888 // blacklist it and switch to another playlist in the hope that that
889 // one is updating (and give the player a chance to re-adjust to the
890 // safe live point).
891 _this2.blacklistCurrentPlaylist({
892 message: 'Playlist no longer updating.'
893 });
894 // useful for monitoring QoS
895 _this2.tech_.trigger('playliststuck');
896 }
897 });
898
899 this.masterPlaylistLoader_.on('renditiondisabled', function () {
900 _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-disabled' });
901 });
902 this.masterPlaylistLoader_.on('renditionenabled', function () {
903 _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-enabled' });
904 });
905 }
906
907 /**
908 * A helper function for triggerring presence usage events once per source
909 *
910 * @private
911 */
912 }, {
913 key: 'triggerPresenceUsage_',
914 value: function triggerPresenceUsage_(master, media) {
915 var mediaGroups = master.mediaGroups || {};
916 var defaultDemuxed = true;
917 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
918
919 for (var mediaGroup in mediaGroups.AUDIO) {
920 for (var label in mediaGroups.AUDIO[mediaGroup]) {
921 var properties = mediaGroups.AUDIO[mediaGroup][label];
922
923 if (!properties.uri) {
924 defaultDemuxed = false;
925 }
926 }
927 }
928
929 if (defaultDemuxed) {
930 this.tech_.trigger({ type: 'usage', name: 'hls-demuxed' });
931 }
932
933 if (Object.keys(mediaGroups.SUBTITLES).length) {
934 this.tech_.trigger({ type: 'usage', name: 'hls-webvtt' });
935 }
936
937 if (Hls.Playlist.isAes(media)) {
938 this.tech_.trigger({ type: 'usage', name: 'hls-aes' });
939 }
940
941 if (Hls.Playlist.isFmp4(media)) {
942 this.tech_.trigger({ type: 'usage', name: 'hls-fmp4' });
943 }
944
945 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
946 this.tech_.trigger({ type: 'usage', name: 'hls-alternate-audio' });
947 }
948
949 if (this.useCueTags_) {
950 this.tech_.trigger({ type: 'usage', name: 'hls-playlist-cue-tags' });
951 }
952 }
953
954 /**
955 * Register event handlers on the segment loaders. A helper function
956 * for construction time.
957 *
958 * @private
959 */
960 }, {
961 key: 'setupSegmentLoaderListeners_',
962 value: function setupSegmentLoaderListeners_() {
963 var _this3 = this;
964
965 this.mainSegmentLoader_.on('bandwidthupdate', function () {
966 var nextPlaylist = _this3.selectPlaylist();
967 var currentPlaylist = _this3.masterPlaylistLoader_.media();
968 var buffered = _this3.tech_.buffered();
969 var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - _this3.tech_.currentTime() : 0;
970
971 var bufferLowWaterLine = _this3.bufferLowWaterLine();
972
973 // If the playlist is live, then we want to not take low water line into account.
974 // This is because in LIVE, the player plays 3 segments from the end of the
975 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration availble
976 // in those segments, a viewer will never experience a rendition upswitch.
977 if (!currentPlaylist.endList ||
978 // For the same reason as LIVE, we ignore the low water line when the VOD
979 // duration is below the max potential low water line
980 _this3.duration() < _config2['default'].MAX_BUFFER_LOW_WATER_LINE ||
981 // we want to switch down to lower resolutions quickly to continue playback, but
982 nextPlaylist.attributes.BANDWIDTH < currentPlaylist.attributes.BANDWIDTH ||
983 // ensure we have some buffer before we switch up to prevent us running out of
984 // buffer while loading a higher rendition.
985 forwardBuffer >= bufferLowWaterLine) {
986 _this3.masterPlaylistLoader_.media(nextPlaylist);
987 }
988
989 _this3.tech_.trigger('bandwidthupdate');
990 });
991 this.mainSegmentLoader_.on('progress', function () {
992 _this3.trigger('progress');
993 });
994
995 this.mainSegmentLoader_.on('error', function () {
996 _this3.blacklistCurrentPlaylist(_this3.mainSegmentLoader_.error());
997 });
998
999 this.mainSegmentLoader_.on('syncinfoupdate', function () {
1000 _this3.onSyncInfoUpdate_();
1001 });
1002
1003 this.mainSegmentLoader_.on('timestampoffset', function () {
1004 _this3.tech_.trigger({ type: 'usage', name: 'hls-timestamp-offset' });
1005 });
1006 this.audioSegmentLoader_.on('syncinfoupdate', function () {
1007 _this3.onSyncInfoUpdate_();
1008 });
1009
1010 this.mainSegmentLoader_.on('ended', function () {
1011 _this3.onEndOfStream();
1012 });
1013
1014 this.audioSegmentLoader_.on('ended', function () {
1015 _this3.onEndOfStream();
1016 });
1017
1018 this.audioSegmentLoader_.on('error', function () {
1019 _videoJs2['default'].log.warn('Problem encountered with the current alternate audio track' + '. Switching back to default.');
1020 _this3.audioSegmentLoader_.abort();
1021 _this3.audioPlaylistLoader_ = null;
1022 _this3.setupAudio();
1023 });
1024
1025 this.subtitleSegmentLoader_.on('error', this.handleSubtitleError_.bind(this));
1026 }
1027 }, {
1028 key: 'handleAudioinfoUpdate_',
1029 value: function handleAudioinfoUpdate_(event) {
1030 if (Hls.supportsAudioInfoChange_() || !this.audioInfo_ || !objectChanged(this.audioInfo_, event.info)) {
1031 this.audioInfo_ = event.info;
1032 return;
1033 }
1034
1035 var error = 'had different audio properties (channels, sample rate, etc.) ' + 'or changed in some other way. This behavior is currently ' + 'unsupported in Firefox 48 and below due to an issue: \n\n' + 'https://bugzilla.mozilla.org/show_bug.cgi?id=1247138\n\n';
1036
1037 var enabledIndex = this.activeAudioGroup().map(function (track) {
1038 return track.enabled;
1039 }).indexOf(true);
1040 var enabledTrack = this.activeAudioGroup()[enabledIndex];
1041 var defaultTrack = this.activeAudioGroup().filter(function (track) {
1042 return track.properties_ && track.properties_['default'];
1043 })[0];
1044
1045 // they did not switch audiotracks
1046 // blacklist the current playlist
1047 if (!this.audioPlaylistLoader_) {
1048 error = 'The rendition that we tried to switch to ' + error + 'Unfortunately that means we will have to blacklist ' + 'the current playlist and switch to another. Sorry!';
1049 this.blacklistCurrentPlaylist();
1050 } else {
1051 error = 'The audio track \'' + enabledTrack.label + '\' that we tried to ' + ('switch to ' + error + ' Unfortunately this means we will have to ') + ('return you to the main track \'' + defaultTrack.label + '\'. Sorry!');
1052 defaultTrack.enabled = true;
1053 this.activeAudioGroup().splice(enabledIndex, 1);
1054 this.trigger('audioupdate');
1055 }
1056
1057 _videoJs2['default'].log.warn(error);
1058 this.setupAudio();
1059 }
1060 }, {
1061 key: 'mediaSecondsLoaded_',
1062 value: function mediaSecondsLoaded_() {
1063 return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);
1064 }
1065
1066 /**
1067 * fill our internal list of HlsAudioTracks with data from
1068 * the master playlist or use a default
1069 *
1070 * @private
1071 */
1072 }, {
1073 key: 'fillAudioTracks_',
1074 value: function fillAudioTracks_() {
1075 var master = this.master();
1076 var mediaGroups = master.mediaGroups || {};
1077
1078 // force a default if we have none or we are not
1079 // in html5 mode (the only mode to support more than one
1080 // audio track)
1081 if (!mediaGroups || !mediaGroups.AUDIO || Object.keys(mediaGroups.AUDIO).length === 0 || this.mode_ !== 'html5') {
1082 // "main" audio group, track name "default"
1083 mediaGroups.AUDIO = { main: { 'default': { 'default': true } } };
1084 }
1085
1086 for (var mediaGroup in mediaGroups.AUDIO) {
1087 if (!this.audioGroups_[mediaGroup]) {
1088 this.audioGroups_[mediaGroup] = [];
1089 }
1090
1091 for (var label in mediaGroups.AUDIO[mediaGroup]) {
1092 var properties = mediaGroups.AUDIO[mediaGroup][label];
1093 var track = new _videoJs2['default'].AudioTrack({
1094 id: label,
1095 kind: this.audioTrackKind_(properties),
1096 enabled: false,
1097 language: properties.language,
1098 label: label
1099 });
1100
1101 track.properties_ = properties;
1102 this.audioGroups_[mediaGroup].push(track);
1103 }
1104 }
1105
1106 // enable the default active track
1107 (this.activeAudioGroup().filter(function (audioTrack) {
1108 return audioTrack.properties_['default'];
1109 })[0] || this.activeAudioGroup()[0]).enabled = true;
1110 }
1111
1112 /**
1113 * Convert the properties of an HLS track into an audioTrackKind.
1114 *
1115 * @private
1116 */
1117 }, {
1118 key: 'audioTrackKind_',
1119 value: function audioTrackKind_(properties) {
1120 var kind = properties['default'] ? 'main' : 'alternative';
1121
1122 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
1123 kind = 'main-desc';
1124 }
1125
1126 return kind;
1127 }
1128
1129 /**
1130 * fill our internal list of Subtitle Tracks with data from
1131 * the master playlist or use a default
1132 *
1133 * @private
1134 */
1135 }, {
1136 key: 'fillSubtitleTracks_',
1137 value: function fillSubtitleTracks_() {
1138 var master = this.master();
1139 var mediaGroups = master.mediaGroups || {};
1140
1141 for (var mediaGroup in mediaGroups.SUBTITLES) {
1142 if (!this.subtitleGroups_.groups[mediaGroup]) {
1143 this.subtitleGroups_.groups[mediaGroup] = [];
1144 }
1145
1146 for (var label in mediaGroups.SUBTITLES[mediaGroup]) {
1147 var properties = mediaGroups.SUBTITLES[mediaGroup][label];
1148
1149 if (!properties.forced) {
1150 this.subtitleGroups_.groups[mediaGroup].push(_videoJs2['default'].mergeOptions({ id: label }, properties));
1151
1152 if (typeof this.subtitleGroups_.tracks[label] === 'undefined') {
1153 var track = this.tech_.addRemoteTextTrack({
1154 id: label,
1155 kind: 'subtitles',
1156 enabled: false,
1157 language: properties.language,
1158 label: label
1159 }, true).track;
1160
1161 this.subtitleGroups_.tracks[label] = track;
1162 }
1163 }
1164 }
1165 }
1166
1167 // Do not enable a default subtitle track. Wait for user interaction instead.
1168 }
1169
1170 /**
1171 * Call load on our SegmentLoaders
1172 */
1173 }, {
1174 key: 'load',
1175 value: function load() {
1176 this.mainSegmentLoader_.load();
1177 if (this.audioPlaylistLoader_) {
1178 this.audioSegmentLoader_.load();
1179 }
1180 if (this.subtitlePlaylistLoader_) {
1181 this.subtitleSegmentLoader_.load();
1182 }
1183 }
1184
1185 /**
1186 * Returns the audio group for the currently active primary
1187 * media playlist.
1188 */
1189 }, {
1190 key: 'activeAudioGroup',
1191 value: function activeAudioGroup() {
1192 var videoPlaylist = this.masterPlaylistLoader_.media();
1193 var result = undefined;
1194
1195 if (videoPlaylist.attributes && videoPlaylist.attributes.AUDIO) {
1196 result = this.audioGroups_[videoPlaylist.attributes.AUDIO];
1197 }
1198
1199 return result || this.audioGroups_.main;
1200 }
1201
1202 /**
1203 * Returns the subtitle group for the currently active primary
1204 * media playlist.
1205 */
1206 }, {
1207 key: 'activeSubtitleGroup_',
1208 value: function activeSubtitleGroup_() {
1209 var videoPlaylist = this.masterPlaylistLoader_.media();
1210 var result = undefined;
1211
1212 if (!videoPlaylist) {
1213 return null;
1214 }
1215
1216 if (videoPlaylist.attributes && videoPlaylist.attributes.SUBTITLES) {
1217 result = this.subtitleGroups_.groups[videoPlaylist.attributes.SUBTITLES];
1218 }
1219
1220 return result || this.subtitleGroups_.groups.main;
1221 }
1222 }, {
1223 key: 'activeSubtitleTrack_',
1224 value: function activeSubtitleTrack_() {
1225 for (var trackName in this.subtitleGroups_.tracks) {
1226 if (this.subtitleGroups_.tracks[trackName].mode === 'showing') {
1227 return this.subtitleGroups_.tracks[trackName];
1228 }
1229 }
1230
1231 return null;
1232 }
1233 }, {
1234 key: 'handleSubtitleError_',
1235 value: function handleSubtitleError_() {
1236 _videoJs2['default'].log.warn('Problem encountered loading the subtitle track' + '. Switching back to default.');
1237
1238 this.subtitleSegmentLoader_.abort();
1239
1240 var track = this.activeSubtitleTrack_();
1241
1242 if (track) {
1243 track.mode = 'disabled';
1244 }
1245
1246 this.setupSubtitles();
1247 }
1248
1249 /**
1250 * Determine the correct audio renditions based on the active
1251 * AudioTrack and initialize a PlaylistLoader and SegmentLoader if
1252 * necessary. This method is only called when the media-group changes
1253 * and performs non-destructive 'resync' of the SegmentLoader(s) since
1254 * the playlist has likely changed
1255 */
1256 }, {
1257 key: 'mediaGroupChanged',
1258 value: function mediaGroupChanged() {
1259 var track = this.getActiveAudioTrack_();
1260
1261 this.stopAudioLoaders_();
1262 this.resyncAudioLoaders_(track);
1263 }
1264
1265 /**
1266 * Determine the correct audio rendition based on the active
1267 * AudioTrack and initialize a PlaylistLoader and SegmentLoader if
1268 * necessary. This method is called once automatically before
1269 * playback begins to enable the default audio track and should be
1270 * invoked again if the track is changed. Performs destructive 'reset'
1271 * on the SegmentLoaders(s) to ensure we start loading audio as
1272 * close to currentTime as possible
1273 */
1274 }, {
1275 key: 'setupAudio',
1276 value: function setupAudio() {
1277 var track = this.getActiveAudioTrack_();
1278
1279 this.stopAudioLoaders_();
1280 this.resetAudioLoaders_(track);
1281 }
1282
1283 /**
1284 * Returns the currently active track or the default track if none
1285 * are active
1286 */
1287 }, {
1288 key: 'getActiveAudioTrack_',
1289 value: function getActiveAudioTrack_() {
1290 // determine whether seperate loaders are required for the audio
1291 // rendition
1292 var audioGroup = this.activeAudioGroup();
1293 var track = audioGroup.filter(function (audioTrack) {
1294 return audioTrack.enabled;
1295 })[0];
1296
1297 if (!track) {
1298 track = audioGroup.filter(function (audioTrack) {
1299 return audioTrack.properties_['default'];
1300 })[0] || audioGroup[0];
1301 track.enabled = true;
1302 }
1303
1304 return track;
1305 }
1306
1307 /**
1308 * Destroy the PlaylistLoader and pause the SegmentLoader specifically
1309 * for audio when switching audio tracks
1310 */
1311 }, {
1312 key: 'stopAudioLoaders_',
1313 value: function stopAudioLoaders_() {
1314 // stop playlist and segment loading for audio
1315 if (this.audioPlaylistLoader_) {
1316 this.audioPlaylistLoader_.dispose();
1317 this.audioPlaylistLoader_ = null;
1318 }
1319 this.audioSegmentLoader_.pause();
1320 }
1321
1322 /**
1323 * Destructive reset of the mainSegmentLoader (when audio is muxed)
1324 * or audioSegmentLoader (when audio is demuxed) to prepare them
1325 * to start loading new data right at currentTime
1326 */
1327 }, {
1328 key: 'resetAudioLoaders_',
1329 value: function resetAudioLoaders_(track) {
1330 if (!track.properties_.resolvedUri) {
1331 this.mainSegmentLoader_.resetEverything();
1332 return;
1333 }
1334
1335 this.audioSegmentLoader_.resetEverything();
1336 this.setupAudioPlaylistLoader_(track);
1337 }
1338
1339 /**
1340 * Non-destructive resync of the audioSegmentLoader (when audio
1341 * is demuxed) to prepare to continue appending new audio data
1342 * at the end of the current buffered region
1343 */
1344 }, {
1345 key: 'resyncAudioLoaders_',
1346 value: function resyncAudioLoaders_(track) {
1347 if (!track.properties_.resolvedUri) {
1348 return;
1349 }
1350
1351 this.audioSegmentLoader_.resyncLoader();
1352 this.setupAudioPlaylistLoader_(track);
1353 }
1354
1355 /**
1356 * Setup a new audioPlaylistLoader and start the audioSegmentLoader
1357 * to begin loading demuxed audio
1358 */
1359 }, {
1360 key: 'setupAudioPlaylistLoader_',
1361 value: function setupAudioPlaylistLoader_(track) {
1362 var _this4 = this;
1363
1364 // startup playlist and segment loaders for the enabled audio
1365 // track
1366 this.audioPlaylistLoader_ = new _playlistLoader2['default'](track.properties_.resolvedUri, this.hls_, this.withCredentials);
1367 this.audioPlaylistLoader_.load();
1368
1369 this.audioPlaylistLoader_.on('loadedmetadata', function () {
1370 var audioPlaylist = _this4.audioPlaylistLoader_.media();
1371
1372 _this4.audioSegmentLoader_.playlist(audioPlaylist, _this4.requestOptions_);
1373
1374 // if the video is already playing, or if this isn't a live video and preload
1375 // permits, start downloading segments
1376 if (!_this4.tech_.paused() || audioPlaylist.endList && _this4.tech_.preload() !== 'none') {
1377 _this4.audioSegmentLoader_.load();
1378 }
1379
1380 if (!audioPlaylist.endList) {
1381 _this4.audioPlaylistLoader_.trigger('firstplay');
1382 }
1383 });
1384
1385 this.audioPlaylistLoader_.on('loadedplaylist', function () {
1386 var updatedPlaylist = undefined;
1387
1388 if (_this4.audioPlaylistLoader_) {
1389 updatedPlaylist = _this4.audioPlaylistLoader_.media();
1390 }
1391
1392 if (!updatedPlaylist) {
1393 // only one playlist to select
1394 _this4.audioPlaylistLoader_.media(_this4.audioPlaylistLoader_.playlists.master.playlists[0]);
1395 return;
1396 }
1397
1398 _this4.audioSegmentLoader_.playlist(updatedPlaylist, _this4.requestOptions_);
1399 });
1400
1401 this.audioPlaylistLoader_.on('error', function () {
1402 _videoJs2['default'].log.warn('Problem encountered loading the alternate audio track' + '. Switching back to default.');
1403 _this4.audioSegmentLoader_.abort();
1404 _this4.setupAudio();
1405 });
1406 }
1407
1408 /**
1409 * Determine the correct subtitle playlist based on the active
1410 * SubtitleTrack and initialize a PlaylistLoader and SegmentLoader if
1411 * necessary. This method is called once automatically before
1412 * playback begins to enable the default subtitle track and should be
1413 * invoked again if the track is changed.
1414 */
1415 }, {
1416 key: 'setupSubtitles',
1417 value: function setupSubtitles() {
1418 var _this5 = this;
1419
1420 var subtitleGroup = this.activeSubtitleGroup_();
1421 var track = this.activeSubtitleTrack_();
1422
1423 this.subtitleSegmentLoader_.pause();
1424
1425 if (!track) {
1426 // stop playlist and segment loading for subtitles
1427 if (this.subtitlePlaylistLoader_) {
1428 this.subtitlePlaylistLoader_.dispose();
1429 this.subtitlePlaylistLoader_ = null;
1430 }
1431 return;
1432 }
1433
1434 var properties = subtitleGroup.filter(function (subtitleProperties) {
1435 return subtitleProperties.id === track.id;
1436 })[0];
1437
1438 // startup playlist and segment loaders for the enabled subtitle track
1439 if (!this.subtitlePlaylistLoader_ ||
1440 // if the media hasn't loaded yet, we don't have the URI to check, so it is
1441 // easiest to simply recreate the playlist loader
1442 !this.subtitlePlaylistLoader_.media() || this.subtitlePlaylistLoader_.media().resolvedUri !== properties.resolvedUri) {
1443
1444 if (this.subtitlePlaylistLoader_) {
1445 this.subtitlePlaylistLoader_.dispose();
1446 }
1447
1448 // reset the segment loader only when the subtitle playlist is changed instead of
1449 // every time setupSubtitles is called since switching subtitle tracks fires
1450 // multiple `change` events on the TextTrackList
1451 this.subtitleSegmentLoader_.resetEverything();
1452
1453 // can't reuse playlistloader because we're only using single renditions and not a
1454 // proper master
1455 this.subtitlePlaylistLoader_ = new _playlistLoader2['default'](properties.resolvedUri, this.hls_, this.withCredentials);
1456
1457 this.subtitlePlaylistLoader_.on('loadedmetadata', function () {
1458 var subtitlePlaylist = _this5.subtitlePlaylistLoader_.media();
1459
1460 _this5.subtitleSegmentLoader_.playlist(subtitlePlaylist, _this5.requestOptions_);
1461 _this5.subtitleSegmentLoader_.track(_this5.activeSubtitleTrack_());
1462
1463 // if the video is already playing, or if this isn't a live video and preload
1464 // permits, start downloading segments
1465 if (!_this5.tech_.paused() || subtitlePlaylist.endList && _this5.tech_.preload() !== 'none') {
1466 _this5.subtitleSegmentLoader_.load();
1467 }
1468 });
1469
1470 this.subtitlePlaylistLoader_.on('loadedplaylist', function () {
1471 var updatedPlaylist = undefined;
1472
1473 if (_this5.subtitlePlaylistLoader_) {
1474 updatedPlaylist = _this5.subtitlePlaylistLoader_.media();
1475 }
1476
1477 if (!updatedPlaylist) {
1478 return;
1479 }
1480
1481 _this5.subtitleSegmentLoader_.playlist(updatedPlaylist, _this5.requestOptions_);
1482 });
1483
1484 this.subtitlePlaylistLoader_.on('error', this.handleSubtitleError_.bind(this));
1485 }
1486
1487 if (this.subtitlePlaylistLoader_.media() && this.subtitlePlaylistLoader_.media().resolvedUri === properties.resolvedUri) {
1488 this.subtitleSegmentLoader_.load();
1489 } else {
1490 this.subtitlePlaylistLoader_.load();
1491 }
1492 }
1493
1494 /**
1495 * Re-tune playback quality level for the current player
1496 * conditions. This method may perform destructive actions, like
1497 * removing already buffered content, to readjust the currently
1498 * active playlist quickly.
1499 *
1500 * @private
1501 */
1502 }, {
1503 key: 'fastQualityChange_',
1504 value: function fastQualityChange_() {
1505 var media = this.selectPlaylist();
1506
1507 if (media !== this.masterPlaylistLoader_.media()) {
1508 this.masterPlaylistLoader_.media(media);
1509
1510 this.mainSegmentLoader_.resetLoader();
1511 // don't need to reset audio as it is reset when media changes
1512 }
1513 }
1514
1515 /**
1516 * Begin playback.
1517 */
1518 }, {
1519 key: 'play',
1520 value: function play() {
1521 if (this.setupFirstPlay()) {
1522 return;
1523 }
1524
1525 if (this.tech_.ended()) {
1526 this.tech_.setCurrentTime(0);
1527 }
1528
1529 if (this.hasPlayed_()) {
1530 this.load();
1531 }
1532
1533 var seekable = this.tech_.seekable();
1534
1535 // if the viewer has paused and we fell out of the live window,
1536 // seek forward to the live point
1537 if (this.tech_.duration() === Infinity) {
1538 if (this.tech_.currentTime() < seekable.start(0)) {
1539 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
1540 }
1541 }
1542 }
1543
1544 /**
1545 * Seek to the latest media position if this is a live video and the
1546 * player and video are loaded and initialized.
1547 */
1548 }, {
1549 key: 'setupFirstPlay',
1550 value: function setupFirstPlay() {
1551 var seekable = undefined;
1552 var media = this.masterPlaylistLoader_.media();
1553
1554 // check that everything is ready to begin buffering in the live
1555 // scenario
1556 // 1) the active media playlist is available
1557 if (media &&
1558 // 2) the player is not paused
1559 !this.tech_.paused() &&
1560 // 3) the player has not started playing
1561 !this.hasPlayed_()) {
1562
1563 // when the video is a live stream
1564 if (!media.endList) {
1565 this.trigger('firstplay');
1566
1567 // seek to the latest media position for live videos
1568 seekable = this.seekable();
1569 if (seekable.length) {
1570 this.tech_.setCurrentTime(seekable.end(0));
1571 }
1572 }
1573 this.hasPlayed_ = function () {
1574 return true;
1575 };
1576 // now that we are ready, load the segment
1577 this.load();
1578 return true;
1579 }
1580 return false;
1581 }
1582
1583 /**
1584 * handle the sourceopen event on the MediaSource
1585 *
1586 * @private
1587 */
1588 }, {
1589 key: 'handleSourceOpen_',
1590 value: function handleSourceOpen_() {
1591 // Only attempt to create the source buffer if none already exist.
1592 // handleSourceOpen is also called when we are "re-opening" a source buffer
1593 // after `endOfStream` has been called (in response to a seek for instance)
1594 try {
1595 this.setupSourceBuffers_();
1596 } catch (e) {
1597 _videoJs2['default'].log.warn('Failed to create Source Buffers', e);
1598 return this.mediaSource.endOfStream('decode');
1599 }
1600
1601 // if autoplay is enabled, begin playback. This is duplicative of
1602 // code in video.js but is required because play() must be invoked
1603 // *after* the media source has opened.
1604 if (this.tech_.autoplay()) {
1605 this.tech_.play();
1606 }
1607
1608 this.trigger('sourceopen');
1609 }
1610
1611 /**
1612 * Calls endOfStream on the media source when all active stream types have called
1613 * endOfStream
1614 *
1615 * @param {string} streamType
1616 * Stream type of the segment loader that called endOfStream
1617 * @private
1618 */
1619 }, {
1620 key: 'onEndOfStream',
1621 value: function onEndOfStream() {
1622 var isEndOfStream = this.mainSegmentLoader_.ended_;
1623
1624 if (this.audioPlaylistLoader_) {
1625 // if the audio playlist loader exists, then alternate audio is active, so we need
1626 // to wait for both the main and audio segment loaders to call endOfStream
1627 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
1628 }
1629
1630 if (isEndOfStream) {
1631 this.mediaSource.endOfStream();
1632 }
1633 }
1634
1635 /**
1636 * Check if a playlist has stopped being updated
1637 * @param {Object} playlist the media playlist object
1638 * @return {boolean} whether the playlist has stopped being updated or not
1639 */
1640 }, {
1641 key: 'stuckAtPlaylistEnd_',
1642 value: function stuckAtPlaylistEnd_(playlist) {
1643 var seekable = this.seekable();
1644
1645 if (!seekable.length) {
1646 // playlist doesn't have enough information to determine whether we are stuck
1647 return false;
1648 }
1649
1650 var expired = this.syncController_.getExpiredTime(playlist, this.mediaSource.duration);
1651
1652 if (expired === null) {
1653 return false;
1654 }
1655
1656 // does not use the safe live end to calculate playlist end, since we
1657 // don't want to say we are stuck while there is still content
1658 var absolutePlaylistEnd = Hls.Playlist.playlistEnd(playlist, expired);
1659 var currentTime = this.tech_.currentTime();
1660 var buffered = this.tech_.buffered();
1661
1662 if (!buffered.length) {
1663 // return true if the playhead reached the absolute end of the playlist
1664 return absolutePlaylistEnd - currentTime <= _ranges2['default'].TIME_FUDGE_FACTOR;
1665 }
1666 var bufferedEnd = buffered.end(buffered.length - 1);
1667
1668 // return true if there is too little buffer left and
1669 // buffer has reached absolute end of playlist
1670 return bufferedEnd - currentTime <= _ranges2['default'].TIME_FUDGE_FACTOR && absolutePlaylistEnd - bufferedEnd <= _ranges2['default'].TIME_FUDGE_FACTOR;
1671 }
1672
1673 /**
1674 * Blacklists a playlist when an error occurs for a set amount of time
1675 * making it unavailable for selection by the rendition selection algorithm
1676 * and then forces a new playlist (rendition) selection.
1677 *
1678 * @param {Object=} error an optional error that may include the playlist
1679 * to blacklist
1680 */
1681 }, {
1682 key: 'blacklistCurrentPlaylist',
1683 value: function blacklistCurrentPlaylist() {
1684 var error = arguments.length <= 0 || arguments[0] === undefined ? {} : arguments[0];
1685
1686 var currentPlaylist = undefined;
1687 var nextPlaylist = undefined;
1688
1689 // If the `error` was generated by the playlist loader, it will contain
1690 // the playlist we were trying to load (but failed) and that should be
1691 // blacklisted instead of the currently selected playlist which is likely
1692 // out-of-date in this scenario
1693 currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
1694
1695 // If there is no current playlist, then an error occurred while we were
1696 // trying to load the master OR while we were disposing of the tech
1697 if (!currentPlaylist) {
1698 this.error = error;
1699
1700 try {
1701 return this.mediaSource.endOfStream('network');
1702 } catch (e) {
1703 return this.trigger('error');
1704 }
1705 }
1706
1707 var isFinalRendition = this.masterPlaylistLoader_.isFinalRendition_();
1708
1709 if (isFinalRendition) {
1710 // Never blacklisting this playlist because it's final rendition
1711 _videoJs2['default'].log.warn('Problem encountered with the current ' + 'HLS playlist. Trying again since it is the final playlist.');
1712
1713 this.tech_.trigger('retryplaylist');
1714 return this.masterPlaylistLoader_.load(isFinalRendition);
1715 }
1716 // Blacklist this playlist
1717 currentPlaylist.excludeUntil = Date.now() + this.blacklistDuration * 1000;
1718 this.tech_.trigger('blacklistplaylist');
1719 this.tech_.trigger({ type: 'usage', name: 'hls-rendition-blacklisted' });
1720
1721 // Select a new playlist
1722 nextPlaylist = this.selectPlaylist();
1723 _videoJs2['default'].log.warn('Problem encountered with the current HLS playlist.' + (error.message ? ' ' + error.message : '') + ' Switching to another playlist.');
1724
1725 return this.masterPlaylistLoader_.media(nextPlaylist);
1726 }
1727
1728 /**
1729 * Pause all segment loaders
1730 */
1731 }, {
1732 key: 'pauseLoading',
1733 value: function pauseLoading() {
1734 this.mainSegmentLoader_.pause();
1735 if (this.audioPlaylistLoader_) {
1736 this.audioSegmentLoader_.pause();
1737 }
1738 if (this.subtitlePlaylistLoader_) {
1739 this.subtitleSegmentLoader_.pause();
1740 }
1741 }
1742
1743 /**
1744 * set the current time on all segment loaders
1745 *
1746 * @param {TimeRange} currentTime the current time to set
1747 * @return {TimeRange} the current time
1748 */
1749 }, {
1750 key: 'setCurrentTime',
1751 value: function setCurrentTime(currentTime) {
1752 var buffered = _ranges2['default'].findRange(this.tech_.buffered(), currentTime);
1753
1754 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
1755 // return immediately if the metadata is not ready yet
1756 return 0;
1757 }
1758
1759 // it's clearly an edge-case but don't thrown an error if asked to
1760 // seek within an empty playlist
1761 if (!this.masterPlaylistLoader_.media().segments) {
1762 return 0;
1763 }
1764
1765 // In flash playback, the segment loaders should be reset on every seek, even
1766 // in buffer seeks
1767 var isFlash = this.mode_ === 'flash' || this.mode_ === 'auto' && !_videoJs2['default'].MediaSource.supportsNativeMediaSources();
1768
1769 // if the seek location is already buffered, continue buffering as
1770 // usual
1771 if (buffered && buffered.length && !isFlash) {
1772 return currentTime;
1773 }
1774
1775 // cancel outstanding requests so we begin buffering at the new
1776 // location
1777 this.mainSegmentLoader_.resetEverything();
1778 this.mainSegmentLoader_.abort();
1779 if (this.audioPlaylistLoader_) {
1780 this.audioSegmentLoader_.resetEverything();
1781 this.audioSegmentLoader_.abort();
1782 }
1783 if (this.subtitlePlaylistLoader_) {
1784 this.subtitleSegmentLoader_.resetEverything();
1785 this.subtitleSegmentLoader_.abort();
1786 }
1787
1788 if (!this.tech_.paused()) {
1789 this.mainSegmentLoader_.load();
1790 if (this.audioPlaylistLoader_) {
1791 this.audioSegmentLoader_.load();
1792 }
1793 if (this.subtitlePlaylistLoader_) {
1794 this.subtitleSegmentLoader_.load();
1795 }
1796 }
1797 }
1798
1799 /**
1800 * get the current duration
1801 *
1802 * @return {TimeRange} the duration
1803 */
1804 }, {
1805 key: 'duration',
1806 value: function duration() {
1807 if (!this.masterPlaylistLoader_) {
1808 return 0;
1809 }
1810
1811 if (this.mediaSource) {
1812 return this.mediaSource.duration;
1813 }
1814
1815 return Hls.Playlist.duration(this.masterPlaylistLoader_.media());
1816 }
1817
1818 /**
1819 * check the seekable range
1820 *
1821 * @return {TimeRange} the seekable range
1822 */
1823 }, {
1824 key: 'seekable',
1825 value: function seekable() {
1826 return this.seekable_;
1827 }
1828 }, {
1829 key: 'onSyncInfoUpdate_',
1830 value: function onSyncInfoUpdate_() {
1831 var mainSeekable = undefined;
1832 var audioSeekable = undefined;
1833
1834 if (!this.masterPlaylistLoader_) {
1835 return;
1836 }
1837
1838 var media = this.masterPlaylistLoader_.media();
1839
1840 if (!media) {
1841 return;
1842 }
1843
1844 var expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);
1845
1846 if (expired === null) {
1847 // not enough information to update seekable
1848 return;
1849 }
1850
1851 mainSeekable = Hls.Playlist.seekable(media, expired);
1852
1853 if (mainSeekable.length === 0) {
1854 return;
1855 }
1856
1857 if (this.audioPlaylistLoader_) {
1858 media = this.audioPlaylistLoader_.media();
1859 expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);
1860
1861 if (expired === null) {
1862 return;
1863 }
1864
1865 audioSeekable = Hls.Playlist.seekable(media, expired);
1866
1867 if (audioSeekable.length === 0) {
1868 return;
1869 }
1870 }
1871
1872 if (!audioSeekable) {
1873 // seekable has been calculated based on buffering video data so it
1874 // can be returned directly
1875 this.seekable_ = mainSeekable;
1876 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
1877 // seekables are pretty far off, rely on main
1878 this.seekable_ = mainSeekable;
1879 } else {
1880 this.seekable_ = _videoJs2['default'].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
1881 }
1882
1883 this.tech_.trigger('seekablechanged');
1884 }
1885
1886 /**
1887 * Update the player duration
1888 */
1889 }, {
1890 key: 'updateDuration',
1891 value: function updateDuration() {
1892 var _this6 = this;
1893
1894 var oldDuration = this.mediaSource.duration;
1895 var newDuration = Hls.Playlist.duration(this.masterPlaylistLoader_.media());
1896 var buffered = this.tech_.buffered();
1897 var setDuration = function setDuration() {
1898 _this6.mediaSource.duration = newDuration;
1899 _this6.tech_.trigger('durationchange');
1900
1901 _this6.mediaSource.removeEventListener('sourceopen', setDuration);
1902 };
1903
1904 if (buffered.length > 0) {
1905 newDuration = Math.max(newDuration, buffered.end(buffered.length - 1));
1906 }
1907
1908 // if the duration has changed, invalidate the cached value
1909 if (oldDuration !== newDuration) {
1910 // update the duration
1911 if (this.mediaSource.readyState !== 'open') {
1912 this.mediaSource.addEventListener('sourceopen', setDuration);
1913 } else {
1914 setDuration();
1915 }
1916 }
1917 }
1918
1919 /**
1920 * dispose of the MasterPlaylistController and everything
1921 * that it controls
1922 */
1923 }, {
1924 key: 'dispose',
1925 value: function dispose() {
1926 this.decrypter_.terminate();
1927 this.masterPlaylistLoader_.dispose();
1928 this.mainSegmentLoader_.dispose();
1929
1930 if (this.audioPlaylistLoader_) {
1931 this.audioPlaylistLoader_.dispose();
1932 }
1933 if (this.subtitlePlaylistLoader_) {
1934 this.subtitlePlaylistLoader_.dispose();
1935 }
1936 this.audioSegmentLoader_.dispose();
1937 this.subtitleSegmentLoader_.dispose();
1938 }
1939
1940 /**
1941 * return the master playlist object if we have one
1942 *
1943 * @return {Object} the master playlist object that we parsed
1944 */
1945 }, {
1946 key: 'master',
1947 value: function master() {
1948 return this.masterPlaylistLoader_.master;
1949 }
1950
1951 /**
1952 * return the currently selected playlist
1953 *
1954 * @return {Object} the currently selected playlist object that we parsed
1955 */
1956 }, {
1957 key: 'media',
1958 value: function media() {
1959 // playlist loader will not return media if it has not been fully loaded
1960 return this.masterPlaylistLoader_.media() || this.initialMedia_;
1961 }
1962
1963 /**
1964 * setup our internal source buffers on our segment Loaders
1965 *
1966 * @private
1967 */
1968 }, {
1969 key: 'setupSourceBuffers_',
1970 value: function setupSourceBuffers_() {
1971 var media = this.masterPlaylistLoader_.media();
1972 var mimeTypes = undefined;
1973
1974 // wait until a media playlist is available and the Media Source is
1975 // attached
1976 if (!media || this.mediaSource.readyState !== 'open') {
1977 return;
1978 }
1979
1980 mimeTypes = mimeTypesForPlaylist_(this.masterPlaylistLoader_.master, media);
1981 if (mimeTypes.length < 1) {
1982 this.error = 'No compatible SourceBuffer configuration for the variant stream:' + media.resolvedUri;
1983 return this.mediaSource.endOfStream('decode');
1984 }
1985 this.mainSegmentLoader_.mimeType(mimeTypes[0]);
1986 if (mimeTypes[1]) {
1987 this.audioSegmentLoader_.mimeType(mimeTypes[1]);
1988 }
1989
1990 // exclude any incompatible variant streams from future playlist
1991 // selection
1992 this.excludeIncompatibleVariants_(media);
1993 }
1994
1995 /**
1996 * Blacklist playlists that are known to be codec or
1997 * stream-incompatible with the SourceBuffer configuration. For
1998 * instance, Media Source Extensions would cause the video element to
1999 * stall waiting for video data if you switched from a variant with
2000 * video and audio to an audio-only one.
2001 *
2002 * @param {Object} media a media playlist compatible with the current
2003 * set of SourceBuffers. Variants in the current master playlist that
2004 * do not appear to have compatible codec or stream configurations
2005 * will be excluded from the default playlist selection algorithm
2006 * indefinitely.
2007 * @private
2008 */
2009 }, {
2010 key: 'excludeIncompatibleVariants_',
2011 value: function excludeIncompatibleVariants_(media) {
2012 var master = this.masterPlaylistLoader_.master;
2013 var codecCount = 2;
2014 var videoCodec = null;
2015 var codecs = undefined;
2016
2017 if (media.attributes && media.attributes.CODECS) {
2018 codecs = parseCodecs(media.attributes.CODECS);
2019 videoCodec = codecs.videoCodec;
2020 codecCount = codecs.codecCount;
2021 }
2022 master.playlists.forEach(function (variant) {
2023 var variantCodecs = {
2024 codecCount: 2,
2025 videoCodec: null
2026 };
2027
2028 if (variant.attributes && variant.attributes.CODECS) {
2029 var codecString = variant.attributes.CODECS;
2030
2031 variantCodecs = parseCodecs(codecString);
2032
2033 if (window.MediaSource && window.MediaSource.isTypeSupported && !window.MediaSource.isTypeSupported('video/mp4; codecs="' + mapLegacyAvcCodecs_(codecString) + '"')) {
2034 variant.excludeUntil = Infinity;
2035 }
2036 }
2037
2038 // if the streams differ in the presence or absence of audio or
2039 // video, they are incompatible
2040 if (variantCodecs.codecCount !== codecCount) {
2041 variant.excludeUntil = Infinity;
2042 }
2043
2044 // if h.264 is specified on the current playlist, some flavor of
2045 // it must be specified on all compatible variants
2046 if (variantCodecs.videoCodec !== videoCodec) {
2047 variant.excludeUntil = Infinity;
2048 }
2049 });
2050 }
2051 }, {
2052 key: 'updateAdCues_',
2053 value: function updateAdCues_(media) {
2054 var offset = 0;
2055 var seekable = this.seekable();
2056
2057 if (seekable.length) {
2058 offset = seekable.start(0);
2059 }
2060
2061 _adCueTags2['default'].updateAdCues(media, this.cueTagsTrack_, offset);
2062 }
2063
2064 /**
2065 * Calculates the desired forward buffer length based on current time
2066 *
2067 * @return {Number} Desired forward buffer length in seconds
2068 */
2069 }, {
2070 key: 'goalBufferLength',
2071 value: function goalBufferLength() {
2072 var currentTime = this.tech_.currentTime();
2073 var initial = _config2['default'].GOAL_BUFFER_LENGTH;
2074 var rate = _config2['default'].GOAL_BUFFER_LENGTH_RATE;
2075 var max = Math.max(initial, _config2['default'].MAX_GOAL_BUFFER_LENGTH);
2076
2077 return Math.min(initial + currentTime * rate, max);
2078 }
2079
2080 /**
2081 * Calculates the desired buffer low water line based on current time
2082 *
2083 * @return {Number} Desired buffer low water line in seconds
2084 */
2085 }, {
2086 key: 'bufferLowWaterLine',
2087 value: function bufferLowWaterLine() {
2088 var currentTime = this.tech_.currentTime();
2089 var initial = _config2['default'].BUFFER_LOW_WATER_LINE;
2090 var rate = _config2['default'].BUFFER_LOW_WATER_LINE_RATE;
2091 var max = Math.max(initial, _config2['default'].MAX_BUFFER_LOW_WATER_LINE);
2092
2093 return Math.min(initial + currentTime * rate, max);
2094 }
2095 }]);
2096
2097 return MasterPlaylistController;
2098})(_videoJs2['default'].EventTarget);
2099
2100exports.MasterPlaylistController = MasterPlaylistController;
2101}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
2102},{"./ad-cue-tags":1,"./config":3,"./decrypter-worker":4,"./playlist-loader":8,"./ranges":11,"./segment-loader":15,"./sync-controller":17,"./vtt-segment-loader":18,"videojs-contrib-media-sources/es5/codec-utils":64,"webworkify":75}],6:[function(require,module,exports){
2103(function (global){
2104'use strict';
2105
2106Object.defineProperty(exports, '__esModule', {
2107 value: true
2108});
2109
2110function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
2111
2112var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
2113
2114var _videoJs2 = _interopRequireDefault(_videoJs);
2115
2116var _binUtils = require('./bin-utils');
2117
2118var REQUEST_ERRORS = {
2119 FAILURE: 2,
2120 TIMEOUT: -101,
2121 ABORTED: -102
2122};
2123
2124exports.REQUEST_ERRORS = REQUEST_ERRORS;
2125/**
2126 * Turns segment byterange into a string suitable for use in
2127 * HTTP Range requests
2128 *
2129 * @param {Object} byterange - an object with two values defining the start and end
2130 * of a byte-range
2131 */
2132var byterangeStr = function byterangeStr(byterange) {
2133 var byterangeStart = undefined;
2134 var byterangeEnd = undefined;
2135
2136 // `byterangeEnd` is one less than `offset + length` because the HTTP range
2137 // header uses inclusive ranges
2138 byterangeEnd = byterange.offset + byterange.length - 1;
2139 byterangeStart = byterange.offset;
2140 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
2141};
2142
2143/**
2144 * Defines headers for use in the xhr request for a particular segment.
2145 *
2146 * @param {Object} segment - a simplified copy of the segmentInfo object
2147 * from SegmentLoader
2148 */
2149var segmentXhrHeaders = function segmentXhrHeaders(segment) {
2150 var headers = {};
2151
2152 if (segment.byterange) {
2153 headers.Range = byterangeStr(segment.byterange);
2154 }
2155 return headers;
2156};
2157
2158/**
2159 * Abort all requests
2160 *
2161 * @param {Object} activeXhrs - an object that tracks all XHR requests
2162 */
2163var abortAll = function abortAll(activeXhrs) {
2164 activeXhrs.forEach(function (xhr) {
2165 xhr.abort();
2166 });
2167};
2168
2169/**
2170 * Gather important bandwidth stats once a request has completed
2171 *
2172 * @param {Object} request - the XHR request from which to gather stats
2173 */
2174var getRequestStats = function getRequestStats(request) {
2175 return {
2176 bandwidth: request.bandwidth,
2177 bytesReceived: request.bytesReceived || 0,
2178 roundTripTime: request.roundTripTime || 0
2179 };
2180};
2181
2182/**
2183 * If possible gather bandwidth stats as a request is in
2184 * progress
2185 *
2186 * @param {Event} progressEvent - an event object from an XHR's progress event
2187 */
2188var getProgressStats = function getProgressStats(progressEvent) {
2189 var request = progressEvent.target;
2190 var roundTripTime = Date.now() - request.requestTime;
2191 var stats = {
2192 bandwidth: Infinity,
2193 bytesReceived: 0,
2194 roundTripTime: roundTripTime || 0
2195 };
2196
2197 stats.bytesReceived = progressEvent.loaded;
2198 // This can result in Infinity if stats.roundTripTime is 0 but that is ok
2199 // because we should only use bandwidth stats on progress to determine when
2200 // abort a request early due to insufficient bandwidth
2201 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
2202
2203 return stats;
2204};
2205
2206/**
2207 * Handle all error conditions in one place and return an object
2208 * with all the information
2209 *
2210 * @param {Error|null} error - if non-null signals an error occured with the XHR
2211 * @param {Object} request - the XHR request that possibly generated the error
2212 */
2213var handleErrors = function handleErrors(error, request) {
2214 if (request.timedout) {
2215 return {
2216 status: request.status,
2217 message: 'HLS request timed-out at URL: ' + request.uri,
2218 code: REQUEST_ERRORS.TIMEOUT,
2219 xhr: request
2220 };
2221 }
2222
2223 if (request.aborted) {
2224 return {
2225 status: request.status,
2226 message: 'HLS request aborted at URL: ' + request.uri,
2227 code: REQUEST_ERRORS.ABORTED,
2228 xhr: request
2229 };
2230 }
2231
2232 if (error) {
2233 return {
2234 status: request.status,
2235 message: 'HLS request errored at URL: ' + request.uri,
2236 code: REQUEST_ERRORS.FAILURE,
2237 xhr: request
2238 };
2239 }
2240
2241 return null;
2242};
2243
2244/**
2245 * Handle responses for key data and convert the key data to the correct format
2246 * for the decryption step later
2247 *
2248 * @param {Object} segment - a simplified copy of the segmentInfo object
2249 * from SegmentLoader
2250 * @param {Function} finishProcessingFn - a callback to execute to continue processing
2251 * this request
2252 */
2253var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
2254 return function (error, request) {
2255 var response = request.response;
2256 var errorObj = handleErrors(error, request);
2257
2258 if (errorObj) {
2259 return finishProcessingFn(errorObj, segment);
2260 }
2261
2262 if (response.byteLength !== 16) {
2263 return finishProcessingFn({
2264 status: request.status,
2265 message: 'Invalid HLS key at URL: ' + request.uri,
2266 code: REQUEST_ERRORS.FAILURE,
2267 xhr: request
2268 }, segment);
2269 }
2270
2271 var view = new DataView(response);
2272
2273 segment.key.bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
2274 return finishProcessingFn(null, segment);
2275 };
2276};
2277
2278/**
2279 * Handle init-segment responses
2280 *
2281 * @param {Object} segment - a simplified copy of the segmentInfo object
2282 * from SegmentLoader
2283 * @param {Function} finishProcessingFn - a callback to execute to continue processing
2284 * this request
2285 */
2286var handleInitSegmentResponse = function handleInitSegmentResponse(segment, finishProcessingFn) {
2287 return function (error, request) {
2288 var response = request.response;
2289 var errorObj = handleErrors(error, request);
2290
2291 if (errorObj) {
2292 return finishProcessingFn(errorObj, segment);
2293 }
2294
2295 // stop processing if received empty content
2296 if (response.byteLength === 0) {
2297 return finishProcessingFn({
2298 status: request.status,
2299 message: 'Empty HLS segment content at URL: ' + request.uri,
2300 code: REQUEST_ERRORS.FAILURE,
2301 xhr: request
2302 }, segment);
2303 }
2304
2305 segment.map.bytes = new Uint8Array(request.response);
2306 return finishProcessingFn(null, segment);
2307 };
2308};
2309
2310/**
2311 * Response handler for segment-requests being sure to set the correct
2312 * property depending on whether the segment is encryped or not
2313 * Also records and keeps track of stats that are used for ABR purposes
2314 *
2315 * @param {Object} segment - a simplified copy of the segmentInfo object
2316 * from SegmentLoader
2317 * @param {Function} finishProcessingFn - a callback to execute to continue processing
2318 * this request
2319 */
2320var handleSegmentResponse = function handleSegmentResponse(segment, finishProcessingFn) {
2321 return function (error, request) {
2322 var response = request.response;
2323 var errorObj = handleErrors(error, request);
2324
2325 if (errorObj) {
2326 return finishProcessingFn(errorObj, segment);
2327 }
2328
2329 // stop processing if received empty content
2330 if (response.byteLength === 0) {
2331 return finishProcessingFn({
2332 status: request.status,
2333 message: 'Empty HLS segment content at URL: ' + request.uri,
2334 code: REQUEST_ERRORS.FAILURE,
2335 xhr: request
2336 }, segment);
2337 }
2338
2339 segment.stats = getRequestStats(request);
2340
2341 if (segment.key) {
2342 segment.encryptedBytes = new Uint8Array(request.response);
2343 } else {
2344 segment.bytes = new Uint8Array(request.response);
2345 }
2346
2347 return finishProcessingFn(null, segment);
2348 };
2349};
2350
2351/**
2352 * Decrypt the segment via the decryption web worker
2353 *
2354 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
2355 * @param {Object} segment - a simplified copy of the segmentInfo object
2356 * from SegmentLoader
2357 * @param {Function} doneFn - a callback that is executed after decryption has completed
2358 */
2359var decryptSegment = function decryptSegment(decrypter, segment, doneFn) {
2360 var decryptionHandler = function decryptionHandler(event) {
2361 if (event.data.source === segment.requestId) {
2362 decrypter.removeEventListener('message', decryptionHandler);
2363 var decrypted = event.data.decrypted;
2364
2365 segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
2366 return doneFn(null, segment);
2367 }
2368 };
2369
2370 decrypter.addEventListener('message', decryptionHandler);
2371
2372 // this is an encrypted segment
2373 // incrementally decrypt the segment
2374 decrypter.postMessage((0, _binUtils.createTransferableMessage)({
2375 source: segment.requestId,
2376 encrypted: segment.encryptedBytes,
2377 key: segment.key.bytes,
2378 iv: segment.key.iv
2379 }), [segment.encryptedBytes.buffer, segment.key.bytes.buffer]);
2380};
2381
2382/**
2383 * The purpose of this function is to get the most pertinent error from the
2384 * array of errors.
2385 * For instance if a timeout and two aborts occur, then the aborts were
2386 * likely triggered by the timeout so return that error object.
2387 */
2388var getMostImportantError = function getMostImportantError(errors) {
2389 return errors.reduce(function (prev, err) {
2390 return err.code > prev.code ? err : prev;
2391 });
2392};
2393
2394/**
2395 * This function waits for all XHRs to finish (with either success or failure)
2396 * before continueing processing via it's callback. The function gathers errors
2397 * from each request into a single errors array so that the error status for
2398 * each request can be examined later.
2399 *
2400 * @param {Object} activeXhrs - an object that tracks all XHR requests
2401 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
2402 * @param {Function} doneFn - a callback that is executed after all resources have been
2403 * downloaded and any decryption completed
2404 */
2405var waitForCompletion = function waitForCompletion(activeXhrs, decrypter, doneFn) {
2406 var errors = [];
2407 var count = 0;
2408
2409 return function (error, segment) {
2410 if (error) {
2411 // If there are errors, we have to abort any outstanding requests
2412 abortAll(activeXhrs);
2413 errors.push(error);
2414 }
2415 count += 1;
2416
2417 if (count === activeXhrs.length) {
2418 // Keep track of when *all* of the requests have completed
2419 segment.endOfAllRequests = Date.now();
2420
2421 if (errors.length > 0) {
2422 var worstError = getMostImportantError(errors);
2423
2424 return doneFn(worstError, segment);
2425 }
2426 if (segment.encryptedBytes) {
2427 return decryptSegment(decrypter, segment, doneFn);
2428 }
2429 // Otherwise, everything is ready just continue
2430 return doneFn(null, segment);
2431 }
2432 };
2433};
2434
2435/**
2436 * Simple progress event callback handler that gathers some stats before
2437 * executing a provided callback with the `segment` object
2438 *
2439 * @param {Object} segment - a simplified copy of the segmentInfo object
2440 * from SegmentLoader
2441 * @param {Function} progressFn - a callback that is executed each time a progress event
2442 * is received
2443 * @param {Event} event - the progress event object from XMLHttpRequest
2444 */
2445var handleProgress = function handleProgress(segment, progressFn) {
2446 return function (event) {
2447 segment.stats = _videoJs2['default'].mergeOptions(segment.stats, getProgressStats(event));
2448
2449 // record the time that we receive the first byte of data
2450 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
2451 segment.stats.firstBytesReceivedAt = Date.now();
2452 }
2453
2454 return progressFn(event, segment);
2455 };
2456};
2457
2458/**
2459 * Load all resources and does any processing necessary for a media-segment
2460 *
2461 * Features:
2462 * decrypts the media-segment if it has a key uri and an iv
2463 * aborts *all* requests if *any* one request fails
2464 *
2465 * The segment object, at minimum, has the following format:
2466 * {
2467 * resolvedUri: String,
2468 * [byterange]: {
2469 * offset: Number,
2470 * length: Number
2471 * },
2472 * [key]: {
2473 * resolvedUri: String
2474 * [byterange]: {
2475 * offset: Number,
2476 * length: Number
2477 * },
2478 * iv: {
2479 * bytes: Uint32Array
2480 * }
2481 * },
2482 * [map]: {
2483 * resolvedUri: String,
2484 * [byterange]: {
2485 * offset: Number,
2486 * length: Number
2487 * },
2488 * [bytes]: Uint8Array
2489 * }
2490 * }
2491 * ...where [name] denotes optional properties
2492 *
2493 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
2494 * @param {Object} xhrOptions - the base options to provide to all xhr requests
2495 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
2496 * decryption routines
2497 * @param {Object} segment - a simplified copy of the segmentInfo object
2498 * from SegmentLoader
2499 * @param {Function} progressFn - a callback that receives progress events from the main
2500 * segment's xhr request
2501 * @param {Function} doneFn - a callback that is executed only once all requests have
2502 * succeeded or failed
2503 * @returns {Function} a function that, when invoked, immediately aborts all
2504 * outstanding requests
2505 */
2506var mediaSegmentRequest = function mediaSegmentRequest(xhr, xhrOptions, decryptionWorker, segment, progressFn, doneFn) {
2507 var activeXhrs = [];
2508 var finishProcessingFn = waitForCompletion(activeXhrs, decryptionWorker, doneFn);
2509
2510 // optionally, request the decryption key
2511 if (segment.key) {
2512 var keyRequestOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
2513 uri: segment.key.resolvedUri,
2514 responseType: 'arraybuffer'
2515 });
2516 var keyRequestCallback = handleKeyResponse(segment, finishProcessingFn);
2517 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
2518
2519 activeXhrs.push(keyXhr);
2520 }
2521
2522 // optionally, request the associated media init segment
2523 if (segment.map && !segment.map.bytes) {
2524 var initSegmentOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
2525 uri: segment.map.resolvedUri,
2526 responseType: 'arraybuffer',
2527 headers: segmentXhrHeaders(segment.map)
2528 });
2529 var initSegmentRequestCallback = handleInitSegmentResponse(segment, finishProcessingFn);
2530 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
2531
2532 activeXhrs.push(initSegmentXhr);
2533 }
2534
2535 var segmentRequestOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
2536 uri: segment.resolvedUri,
2537 responseType: 'arraybuffer',
2538 headers: segmentXhrHeaders(segment)
2539 });
2540 var segmentRequestCallback = handleSegmentResponse(segment, finishProcessingFn);
2541 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
2542
2543 segmentXhr.addEventListener('progress', handleProgress(segment, progressFn));
2544 activeXhrs.push(segmentXhr);
2545
2546 return function () {
2547 return abortAll(activeXhrs);
2548 };
2549};
2550exports.mediaSegmentRequest = mediaSegmentRequest;
2551}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
2552},{"./bin-utils":2}],7:[function(require,module,exports){
2553(function (global){
2554/**
2555 * @file playback-watcher.js
2556 *
2557 * Playback starts, and now my watch begins. It shall not end until my death. I shall
2558 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
2559 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
2560 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
2561 * my life and honor to the Playback Watch, for this Player and all the Players to come.
2562 */
2563
2564'use strict';
2565
2566Object.defineProperty(exports, '__esModule', {
2567 value: true
2568});
2569
2570var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
2571
2572function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
2573
2574function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
2575
2576var _globalWindow = require('global/window');
2577
2578var _globalWindow2 = _interopRequireDefault(_globalWindow);
2579
2580var _ranges = require('./ranges');
2581
2582var _ranges2 = _interopRequireDefault(_ranges);
2583
2584var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
2585
2586var _videoJs2 = _interopRequireDefault(_videoJs);
2587
2588// Set of events that reset the playback-watcher time check logic and clear the timeout
2589var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
2590
2591/**
2592 * @class PlaybackWatcher
2593 */
2594
2595var PlaybackWatcher = (function () {
2596 /**
2597 * Represents an PlaybackWatcher object.
2598 * @constructor
2599 * @param {object} options an object that includes the tech and settings
2600 */
2601
2602 function PlaybackWatcher(options) {
2603 var _this = this;
2604
2605 _classCallCheck(this, PlaybackWatcher);
2606
2607 this.tech_ = options.tech;
2608 this.seekable = options.seekable;
2609
2610 this.consecutiveUpdates = 0;
2611 this.lastRecordedTime = null;
2612 this.timer_ = null;
2613 this.checkCurrentTimeTimeout_ = null;
2614
2615 if (options.debug) {
2616 this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'playback-watcher ->');
2617 }
2618 this.logger_('initialize');
2619
2620 var waitingHandler = function waitingHandler() {
2621 return _this.techWaiting_();
2622 };
2623 var cancelTimerHandler = function cancelTimerHandler() {
2624 return _this.cancelTimer_();
2625 };
2626 var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
2627 return _this.fixesBadSeeks_();
2628 };
2629
2630 this.tech_.on('seekablechanged', fixesBadSeeksHandler);
2631 this.tech_.on('waiting', waitingHandler);
2632 this.tech_.on(timerCancelEvents, cancelTimerHandler);
2633 this.monitorCurrentTime_();
2634
2635 // Define the dispose function to clean up our events
2636 this.dispose = function () {
2637 _this.logger_('dispose');
2638 _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
2639 _this.tech_.off('waiting', waitingHandler);
2640 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
2641 if (_this.checkCurrentTimeTimeout_) {
2642 _globalWindow2['default'].clearTimeout(_this.checkCurrentTimeTimeout_);
2643 }
2644 _this.cancelTimer_();
2645 };
2646 }
2647
2648 /**
2649 * Periodically check current time to see if playback stopped
2650 *
2651 * @private
2652 */
2653
2654 _createClass(PlaybackWatcher, [{
2655 key: 'monitorCurrentTime_',
2656 value: function monitorCurrentTime_() {
2657 this.checkCurrentTime_();
2658
2659 if (this.checkCurrentTimeTimeout_) {
2660 _globalWindow2['default'].clearTimeout(this.checkCurrentTimeTimeout_);
2661 }
2662
2663 // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
2664 this.checkCurrentTimeTimeout_ = _globalWindow2['default'].setTimeout(this.monitorCurrentTime_.bind(this), 250);
2665 }
2666
2667 /**
2668 * The purpose of this function is to emulate the "waiting" event on
2669 * browsers that do not emit it when they are waiting for more
2670 * data to continue playback
2671 *
2672 * @private
2673 */
2674 }, {
2675 key: 'checkCurrentTime_',
2676 value: function checkCurrentTime_() {
2677 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
2678 this.consecutiveUpdates = 0;
2679 this.lastRecordedTime = this.tech_.currentTime();
2680 return;
2681 }
2682
2683 if (this.tech_.paused() || this.tech_.seeking()) {
2684 return;
2685 }
2686
2687 var currentTime = this.tech_.currentTime();
2688 var buffered = this.tech_.buffered();
2689
2690 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + 0.1 >= buffered.end(buffered.length - 1))) {
2691 // If current time is at the end of the final buffered region, then any playback
2692 // stall is most likely caused by buffering in a low bandwidth environment. The tech
2693 // should fire a `waiting` event in this scenario, but due to browser and tech
2694 // inconsistencies (e.g. The Flash tech does not fire a `waiting` event when the end
2695 // of the buffer is reached and has fallen off the live window). Calling
2696 // `techWaiting_` here allows us to simulate responding to a native `waiting` event
2697 // when the tech fails to emit one.
2698 return this.techWaiting_();
2699 }
2700
2701 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
2702 this.consecutiveUpdates++;
2703 this.waiting_();
2704 } else if (currentTime === this.lastRecordedTime) {
2705 this.consecutiveUpdates++;
2706 } else {
2707 this.consecutiveUpdates = 0;
2708 this.lastRecordedTime = currentTime;
2709 }
2710 }
2711
2712 /**
2713 * Cancels any pending timers and resets the 'timeupdate' mechanism
2714 * designed to detect that we are stalled
2715 *
2716 * @private
2717 */
2718 }, {
2719 key: 'cancelTimer_',
2720 value: function cancelTimer_() {
2721 this.consecutiveUpdates = 0;
2722
2723 if (this.timer_) {
2724 this.logger_('cancelTimer_');
2725 clearTimeout(this.timer_);
2726 }
2727
2728 this.timer_ = null;
2729 }
2730
2731 /**
2732 * Fixes situations where there's a bad seek
2733 *
2734 * @return {Boolean} whether an action was taken to fix the seek
2735 * @private
2736 */
2737 }, {
2738 key: 'fixesBadSeeks_',
2739 value: function fixesBadSeeks_() {
2740 var seekable = this.seekable();
2741 var currentTime = this.tech_.currentTime();
2742
2743 if (this.tech_.seeking() && this.outsideOfSeekableWindow_(seekable, currentTime)) {
2744 var seekableEnd = seekable.end(seekable.length - 1);
2745
2746 // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
2747 this.logger_('Trying to seek outside of seekable at time ' + currentTime + ' with ' + ('seekable range ' + _ranges2['default'].printableRange(seekable) + '. Seeking to ') + (seekableEnd + '.'));
2748 this.tech_.setCurrentTime(seekableEnd);
2749 return true;
2750 }
2751
2752 return false;
2753 }
2754
2755 /**
2756 * Handler for situations when we determine the player is waiting.
2757 *
2758 * @private
2759 */
2760 }, {
2761 key: 'waiting_',
2762 value: function waiting_() {
2763 if (this.techWaiting_()) {
2764 return;
2765 }
2766
2767 // All tech waiting checks failed. Use last resort correction
2768 var currentTime = this.tech_.currentTime();
2769 var buffered = this.tech_.buffered();
2770 var currentRange = _ranges2['default'].findRange(buffered, currentTime);
2771
2772 // Sometimes the player can stall for unknown reasons within a contiguous buffered
2773 // region with no indication that anything is amiss (seen in Firefox). Seeking to
2774 // currentTime is usually enough to kickstart the player. This checks that the player
2775 // is currently within a buffered region before attempting a corrective seek.
2776 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
2777 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
2778 // make sure there is ~3 seconds of forward buffer before taking any corrective action
2779 // to avoid triggering an `unknownwaiting` event when the network is slow.
2780 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
2781 this.cancelTimer_();
2782 this.tech_.setCurrentTime(currentTime);
2783
2784 this.logger_('Stopped at ' + currentTime + ' while inside a buffered region ' + ('[' + currentRange.start(0) + ' -> ' + currentRange.end(0) + ']. Attempting to resume ') + 'playback by seeking to the current time.');
2785
2786 // unknown waiting corrections may be useful for monitoring QoS
2787 this.tech_.trigger({ type: 'usage', name: 'hls-unknown-waiting' });
2788 return;
2789 }
2790 }
2791
2792 /**
2793 * Handler for situations when the tech fires a `waiting` event
2794 *
2795 * @return {Boolean}
2796 * True if an action (or none) was needed to correct the waiting. False if no
2797 * checks passed
2798 * @private
2799 */
2800 }, {
2801 key: 'techWaiting_',
2802 value: function techWaiting_() {
2803 var seekable = this.seekable();
2804 var currentTime = this.tech_.currentTime();
2805
2806 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
2807 // Tech is seeking or bad seek fixed, no action needed
2808 return true;
2809 }
2810
2811 if (this.tech_.seeking() || this.timer_ !== null) {
2812 // Tech is seeking or already waiting on another action, no action needed
2813 return true;
2814 }
2815
2816 if (this.fellOutOfLiveWindow_(seekable, currentTime)) {
2817 var livePoint = seekable.end(seekable.length - 1);
2818
2819 this.logger_('Fell out of live window at time ' + currentTime + '. Seeking to ' + ('live point (seekable end) ' + livePoint));
2820 this.cancelTimer_();
2821 this.tech_.setCurrentTime(livePoint);
2822
2823 // live window resyncs may be useful for monitoring QoS
2824 this.tech_.trigger({ type: 'usage', name: 'hls-live-resync' });
2825 return true;
2826 }
2827
2828 var buffered = this.tech_.buffered();
2829 var nextRange = _ranges2['default'].findNextRange(buffered, currentTime);
2830
2831 if (this.videoUnderflow_(nextRange, buffered, currentTime)) {
2832 // Even though the video underflowed and was stuck in a gap, the audio overplayed
2833 // the gap, leading currentTime into a buffered range. Seeking to currentTime
2834 // allows the video to catch up to the audio position without losing any audio
2835 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
2836 this.cancelTimer_();
2837 this.tech_.setCurrentTime(currentTime);
2838
2839 // video underflow may be useful for monitoring QoS
2840 this.tech_.trigger({ type: 'usage', name: 'hls-video-underflow' });
2841 return true;
2842 }
2843
2844 // check for gap
2845 if (nextRange.length > 0) {
2846 var difference = nextRange.start(0) - currentTime;
2847
2848 this.logger_('Stopped at ' + currentTime + ', setting timer for ' + difference + ', seeking ' + ('to ' + nextRange.start(0)));
2849
2850 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
2851 return true;
2852 }
2853
2854 // All checks failed. Returning false to indicate failure to correct waiting
2855 return false;
2856 }
2857 }, {
2858 key: 'outsideOfSeekableWindow_',
2859 value: function outsideOfSeekableWindow_(seekable, currentTime) {
2860 if (!seekable.length) {
2861 // we can't make a solid case if there's no seekable, default to false
2862 return false;
2863 }
2864
2865 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
2866 if (currentTime < seekable.start(0) - 0.1 || currentTime > seekable.end(seekable.length - 1) + 0.1) {
2867 return true;
2868 }
2869
2870 return false;
2871 }
2872 }, {
2873 key: 'fellOutOfLiveWindow_',
2874 value: function fellOutOfLiveWindow_(seekable, currentTime) {
2875 if (seekable.length &&
2876 // can't fall before 0 and 0 seekable start identifies VOD stream
2877 seekable.start(0) > 0 && currentTime < seekable.start(0)) {
2878 return true;
2879 }
2880
2881 return false;
2882 }
2883 }, {
2884 key: 'videoUnderflow_',
2885 value: function videoUnderflow_(nextRange, buffered, currentTime) {
2886 if (nextRange.length === 0) {
2887 // Even if there is no available next range, there is still a possibility we are
2888 // stuck in a gap due to video underflow.
2889 var gap = this.gapFromVideoUnderflow_(buffered, currentTime);
2890
2891 if (gap) {
2892 this.logger_('Encountered a gap in video from ' + gap.start + ' to ' + gap.end + '. ' + ('Seeking to current time ' + currentTime));
2893
2894 return true;
2895 }
2896 }
2897
2898 return false;
2899 }
2900
2901 /**
2902 * Timer callback. If playback still has not proceeded, then we seek
2903 * to the start of the next buffered region.
2904 *
2905 * @private
2906 */
2907 }, {
2908 key: 'skipTheGap_',
2909 value: function skipTheGap_(scheduledCurrentTime) {
2910 var buffered = this.tech_.buffered();
2911 var currentTime = this.tech_.currentTime();
2912 var nextRange = _ranges2['default'].findNextRange(buffered, currentTime);
2913
2914 this.cancelTimer_();
2915
2916 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
2917 return;
2918 }
2919
2920 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0));
2921
2922 // only seek if we still have not played
2923 this.tech_.setCurrentTime(nextRange.start(0) + _ranges2['default'].TIME_FUDGE_FACTOR);
2924
2925 this.tech_.trigger({ type: 'usage', name: 'hls-gap-skip' });
2926 }
2927 }, {
2928 key: 'gapFromVideoUnderflow_',
2929 value: function gapFromVideoUnderflow_(buffered, currentTime) {
2930 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
2931 // playing for ~3 seconds after the video gap starts. This is done to account for
2932 // video buffer underflow/underrun (note that this is not done when there is audio
2933 // buffer underflow/underrun -- in that case the video will stop as soon as it
2934 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
2935 // video stalls). The player's time will reflect the playthrough of audio, so the
2936 // time will appear as if we are in a buffered region, even if we are stuck in a
2937 // "gap."
2938 //
2939 // Example:
2940 // video buffer: 0 => 10.1, 10.2 => 20
2941 // audio buffer: 0 => 20
2942 // overall buffer: 0 => 10.1, 10.2 => 20
2943 // current time: 13
2944 //
2945 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
2946 // however, the audio continued playing until it reached ~3 seconds past the gap
2947 // (13 seconds), at which point it stops as well. Since current time is past the
2948 // gap, findNextRange will return no ranges.
2949 //
2950 // To check for this issue, we see if there is a gap that starts somewhere within
2951 // a 3 second range (3 seconds +/- 1 second) back from our current time.
2952 var gaps = _ranges2['default'].findGaps(buffered);
2953
2954 for (var i = 0; i < gaps.length; i++) {
2955 var start = gaps.start(i);
2956 var end = gaps.end(i);
2957
2958 // gap is starts no more than 4 seconds back
2959 if (currentTime - start < 4 && currentTime - start > 2) {
2960 return {
2961 start: start,
2962 end: end
2963 };
2964 }
2965 }
2966
2967 return null;
2968 }
2969
2970 /**
2971 * A debugging logger noop that is set to console.log only if debugging
2972 * is enabled globally
2973 *
2974 * @private
2975 */
2976 }, {
2977 key: 'logger_',
2978 value: function logger_() {}
2979 }]);
2980
2981 return PlaybackWatcher;
2982})();
2983
2984exports['default'] = PlaybackWatcher;
2985module.exports = exports['default'];
2986}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
2987},{"./ranges":11,"global/window":30}],8:[function(require,module,exports){
2988(function (global){
2989/**
2990 * @file playlist-loader.js
2991 *
2992 * A state machine that manages the loading, caching, and updating of
2993 * M3U8 playlists.
2994 *
2995 */
2996'use strict';
2997
2998Object.defineProperty(exports, '__esModule', {
2999 value: true
3000});
3001
3002function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
3003
3004var _resolveUrl = require('./resolve-url');
3005
3006var _resolveUrl2 = _interopRequireDefault(_resolveUrl);
3007
3008var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
3009
3010var _playlistJs = require('./playlist.js');
3011
3012var _m3u8Parser = require('m3u8-parser');
3013
3014var _m3u8Parser2 = _interopRequireDefault(_m3u8Parser);
3015
3016var _globalWindow = require('global/window');
3017
3018var _globalWindow2 = _interopRequireDefault(_globalWindow);
3019
3020/**
3021 * Returns a new array of segments that is the result of merging
3022 * properties from an older list of segments onto an updated
3023 * list. No properties on the updated playlist will be overridden.
3024 *
3025 * @param {Array} original the outdated list of segments
3026 * @param {Array} update the updated list of segments
3027 * @param {Number=} offset the index of the first update
3028 * segment in the original segment list. For non-live playlists,
3029 * this should always be zero and does not need to be
3030 * specified. For live playlists, it should be the difference
3031 * between the media sequence numbers in the original and updated
3032 * playlists.
3033 * @return a list of merged segment objects
3034 */
3035var updateSegments = function updateSegments(original, update, offset) {
3036 var result = update.slice();
3037 var length = undefined;
3038 var i = undefined;
3039
3040 offset = offset || 0;
3041 length = Math.min(original.length, update.length + offset);
3042
3043 for (i = offset; i < length; i++) {
3044 result[i - offset] = (0, _videoJs.mergeOptions)(original[i], result[i - offset]);
3045 }
3046 return result;
3047};
3048
3049/**
3050 * Returns a new master playlist that is the result of merging an
3051 * updated media playlist into the original version. If the
3052 * updated media playlist does not match any of the playlist
3053 * entries in the original master playlist, null is returned.
3054 *
3055 * @param {Object} master a parsed master M3U8 object
3056 * @param {Object} media a parsed media M3U8 object
3057 * @return {Object} a new object that represents the original
3058 * master playlist with the updated media playlist merged in, or
3059 * null if the merge produced no change.
3060 */
3061var updateMaster = function updateMaster(master, media) {
3062 var changed = false;
3063 var result = (0, _videoJs.mergeOptions)(master, {});
3064 var i = master.playlists.length;
3065 var playlist = undefined;
3066 var segment = undefined;
3067 var j = undefined;
3068
3069 while (i--) {
3070 playlist = result.playlists[i];
3071 if (playlist.uri === media.uri) {
3072 // consider the playlist unchanged if the number of segments
3073 // are equal and the media sequence number is unchanged
3074 if (playlist.segments && media.segments && playlist.segments.length === media.segments.length && playlist.mediaSequence === media.mediaSequence) {
3075 continue;
3076 }
3077
3078 result.playlists[i] = (0, _videoJs.mergeOptions)(playlist, media);
3079 result.playlists[media.uri] = result.playlists[i];
3080
3081 // if the update could overlap existing segment information,
3082 // merge the two lists
3083 if (playlist.segments) {
3084 result.playlists[i].segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
3085 }
3086 // resolve any missing segment and key URIs
3087 j = 0;
3088 if (result.playlists[i].segments) {
3089 j = result.playlists[i].segments.length;
3090 }
3091 while (j--) {
3092 segment = result.playlists[i].segments[j];
3093 if (!segment.resolvedUri) {
3094 segment.resolvedUri = (0, _resolveUrl2['default'])(playlist.resolvedUri, segment.uri);
3095 }
3096 if (segment.key && !segment.key.resolvedUri) {
3097 segment.key.resolvedUri = (0, _resolveUrl2['default'])(playlist.resolvedUri, segment.key.uri);
3098 }
3099 if (segment.map && !segment.map.resolvedUri) {
3100 segment.map.resolvedUri = (0, _resolveUrl2['default'])(playlist.resolvedUri, segment.map.uri);
3101 }
3102 }
3103 changed = true;
3104 }
3105 }
3106 return changed ? result : null;
3107};
3108
3109/**
3110 * Load a playlist from a remote location
3111 *
3112 * @class PlaylistLoader
3113 * @extends Stream
3114 * @param {String} srcUrl the url to start with
3115 * @param {Boolean} withCredentials the withCredentials xhr option
3116 * @constructor
3117 */
3118var PlaylistLoader = function PlaylistLoader(srcUrl, hls, withCredentials) {
3119 var _this = this;
3120
3121 /* eslint-disable consistent-this */
3122 var loader = this;
3123 /* eslint-enable consistent-this */
3124 var mediaUpdateTimeout = undefined;
3125 var request = undefined;
3126 var playlistRequestError = undefined;
3127 var haveMetadata = undefined;
3128
3129 PlaylistLoader.prototype.constructor.call(this);
3130
3131 this.hls_ = hls;
3132
3133 if (!srcUrl) {
3134 throw new Error('A non-empty playlist URL is required');
3135 }
3136
3137 playlistRequestError = function (xhr, url, startingState) {
3138 loader.setBandwidth(request || xhr);
3139
3140 // any in-flight request is now finished
3141 request = null;
3142
3143 if (startingState) {
3144 loader.state = startingState;
3145 }
3146
3147 loader.error = {
3148 playlist: loader.master.playlists[url],
3149 status: xhr.status,
3150 message: 'HLS playlist request error at URL: ' + url,
3151 responseText: xhr.responseText,
3152 code: xhr.status >= 500 ? 4 : 2
3153 };
3154
3155 loader.trigger('error');
3156 };
3157
3158 // update the playlist loader's state in response to a new or
3159 // updated playlist.
3160 haveMetadata = function (xhr, url) {
3161 var parser = undefined;
3162 var refreshDelay = undefined;
3163 var update = undefined;
3164
3165 loader.setBandwidth(request || xhr);
3166
3167 // any in-flight request is now finished
3168 request = null;
3169
3170 loader.state = 'HAVE_METADATA';
3171
3172 parser = new _m3u8Parser2['default'].Parser();
3173 parser.push(xhr.responseText);
3174 parser.end();
3175 parser.manifest.uri = url;
3176
3177 // merge this playlist into the master
3178 update = updateMaster(loader.master, parser.manifest);
3179 refreshDelay = (parser.manifest.targetDuration || 10) * 1000;
3180 loader.targetDuration = parser.manifest.targetDuration;
3181 if (update) {
3182 loader.master = update;
3183 loader.media_ = loader.master.playlists[parser.manifest.uri];
3184 } else {
3185 // if the playlist is unchanged since the last reload,
3186 // try again after half the target duration
3187 refreshDelay /= 2;
3188 loader.trigger('playlistunchanged');
3189 }
3190
3191 // refresh live playlists after a target duration passes
3192 if (!loader.media().endList) {
3193 _globalWindow2['default'].clearTimeout(mediaUpdateTimeout);
3194 mediaUpdateTimeout = _globalWindow2['default'].setTimeout(function () {
3195 loader.trigger('mediaupdatetimeout');
3196 }, refreshDelay);
3197 }
3198
3199 loader.trigger('loadedplaylist');
3200 };
3201
3202 // initialize the loader state
3203 loader.state = 'HAVE_NOTHING';
3204
3205 /**
3206 * Abort any outstanding work and clean up.
3207 */
3208 loader.dispose = function () {
3209 loader.stopRequest();
3210 _globalWindow2['default'].clearTimeout(mediaUpdateTimeout);
3211 loader.off();
3212 };
3213
3214 loader.stopRequest = function () {
3215 if (request) {
3216 var oldRequest = request;
3217
3218 request = null;
3219 oldRequest.onreadystatechange = null;
3220 oldRequest.abort();
3221 }
3222 };
3223
3224 /**
3225 * Returns the number of enabled playlists on the master playlist object
3226 *
3227 * @return {Number} number of eneabled playlists
3228 */
3229 loader.enabledPlaylists_ = function () {
3230 return loader.master.playlists.filter(_playlistJs.isEnabled).length;
3231 };
3232
3233 /**
3234 * Returns whether the current playlist is the lowest rendition
3235 *
3236 * @return {Boolean} true if on lowest rendition
3237 */
3238 loader.isLowestEnabledRendition_ = function () {
3239 if (loader.master.playlists.length === 1) {
3240 return true;
3241 }
3242
3243 var media = loader.media();
3244
3245 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
3246
3247 return loader.master.playlists.filter(function (playlist) {
3248 var enabled = (0, _playlistJs.isEnabled)(playlist);
3249
3250 if (!enabled) {
3251 return false;
3252 }
3253
3254 var bandwidth = 0;
3255
3256 if (playlist && playlist.attributes) {
3257 bandwidth = playlist.attributes.BANDWIDTH;
3258 }
3259 return bandwidth < currentBandwidth;
3260 }).length === 0;
3261 };
3262
3263 /**
3264 * Returns whether the current playlist is the final available rendition
3265 *
3266 * @return {Boolean} true if on final rendition
3267 */
3268 loader.isFinalRendition_ = function () {
3269 return loader.master.playlists.filter(_playlistJs.isEnabled).length === 1;
3270 };
3271
3272 /**
3273 * When called without any arguments, returns the currently
3274 * active media playlist. When called with a single argument,
3275 * triggers the playlist loader to asynchronously switch to the
3276 * specified media playlist. Calling this method while the
3277 * loader is in the HAVE_NOTHING causes an error to be emitted
3278 * but otherwise has no effect.
3279 *
3280 * @param {Object=} playlist the parsed media playlist
3281 * object to switch to
3282 * @return {Playlist} the current loaded media
3283 */
3284 loader.media = function (playlist) {
3285 var startingState = loader.state;
3286 var mediaChange = undefined;
3287
3288 // getter
3289 if (!playlist) {
3290 return loader.media_;
3291 }
3292
3293 // setter
3294 if (loader.state === 'HAVE_NOTHING') {
3295 throw new Error('Cannot switch media playlist from ' + loader.state);
3296 }
3297
3298 // find the playlist object if the target playlist has been
3299 // specified by URI
3300 if (typeof playlist === 'string') {
3301 if (!loader.master.playlists[playlist]) {
3302 throw new Error('Unknown playlist URI: ' + playlist);
3303 }
3304 playlist = loader.master.playlists[playlist];
3305 }
3306
3307 mediaChange = !loader.media_ || playlist.uri !== loader.media_.uri;
3308
3309 // switch to fully loaded playlists immediately
3310 if (loader.master.playlists[playlist.uri].endList) {
3311 // abort outstanding playlist requests
3312 if (request) {
3313 request.onreadystatechange = null;
3314 request.abort();
3315 request = null;
3316 }
3317 loader.state = 'HAVE_METADATA';
3318 loader.media_ = playlist;
3319
3320 // trigger media change if the active media has been updated
3321 if (mediaChange) {
3322 loader.trigger('mediachanging');
3323 loader.trigger('mediachange');
3324 }
3325 return;
3326 }
3327
3328 // switching to the active playlist is a no-op
3329 if (!mediaChange) {
3330 return;
3331 }
3332
3333 loader.state = 'SWITCHING_MEDIA';
3334
3335 // there is already an outstanding playlist request
3336 if (request) {
3337 if ((0, _resolveUrl2['default'])(loader.master.uri, playlist.uri) === request.url) {
3338 // requesting to switch to the same playlist multiple times
3339 // has no effect after the first
3340 return;
3341 }
3342 request.onreadystatechange = null;
3343 request.abort();
3344 request = null;
3345 }
3346
3347 // request the new playlist
3348 if (this.media_) {
3349 this.trigger('mediachanging');
3350 }
3351 request = this.hls_.xhr({
3352 uri: (0, _resolveUrl2['default'])(loader.master.uri, playlist.uri),
3353 withCredentials: withCredentials
3354 }, function (error, req) {
3355 // disposed
3356 if (!request) {
3357 return;
3358 }
3359
3360 if (error) {
3361 return playlistRequestError(request, playlist.uri, startingState);
3362 }
3363
3364 haveMetadata(req, playlist.uri);
3365
3366 // fire loadedmetadata the first time a media playlist is loaded
3367 if (startingState === 'HAVE_MASTER') {
3368 loader.trigger('loadedmetadata');
3369 } else {
3370 loader.trigger('mediachange');
3371 }
3372 });
3373 };
3374
3375 /**
3376 * set the bandwidth on an xhr to the bandwidth on the playlist
3377 */
3378 loader.setBandwidth = function (xhr) {
3379 loader.bandwidth = xhr.bandwidth;
3380 };
3381
3382 // live playlist staleness timeout
3383 loader.on('mediaupdatetimeout', function () {
3384 if (loader.state !== 'HAVE_METADATA') {
3385 // only refresh the media playlist if no other activity is going on
3386 return;
3387 }
3388
3389 loader.state = 'HAVE_CURRENT_METADATA';
3390 request = this.hls_.xhr({
3391 uri: (0, _resolveUrl2['default'])(loader.master.uri, loader.media().uri),
3392 withCredentials: withCredentials
3393 }, function (error, req) {
3394 // disposed
3395 if (!request) {
3396 return;
3397 }
3398
3399 if (error) {
3400 return playlistRequestError(request, loader.media().uri, 'HAVE_METADATA');
3401 }
3402 haveMetadata(request, loader.media().uri);
3403 });
3404 });
3405
3406 // setup initial sync info
3407 loader.on('firstplay', function () {
3408 var playlist = loader.media();
3409
3410 if (playlist) {
3411 playlist.syncInfo = {
3412 mediaSequence: playlist.mediaSequence,
3413 time: 0
3414 };
3415 }
3416 });
3417
3418 /**
3419 * pause loading of the playlist
3420 */
3421 loader.pause = function () {
3422 loader.stopRequest();
3423 _globalWindow2['default'].clearTimeout(mediaUpdateTimeout);
3424 if (loader.state === 'HAVE_NOTHING') {
3425 // If we pause the loader before any data has been retrieved, its as if we never
3426 // started, so reset to an unstarted state.
3427 loader.started = false;
3428 }
3429 };
3430
3431 /**
3432 * start loading of the playlist
3433 */
3434 loader.load = function (isFinalRendition) {
3435 var media = loader.media();
3436
3437 _globalWindow2['default'].clearTimeout(mediaUpdateTimeout);
3438
3439 if (isFinalRendition) {
3440 var refreshDelay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
3441
3442 mediaUpdateTimeout = _globalWindow2['default'].setTimeout(loader.load.bind(null, false), refreshDelay);
3443 return;
3444 }
3445
3446 if (!loader.started) {
3447 loader.start();
3448 return;
3449 }
3450
3451 if (media && !media.endList) {
3452 loader.trigger('mediaupdatetimeout');
3453 } else {
3454 loader.trigger('loadedplaylist');
3455 }
3456 };
3457
3458 /**
3459 * start loading of the playlist
3460 */
3461 loader.start = function () {
3462 loader.started = true;
3463
3464 // request the specified URL
3465 request = _this.hls_.xhr({
3466 uri: srcUrl,
3467 withCredentials: withCredentials
3468 }, function (error, req) {
3469 var parser = undefined;
3470 var playlist = undefined;
3471 var i = undefined;
3472
3473 // disposed
3474 if (!request) {
3475 return;
3476 }
3477
3478 // clear the loader's request reference
3479 request = null;
3480
3481 if (error) {
3482 loader.error = {
3483 status: req.status,
3484 message: 'HLS playlist request error at URL: ' + srcUrl,
3485 responseText: req.responseText,
3486 // MEDIA_ERR_NETWORK
3487 code: 2
3488 };
3489 if (loader.state === 'HAVE_NOTHING') {
3490 loader.started = false;
3491 }
3492 return loader.trigger('error');
3493 }
3494
3495 parser = new _m3u8Parser2['default'].Parser();
3496 parser.push(req.responseText);
3497 parser.end();
3498
3499 loader.state = 'HAVE_MASTER';
3500
3501 parser.manifest.uri = srcUrl;
3502
3503 // loaded a master playlist
3504 if (parser.manifest.playlists) {
3505 loader.master = parser.manifest;
3506
3507 // setup by-URI lookups and resolve media playlist URIs
3508 i = loader.master.playlists.length;
3509 while (i--) {
3510 playlist = loader.master.playlists[i];
3511 loader.master.playlists[playlist.uri] = playlist;
3512 playlist.resolvedUri = (0, _resolveUrl2['default'])(loader.master.uri, playlist.uri);
3513 }
3514
3515 // resolve any media group URIs
3516 ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
3517 for (var groupKey in loader.master.mediaGroups[mediaType]) {
3518 for (var labelKey in loader.master.mediaGroups[mediaType][groupKey]) {
3519 var mediaProperties = loader.master.mediaGroups[mediaType][groupKey][labelKey];
3520
3521 if (mediaProperties.uri) {
3522 mediaProperties.resolvedUri = (0, _resolveUrl2['default'])(loader.master.uri, mediaProperties.uri);
3523 }
3524 }
3525 }
3526 });
3527
3528 loader.trigger('loadedplaylist');
3529 if (!request) {
3530 // no media playlist was specifically selected so start
3531 // from the first listed one
3532 loader.media(parser.manifest.playlists[0]);
3533 }
3534 return;
3535 }
3536
3537 // loaded a media playlist
3538 // infer a master playlist if none was previously requested
3539 loader.master = {
3540 mediaGroups: {
3541 'AUDIO': {},
3542 'VIDEO': {},
3543 'CLOSED-CAPTIONS': {},
3544 'SUBTITLES': {}
3545 },
3546 uri: _globalWindow2['default'].location.href,
3547 playlists: [{
3548 uri: srcUrl
3549 }]
3550 };
3551 loader.master.playlists[srcUrl] = loader.master.playlists[0];
3552 loader.master.playlists[0].resolvedUri = srcUrl;
3553 haveMetadata(req, srcUrl);
3554 return loader.trigger('loadedmetadata');
3555 });
3556 };
3557};
3558
3559PlaylistLoader.prototype = new _videoJs.EventTarget();
3560
3561exports['default'] = PlaylistLoader;
3562module.exports = exports['default'];
3563}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
3564},{"./playlist.js":10,"./resolve-url":14,"global/window":30,"m3u8-parser":31}],9:[function(require,module,exports){
3565'use strict';
3566
3567Object.defineProperty(exports, '__esModule', {
3568 value: true
3569});
3570
3571function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
3572
3573var _config = require('./config');
3574
3575var _config2 = _interopRequireDefault(_config);
3576
3577var _playlist = require('./playlist');
3578
3579var _playlist2 = _interopRequireDefault(_playlist);
3580
3581// Utilities
3582
3583/**
3584 * Returns the CSS value for the specified property on an element
3585 * using `getComputedStyle`. Firefox has a long-standing issue where
3586 * getComputedStyle() may return null when running in an iframe with
3587 * `display: none`.
3588 *
3589 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
3590 * @param {HTMLElement} el the htmlelement to work on
3591 * @param {string} the proprety to get the style for
3592 */
3593var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
3594 var result = undefined;
3595
3596 if (!el) {
3597 return '';
3598 }
3599
3600 result = window.getComputedStyle(el);
3601 if (!result) {
3602 return '';
3603 }
3604
3605 return result[property];
3606};
3607
3608/**
3609 * Resuable stable sort function
3610 *
3611 * @param {Playlists} array
3612 * @param {Function} sortFn Different comparators
3613 * @function stableSort
3614 */
3615var stableSort = function stableSort(array, sortFn) {
3616 var newArray = array.slice();
3617
3618 array.sort(function (left, right) {
3619 var cmp = sortFn(left, right);
3620
3621 if (cmp === 0) {
3622 return newArray.indexOf(left) - newArray.indexOf(right);
3623 }
3624 return cmp;
3625 });
3626};
3627
3628/**
3629 * A comparator function to sort two playlist object by bandwidth.
3630 *
3631 * @param {Object} left a media playlist object
3632 * @param {Object} right a media playlist object
3633 * @return {Number} Greater than zero if the bandwidth attribute of
3634 * left is greater than the corresponding attribute of right. Less
3635 * than zero if the bandwidth of right is greater than left and
3636 * exactly zero if the two are equal.
3637 */
3638var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
3639 var leftBandwidth = undefined;
3640 var rightBandwidth = undefined;
3641
3642 if (left.attributes && left.attributes.BANDWIDTH) {
3643 leftBandwidth = left.attributes.BANDWIDTH;
3644 }
3645 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
3646 if (right.attributes && right.attributes.BANDWIDTH) {
3647 rightBandwidth = right.attributes.BANDWIDTH;
3648 }
3649 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
3650
3651 return leftBandwidth - rightBandwidth;
3652};
3653
3654exports.comparePlaylistBandwidth = comparePlaylistBandwidth;
3655/**
3656 * A comparator function to sort two playlist object by resolution (width).
3657 * @param {Object} left a media playlist object
3658 * @param {Object} right a media playlist object
3659 * @return {Number} Greater than zero if the resolution.width attribute of
3660 * left is greater than the corresponding attribute of right. Less
3661 * than zero if the resolution.width of right is greater than left and
3662 * exactly zero if the two are equal.
3663 */
3664var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
3665 var leftWidth = undefined;
3666 var rightWidth = undefined;
3667
3668 if (left.attributes && left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
3669 leftWidth = left.attributes.RESOLUTION.width;
3670 }
3671
3672 leftWidth = leftWidth || window.Number.MAX_VALUE;
3673
3674 if (right.attributes && right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
3675 rightWidth = right.attributes.RESOLUTION.width;
3676 }
3677
3678 rightWidth = rightWidth || window.Number.MAX_VALUE;
3679
3680 // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
3681 // have the same media dimensions/ resolution
3682 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
3683 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
3684 }
3685 return leftWidth - rightWidth;
3686};
3687
3688exports.comparePlaylistResolution = comparePlaylistResolution;
3689/**
3690 * Chooses the appropriate media playlist based on bandwidth and player size
3691 *
3692 * @param {Object} master
3693 * Object representation of the master manifest
3694 * @param {Number} playerBandwidth
3695 * Current calculated bandwidth of the player
3696 * @param {Number} playerWidth
3697 * Current width of the player element
3698 * @param {Number} playerHeight
3699 * Current height of the player element
3700 * @return {Playlist} the highest bitrate playlist less than the
3701 * currently detected bandwidth, accounting for some amount of
3702 * bandwidth variance
3703 */
3704var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight) {
3705 // convert the playlists to an intermediary representation to make comparisons easier
3706 var sortedPlaylistReps = master.playlists.map(function (playlist) {
3707 var width = undefined;
3708 var height = undefined;
3709 var bandwidth = undefined;
3710
3711 if (playlist.attributes) {
3712 width = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
3713 height = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
3714 bandwidth = playlist.attributes.BANDWIDTH;
3715 }
3716
3717 bandwidth = bandwidth || window.Number.MAX_VALUE;
3718
3719 return {
3720 bandwidth: bandwidth,
3721 width: width,
3722 height: height,
3723 playlist: playlist
3724 };
3725 });
3726
3727 stableSort(sortedPlaylistReps, function (left, right) {
3728 return left.bandwidth - right.bandwidth;
3729 });
3730
3731 // filter out any playlists that have been excluded due to
3732 // incompatible configurations or playback errors
3733 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
3734 return _playlist2['default'].isEnabled(rep.playlist);
3735 });
3736
3737 // filter out any variant that has greater effective bitrate
3738 // than the current estimated bandwidth
3739 var bandwidthPlaylistReps = sortedPlaylistReps.filter(function (rep) {
3740 return rep.bandwidth * _config2['default'].BANDWIDTH_VARIANCE < playerBandwidth;
3741 });
3742
3743 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1];
3744
3745 // get all of the renditions with the same (highest) bandwidth
3746 // and then taking the very first element
3747 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
3748 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
3749 })[0];
3750
3751 // filter out playlists without resolution information
3752 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
3753 return rep.width && rep.height;
3754 });
3755
3756 // sort variants by resolution
3757 stableSort(haveResolution, function (left, right) {
3758 return left.width - right.width;
3759 });
3760
3761 // if we have the exact resolution as the player use it
3762 var resolutionBestRepList = haveResolution.filter(function (rep) {
3763 return rep.width === playerWidth && rep.height === playerHeight;
3764 });
3765
3766 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1];
3767 // ensure that we pick the highest bandwidth variant that have exact resolution
3768 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
3769 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
3770 })[0];
3771
3772 var resolutionPlusOneList = undefined;
3773 var resolutionPlusOneSmallest = undefined;
3774 var resolutionPlusOneRep = undefined;
3775
3776 // find the smallest variant that is larger than the player
3777 // if there is no match of exact resolution
3778 if (!resolutionBestRep) {
3779 resolutionPlusOneList = haveResolution.filter(function (rep) {
3780 return rep.width > playerWidth || rep.height > playerHeight;
3781 });
3782
3783 // find all the variants have the same smallest resolution
3784 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
3785 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
3786 });
3787
3788 // ensure that we also pick the highest bandwidth variant that
3789 // is just-larger-than the video player
3790 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
3791 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
3792 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
3793 })[0];
3794 }
3795
3796 // fallback chain of variants
3797 return (resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || sortedPlaylistReps[0]).playlist;
3798};
3799
3800// Playlist Selectors
3801
3802/**
3803 * Chooses the appropriate media playlist based on the most recent
3804 * bandwidth estimate and the player size.
3805 *
3806 * Expects to be called within the context of an instance of HlsHandler
3807 *
3808 * @return {Playlist} the highest bitrate playlist less than the
3809 * currently detected bandwidth, accounting for some amount of
3810 * bandwidth variance
3811 */
3812var lastBandwidthSelector = function lastBandwidthSelector() {
3813 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10), parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10));
3814};
3815
3816exports.lastBandwidthSelector = lastBandwidthSelector;
3817/**
3818 * Chooses the appropriate media playlist based on an
3819 * exponential-weighted moving average of the bandwidth after
3820 * filtering for player size.
3821 *
3822 * Expects to be called within the context of an instance of HlsHandler
3823 *
3824 * @param {Number} decay - a number between 0 and 1. Higher values of
3825 * this parameter will cause previous bandwidth estimates to lose
3826 * significance more quickly.
3827 * @return {Function} a function which can be invoked to create a new
3828 * playlist selector function.
3829 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
3830 */
3831var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
3832 var average = -1;
3833
3834 if (decay < 0 || decay > 1) {
3835 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
3836 }
3837
3838 return function () {
3839 if (average < 0) {
3840 average = this.systemBandwidth;
3841 }
3842
3843 average = decay * this.systemBandwidth + (1 - decay) * average;
3844 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10), parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10));
3845 };
3846};
3847
3848exports.movingAverageBandwidthSelector = movingAverageBandwidthSelector;
3849/**
3850 * Chooses the appropriate media playlist based on the potential to rebuffer
3851 *
3852 * @param {Object} settings
3853 * Object of information required to use this selector
3854 * @param {Object} settings.master
3855 * Object representation of the master manifest
3856 * @param {Number} settings.currentTime
3857 * The current time of the player
3858 * @param {Number} settings.bandwidth
3859 * Current measured bandwidth
3860 * @param {Number} settings.duration
3861 * Duration of the media
3862 * @param {Number} settings.segmentDuration
3863 * Segment duration to be used in round trip time calculations
3864 * @param {Number} settings.timeUntilRebuffer
3865 * Time left in seconds until the player has to rebuffer
3866 * @param {Number} settings.currentTimeline
3867 * The current timeline segments are being loaded from
3868 * @param {SyncController} settings.syncController
3869 * SyncController for determining if we have a sync point for a given playlist
3870 * @return {Object|null}
3871 * {Object} return.playlist
3872 * The highest bandwidth playlist with the least amount of rebuffering
3873 * {Number} return.rebufferingImpact
3874 * The amount of time in seconds switching to this playlist will rebuffer. A
3875 * negative value means that switching will cause zero rebuffering.
3876 */
3877var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
3878 var master = settings.master;
3879 var currentTime = settings.currentTime;
3880 var bandwidth = settings.bandwidth;
3881 var duration = settings.duration;
3882 var segmentDuration = settings.segmentDuration;
3883 var timeUntilRebuffer = settings.timeUntilRebuffer;
3884 var currentTimeline = settings.currentTimeline;
3885 var syncController = settings.syncController;
3886
3887 var bandwidthPlaylists = master.playlists.filter(_playlist2['default'].hasAttribute.bind(null, 'BANDWIDTH'));
3888
3889 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
3890 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime);
3891 // If there is no sync point for this playlist, switching to it will require a
3892 // sync request first. This will double the request time
3893 var numRequests = syncPoint ? 1 : 2;
3894 var requestTimeEstimate = _playlist2['default'].estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
3895 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
3896
3897 return {
3898 playlist: playlist,
3899 rebufferingImpact: rebufferingImpact
3900 };
3901 });
3902
3903 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
3904 return estimate.rebufferingImpact <= 0;
3905 });
3906
3907 // Sort by bandwidth DESC
3908 stableSort(noRebufferingPlaylists, function (a, b) {
3909 return comparePlaylistBandwidth(b.playlist, a.playlist);
3910 });
3911
3912 if (noRebufferingPlaylists.length) {
3913 return noRebufferingPlaylists[0];
3914 }
3915
3916 stableSort(rebufferingEstimates, function (a, b) {
3917 return a.rebufferingImpact - b.rebufferingImpact;
3918 });
3919
3920 return rebufferingEstimates[0] || null;
3921};
3922exports.minRebufferMaxBandwidthSelector = minRebufferMaxBandwidthSelector;
3923},{"./config":3,"./playlist":10}],10:[function(require,module,exports){
3924(function (global){
3925/**
3926 * @file playlist.js
3927 *
3928 * Playlist related utilities.
3929 */
3930'use strict';
3931
3932Object.defineProperty(exports, '__esModule', {
3933 value: true
3934});
3935
3936function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
3937
3938var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
3939
3940var _globalWindow = require('global/window');
3941
3942var _globalWindow2 = _interopRequireDefault(_globalWindow);
3943
3944var Playlist = {
3945 /**
3946 * The number of segments that are unsafe to start playback at in
3947 * a live stream. Changing this value can cause playback stalls.
3948 * See HTTP Live Streaming, "Playing the Media Playlist File"
3949 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-18#section-6.3.3
3950 */
3951 UNSAFE_LIVE_SEGMENTS: 3
3952};
3953
3954/**
3955 * walk backward until we find a duration we can use
3956 * or return a failure
3957 *
3958 * @param {Playlist} playlist the playlist to walk through
3959 * @param {Number} endSequence the mediaSequence to stop walking on
3960 */
3961
3962var backwardDuration = function backwardDuration(playlist, endSequence) {
3963 var result = 0;
3964 var i = endSequence - playlist.mediaSequence;
3965 // if a start time is available for segment immediately following
3966 // the interval, use it
3967 var segment = playlist.segments[i];
3968
3969 // Walk backward until we find the latest segment with timeline
3970 // information that is earlier than endSequence
3971 if (segment) {
3972 if (typeof segment.start !== 'undefined') {
3973 return { result: segment.start, precise: true };
3974 }
3975 if (typeof segment.end !== 'undefined') {
3976 return {
3977 result: segment.end - segment.duration,
3978 precise: true
3979 };
3980 }
3981 }
3982 while (i--) {
3983 segment = playlist.segments[i];
3984 if (typeof segment.end !== 'undefined') {
3985 return { result: result + segment.end, precise: true };
3986 }
3987
3988 result += segment.duration;
3989
3990 if (typeof segment.start !== 'undefined') {
3991 return { result: result + segment.start, precise: true };
3992 }
3993 }
3994 return { result: result, precise: false };
3995};
3996
3997/**
3998 * walk forward until we find a duration we can use
3999 * or return a failure
4000 *
4001 * @param {Playlist} playlist the playlist to walk through
4002 * @param {Number} endSequence the mediaSequence to stop walking on
4003 */
4004var forwardDuration = function forwardDuration(playlist, endSequence) {
4005 var result = 0;
4006 var segment = undefined;
4007 var i = endSequence - playlist.mediaSequence;
4008 // Walk forward until we find the earliest segment with timeline
4009 // information
4010
4011 for (; i < playlist.segments.length; i++) {
4012 segment = playlist.segments[i];
4013 if (typeof segment.start !== 'undefined') {
4014 return {
4015 result: segment.start - result,
4016 precise: true
4017 };
4018 }
4019
4020 result += segment.duration;
4021
4022 if (typeof segment.end !== 'undefined') {
4023 return {
4024 result: segment.end - result,
4025 precise: true
4026 };
4027 }
4028 }
4029 // indicate we didn't find a useful duration estimate
4030 return { result: -1, precise: false };
4031};
4032
4033/**
4034 * Calculate the media duration from the segments associated with a
4035 * playlist. The duration of a subinterval of the available segments
4036 * may be calculated by specifying an end index.
4037 *
4038 * @param {Object} playlist a media playlist object
4039 * @param {Number=} endSequence an exclusive upper boundary
4040 * for the playlist. Defaults to playlist length.
4041 * @param {Number} expired the amount of time that has dropped
4042 * off the front of the playlist in a live scenario
4043 * @return {Number} the duration between the first available segment
4044 * and end index.
4045 */
4046var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
4047 var backward = undefined;
4048 var forward = undefined;
4049
4050 if (typeof endSequence === 'undefined') {
4051 endSequence = playlist.mediaSequence + playlist.segments.length;
4052 }
4053
4054 if (endSequence < playlist.mediaSequence) {
4055 return 0;
4056 }
4057
4058 // do a backward walk to estimate the duration
4059 backward = backwardDuration(playlist, endSequence);
4060 if (backward.precise) {
4061 // if we were able to base our duration estimate on timing
4062 // information provided directly from the Media Source, return
4063 // it
4064 return backward.result;
4065 }
4066
4067 // walk forward to see if a precise duration estimate can be made
4068 // that way
4069 forward = forwardDuration(playlist, endSequence);
4070 if (forward.precise) {
4071 // we found a segment that has been buffered and so it's
4072 // position is known precisely
4073 return forward.result;
4074 }
4075
4076 // return the less-precise, playlist-based duration estimate
4077 return backward.result + expired;
4078};
4079
4080/**
4081 * Calculates the duration of a playlist. If a start and end index
4082 * are specified, the duration will be for the subset of the media
4083 * timeline between those two indices. The total duration for live
4084 * playlists is always Infinity.
4085 *
4086 * @param {Object} playlist a media playlist object
4087 * @param {Number=} endSequence an exclusive upper
4088 * boundary for the playlist. Defaults to the playlist media
4089 * sequence number plus its length.
4090 * @param {Number=} expired the amount of time that has
4091 * dropped off the front of the playlist in a live scenario
4092 * @return {Number} the duration between the start index and end
4093 * index.
4094 */
4095var duration = function duration(playlist, endSequence, expired) {
4096 if (!playlist) {
4097 return 0;
4098 }
4099
4100 if (typeof expired !== 'number') {
4101 expired = 0;
4102 }
4103
4104 // if a slice of the total duration is not requested, use
4105 // playlist-level duration indicators when they're present
4106 if (typeof endSequence === 'undefined') {
4107 // if present, use the duration specified in the playlist
4108 if (playlist.totalDuration) {
4109 return playlist.totalDuration;
4110 }
4111
4112 // duration should be Infinity for live playlists
4113 if (!playlist.endList) {
4114 return _globalWindow2['default'].Infinity;
4115 }
4116 }
4117
4118 // calculate the total duration based on the segment durations
4119 return intervalDuration(playlist, endSequence, expired);
4120};
4121
4122exports.duration = duration;
4123/**
4124 * Calculate the time between two indexes in the current playlist
4125 * neight the start- nor the end-index need to be within the current
4126 * playlist in which case, the targetDuration of the playlist is used
4127 * to approximate the durations of the segments
4128 *
4129 * @param {Object} playlist a media playlist object
4130 * @param {Number} startIndex
4131 * @param {Number} endIndex
4132 * @return {Number} the number of seconds between startIndex and endIndex
4133 */
4134var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
4135 var durations = 0;
4136
4137 if (startIndex > endIndex) {
4138 var _ref = [endIndex, startIndex];
4139 startIndex = _ref[0];
4140 endIndex = _ref[1];
4141 }
4142
4143 if (startIndex < 0) {
4144 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
4145 durations += playlist.targetDuration;
4146 }
4147 startIndex = 0;
4148 }
4149
4150 for (var i = startIndex; i < endIndex; i++) {
4151 durations += playlist.segments[i].duration;
4152 }
4153
4154 return durations;
4155};
4156
4157exports.sumDurations = sumDurations;
4158/**
4159 * Calculates the playlist end time
4160 *
4161 * @param {Object} playlist a media playlist object
4162 * @param {Number=} expired the amount of time that has
4163 * dropped off the front of the playlist in a live scenario
4164 * @param {Boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
4165 * playlist end calculation should consider the safe live end
4166 * (truncate the playlist end by three segments). This is normally
4167 * used for calculating the end of the playlist's seekable range.
4168 * @returns {Number} the end time of playlist
4169 * @function playlistEnd
4170 */
4171var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd) {
4172 if (!playlist || !playlist.segments) {
4173 return null;
4174 }
4175 if (playlist.endList) {
4176 return duration(playlist);
4177 }
4178
4179 if (expired === null) {
4180 return null;
4181 }
4182
4183 expired = expired || 0;
4184
4185 var endSequence = useSafeLiveEnd ? Math.max(0, playlist.segments.length - Playlist.UNSAFE_LIVE_SEGMENTS) : Math.max(0, playlist.segments.length);
4186
4187 return intervalDuration(playlist, playlist.mediaSequence + endSequence, expired);
4188};
4189
4190exports.playlistEnd = playlistEnd;
4191/**
4192 * Calculates the interval of time that is currently seekable in a
4193 * playlist. The returned time ranges are relative to the earliest
4194 * moment in the specified playlist that is still available. A full
4195 * seekable implementation for live streams would need to offset
4196 * these values by the duration of content that has expired from the
4197 * stream.
4198 *
4199 * @param {Object} playlist a media playlist object
4200 * dropped off the front of the playlist in a live scenario
4201 * @param {Number=} expired the amount of time that has
4202 * dropped off the front of the playlist in a live scenario
4203 * @return {TimeRanges} the periods of time that are valid targets
4204 * for seeking
4205 */
4206var seekable = function seekable(playlist, expired) {
4207 var useSafeLiveEnd = true;
4208 var seekableStart = expired || 0;
4209 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd);
4210
4211 if (seekableEnd === null) {
4212 return (0, _videoJs.createTimeRange)();
4213 }
4214 return (0, _videoJs.createTimeRange)(seekableStart, seekableEnd);
4215};
4216
4217exports.seekable = seekable;
4218var isWholeNumber = function isWholeNumber(num) {
4219 return num - Math.floor(num) === 0;
4220};
4221
4222var roundSignificantDigit = function roundSignificantDigit(increment, num) {
4223 // If we have a whole number, just add 1 to it
4224 if (isWholeNumber(num)) {
4225 return num + increment * 0.1;
4226 }
4227
4228 var numDecimalDigits = num.toString().split('.')[1].length;
4229
4230 for (var i = 1; i <= numDecimalDigits; i++) {
4231 var scale = Math.pow(10, i);
4232 var temp = num * scale;
4233
4234 if (isWholeNumber(temp) || i === numDecimalDigits) {
4235 return (temp + increment) / scale;
4236 }
4237 }
4238};
4239
4240var ceilLeastSignificantDigit = roundSignificantDigit.bind(null, 1);
4241var floorLeastSignificantDigit = roundSignificantDigit.bind(null, -1);
4242
4243/**
4244 * Determine the index and estimated starting time of the segment that
4245 * contains a specified playback position in a media playlist.
4246 *
4247 * @param {Object} playlist the media playlist to query
4248 * @param {Number} currentTime The number of seconds since the earliest
4249 * possible position to determine the containing segment for
4250 * @param {Number} startIndex
4251 * @param {Number} startTime
4252 * @return {Object}
4253 */
4254var getMediaInfoForTime = function getMediaInfoForTime(playlist, currentTime, startIndex, startTime) {
4255 var i = undefined;
4256 var segment = undefined;
4257 var numSegments = playlist.segments.length;
4258
4259 var time = currentTime - startTime;
4260
4261 if (time < 0) {
4262 // Walk backward from startIndex in the playlist, adding durations
4263 // until we find a segment that contains `time` and return it
4264 if (startIndex > 0) {
4265 for (i = startIndex - 1; i >= 0; i--) {
4266 segment = playlist.segments[i];
4267 time += floorLeastSignificantDigit(segment.duration);
4268 if (time > 0) {
4269 return {
4270 mediaIndex: i,
4271 startTime: startTime - sumDurations(playlist, startIndex, i)
4272 };
4273 }
4274 }
4275 }
4276 // We were unable to find a good segment within the playlist
4277 // so select the first segment
4278 return {
4279 mediaIndex: 0,
4280 startTime: currentTime
4281 };
4282 }
4283
4284 // When startIndex is negative, we first walk forward to first segment
4285 // adding target durations. If we "run out of time" before getting to
4286 // the first segment, return the first segment
4287 if (startIndex < 0) {
4288 for (i = startIndex; i < 0; i++) {
4289 time -= playlist.targetDuration;
4290 if (time < 0) {
4291 return {
4292 mediaIndex: 0,
4293 startTime: currentTime
4294 };
4295 }
4296 }
4297 startIndex = 0;
4298 }
4299
4300 // Walk forward from startIndex in the playlist, subtracting durations
4301 // until we find a segment that contains `time` and return it
4302 for (i = startIndex; i < numSegments; i++) {
4303 segment = playlist.segments[i];
4304 time -= ceilLeastSignificantDigit(segment.duration);
4305 if (time < 0) {
4306 return {
4307 mediaIndex: i,
4308 startTime: startTime + sumDurations(playlist, startIndex, i)
4309 };
4310 }
4311 }
4312
4313 // We are out of possible candidates so load the last one...
4314 return {
4315 mediaIndex: numSegments - 1,
4316 startTime: currentTime
4317 };
4318};
4319
4320exports.getMediaInfoForTime = getMediaInfoForTime;
4321/**
4322 * Check whether the playlist is blacklisted or not.
4323 *
4324 * @param {Object} playlist the media playlist object
4325 * @return {boolean} whether the playlist is blacklisted or not
4326 * @function isBlacklisted
4327 */
4328var isBlacklisted = function isBlacklisted(playlist) {
4329 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
4330};
4331
4332exports.isBlacklisted = isBlacklisted;
4333/**
4334 * Check whether the playlist is enabled or not.
4335 *
4336 * @param {Object} playlist the media playlist object
4337 * @return {boolean} whether the playlist is enabled or not
4338 * @function isEnabled
4339 */
4340var isEnabled = function isEnabled(playlist) {
4341 var blacklisted = isBlacklisted(playlist);
4342
4343 return !playlist.disabled && !blacklisted;
4344};
4345
4346exports.isEnabled = isEnabled;
4347/**
4348 * Returns whether the current playlist is an AES encrypted HLS stream
4349 *
4350 * @return {Boolean} true if it's an AES encrypted HLS stream
4351 */
4352var isAes = function isAes(media) {
4353 for (var i = 0; i < media.segments.length; i++) {
4354 if (media.segments[i].key) {
4355 return true;
4356 }
4357 }
4358 return false;
4359};
4360
4361exports.isAes = isAes;
4362/**
4363 * Returns whether the current playlist contains fMP4
4364 *
4365 * @return {Boolean} true if the playlist contains fMP4
4366 */
4367var isFmp4 = function isFmp4(media) {
4368 for (var i = 0; i < media.segments.length; i++) {
4369 if (media.segments[i].map) {
4370 return true;
4371 }
4372 }
4373 return false;
4374};
4375
4376exports.isFmp4 = isFmp4;
4377/**
4378 * Checks if the playlist has a value for the specified attribute
4379 *
4380 * @param {String} attr
4381 * Attribute to check for
4382 * @param {Object} playlist
4383 * The media playlist object
4384 * @return {Boolean}
4385 * Whether the playlist contains a value for the attribute or not
4386 * @function hasAttribute
4387 */
4388var hasAttribute = function hasAttribute(attr, playlist) {
4389 return playlist.attributes && playlist.attributes[attr];
4390};
4391
4392exports.hasAttribute = hasAttribute;
4393/**
4394 * Estimates the time required to complete a segment download from the specified playlist
4395 *
4396 * @param {Number} segmentDuration
4397 * Duration of requested segment
4398 * @param {Number} bandwidth
4399 * Current measured bandwidth of the player
4400 * @param {Object} playlist
4401 * The media playlist object
4402 * @param {Number=} bytesReceived
4403 * Number of bytes already received for the request. Defaults to 0
4404 * @return {Number|NaN}
4405 * The estimated time to request the segment. NaN if bandwidth information for
4406 * the given playlist is unavailable
4407 * @function estimateSegmentRequestTime
4408 */
4409var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist) {
4410 var bytesReceived = arguments.length <= 3 || arguments[3] === undefined ? 0 : arguments[3];
4411
4412 if (!hasAttribute('BANDWIDTH', playlist)) {
4413 return NaN;
4414 }
4415
4416 var size = segmentDuration * playlist.attributes.BANDWIDTH;
4417
4418 return (size - bytesReceived * 8) / bandwidth;
4419};
4420
4421exports.estimateSegmentRequestTime = estimateSegmentRequestTime;
4422Playlist.duration = duration;
4423Playlist.seekable = seekable;
4424Playlist.getMediaInfoForTime = getMediaInfoForTime;
4425Playlist.isEnabled = isEnabled;
4426Playlist.isBlacklisted = isBlacklisted;
4427Playlist.playlistEnd = playlistEnd;
4428Playlist.isAes = isAes;
4429Playlist.isFmp4 = isFmp4;
4430Playlist.hasAttribute = hasAttribute;
4431Playlist.estimateSegmentRequestTime = estimateSegmentRequestTime;
4432
4433// exports
4434exports['default'] = Playlist;
4435}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
4436},{"global/window":30}],11:[function(require,module,exports){
4437(function (global){
4438/**
4439 * ranges
4440 *
4441 * Utilities for working with TimeRanges.
4442 *
4443 */
4444
4445'use strict';
4446
4447Object.defineProperty(exports, '__esModule', {
4448 value: true
4449});
4450
4451var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })();
4452
4453function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
4454
4455var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
4456
4457var _videoJs2 = _interopRequireDefault(_videoJs);
4458
4459// Fudge factor to account for TimeRanges rounding
4460var TIME_FUDGE_FACTOR = 1 / 30;
4461
4462/**
4463 * Clamps a value to within a range
4464 * @param {Number} num - the value to clamp
4465 * @param {Number} start - the start of the range to clamp within, inclusive
4466 * @param {Number} end - the end of the range to clamp within, inclusive
4467 * @return {Number}
4468 */
4469var clamp = function clamp(num, _ref) {
4470 var _ref2 = _slicedToArray(_ref, 2);
4471
4472 var start = _ref2[0];
4473 var end = _ref2[1];
4474
4475 return Math.min(Math.max(start, num), end);
4476};
4477var filterRanges = function filterRanges(timeRanges, predicate) {
4478 var results = [];
4479 var i = undefined;
4480
4481 if (timeRanges && timeRanges.length) {
4482 // Search for ranges that match the predicate
4483 for (i = 0; i < timeRanges.length; i++) {
4484 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
4485 results.push([timeRanges.start(i), timeRanges.end(i)]);
4486 }
4487 }
4488 }
4489
4490 return _videoJs2['default'].createTimeRanges(results);
4491};
4492
4493/**
4494 * Attempts to find the buffered TimeRange that contains the specified
4495 * time.
4496 * @param {TimeRanges} buffered - the TimeRanges object to query
4497 * @param {number} time - the time to filter on.
4498 * @returns {TimeRanges} a new TimeRanges object
4499 */
4500var findRange = function findRange(buffered, time) {
4501 return filterRanges(buffered, function (start, end) {
4502 return start - TIME_FUDGE_FACTOR <= time && end + TIME_FUDGE_FACTOR >= time;
4503 });
4504};
4505
4506/**
4507 * Returns the TimeRanges that begin later than the specified time.
4508 * @param {TimeRanges} timeRanges - the TimeRanges object to query
4509 * @param {number} time - the time to filter on.
4510 * @returns {TimeRanges} a new TimeRanges object.
4511 */
4512var findNextRange = function findNextRange(timeRanges, time) {
4513 return filterRanges(timeRanges, function (start) {
4514 return start - TIME_FUDGE_FACTOR >= time;
4515 });
4516};
4517
4518/**
4519 * Returns gaps within a list of TimeRanges
4520 * @param {TimeRanges} buffered - the TimeRanges object
4521 * @return {TimeRanges} a TimeRanges object of gaps
4522 */
4523var findGaps = function findGaps(buffered) {
4524 if (buffered.length < 2) {
4525 return _videoJs2['default'].createTimeRanges();
4526 }
4527
4528 var ranges = [];
4529
4530 for (var i = 1; i < buffered.length; i++) {
4531 var start = buffered.end(i - 1);
4532 var end = buffered.start(i);
4533
4534 ranges.push([start, end]);
4535 }
4536
4537 return _videoJs2['default'].createTimeRanges(ranges);
4538};
4539
4540/**
4541 * Search for a likely end time for the segment that was just appened
4542 * based on the state of the `buffered` property before and after the
4543 * append. If we fin only one such uncommon end-point return it.
4544 * @param {TimeRanges} original - the buffered time ranges before the update
4545 * @param {TimeRanges} update - the buffered time ranges after the update
4546 * @returns {Number|null} the end time added between `original` and `update`,
4547 * or null if one cannot be unambiguously determined.
4548 */
4549var findSoleUncommonTimeRangesEnd = function findSoleUncommonTimeRangesEnd(original, update) {
4550 var i = undefined;
4551 var start = undefined;
4552 var end = undefined;
4553 var result = [];
4554 var edges = [];
4555
4556 // In order to qualify as a possible candidate, the end point must:
4557 // 1) Not have already existed in the `original` ranges
4558 // 2) Not result from the shrinking of a range that already existed
4559 // in the `original` ranges
4560 // 3) Not be contained inside of a range that existed in `original`
4561 var overlapsCurrentEnd = function overlapsCurrentEnd(span) {
4562 return span[0] <= end && span[1] >= end;
4563 };
4564
4565 if (original) {
4566 // Save all the edges in the `original` TimeRanges object
4567 for (i = 0; i < original.length; i++) {
4568 start = original.start(i);
4569 end = original.end(i);
4570
4571 edges.push([start, end]);
4572 }
4573 }
4574
4575 if (update) {
4576 // Save any end-points in `update` that are not in the `original`
4577 // TimeRanges object
4578 for (i = 0; i < update.length; i++) {
4579 start = update.start(i);
4580 end = update.end(i);
4581
4582 if (edges.some(overlapsCurrentEnd)) {
4583 continue;
4584 }
4585
4586 // at this point it must be a unique non-shrinking end edge
4587 result.push(end);
4588 }
4589 }
4590
4591 // we err on the side of caution and return null if didn't find
4592 // exactly *one* differing end edge in the search above
4593 if (result.length !== 1) {
4594 return null;
4595 }
4596
4597 return result[0];
4598};
4599
4600/**
4601 * Calculate the intersection of two TimeRanges
4602 * @param {TimeRanges} bufferA
4603 * @param {TimeRanges} bufferB
4604 * @returns {TimeRanges} The interesection of `bufferA` with `bufferB`
4605 */
4606var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
4607 var start = null;
4608 var end = null;
4609 var arity = 0;
4610 var extents = [];
4611 var ranges = [];
4612
4613 if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
4614 return _videoJs2['default'].createTimeRange();
4615 }
4616
4617 // Handle the case where we have both buffers and create an
4618 // intersection of the two
4619 var count = bufferA.length;
4620
4621 // A) Gather up all start and end times
4622 while (count--) {
4623 extents.push({ time: bufferA.start(count), type: 'start' });
4624 extents.push({ time: bufferA.end(count), type: 'end' });
4625 }
4626 count = bufferB.length;
4627 while (count--) {
4628 extents.push({ time: bufferB.start(count), type: 'start' });
4629 extents.push({ time: bufferB.end(count), type: 'end' });
4630 }
4631 // B) Sort them by time
4632 extents.sort(function (a, b) {
4633 return a.time - b.time;
4634 });
4635
4636 // C) Go along one by one incrementing arity for start and decrementing
4637 // arity for ends
4638 for (count = 0; count < extents.length; count++) {
4639 if (extents[count].type === 'start') {
4640 arity++;
4641
4642 // D) If arity is ever incremented to 2 we are entering an
4643 // overlapping range
4644 if (arity === 2) {
4645 start = extents[count].time;
4646 }
4647 } else if (extents[count].type === 'end') {
4648 arity--;
4649
4650 // E) If arity is ever decremented to 1 we leaving an
4651 // overlapping range
4652 if (arity === 1) {
4653 end = extents[count].time;
4654 }
4655 }
4656
4657 // F) Record overlapping ranges
4658 if (start !== null && end !== null) {
4659 ranges.push([start, end]);
4660 start = null;
4661 end = null;
4662 }
4663 }
4664
4665 return _videoJs2['default'].createTimeRanges(ranges);
4666};
4667
4668/**
4669 * Calculates the percentage of `segmentRange` that overlaps the
4670 * `buffered` time ranges.
4671 * @param {TimeRanges} segmentRange - the time range that the segment
4672 * covers adjusted according to currentTime
4673 * @param {TimeRanges} referenceRange - the original time range that the
4674 * segment covers
4675 * @param {Number} currentTime - time in seconds where the current playback
4676 * is at
4677 * @param {TimeRanges} buffered - the currently buffered time ranges
4678 * @returns {Number} percent of the segment currently buffered
4679 */
4680var calculateBufferedPercent = function calculateBufferedPercent(adjustedRange, referenceRange, currentTime, buffered) {
4681 var referenceDuration = referenceRange.end(0) - referenceRange.start(0);
4682 var adjustedDuration = adjustedRange.end(0) - adjustedRange.start(0);
4683 var bufferMissingFromAdjusted = referenceDuration - adjustedDuration;
4684 var adjustedIntersection = bufferIntersection(adjustedRange, buffered);
4685 var referenceIntersection = bufferIntersection(referenceRange, buffered);
4686 var adjustedOverlap = 0;
4687 var referenceOverlap = 0;
4688
4689 var count = adjustedIntersection.length;
4690
4691 while (count--) {
4692 adjustedOverlap += adjustedIntersection.end(count) - adjustedIntersection.start(count);
4693
4694 // If the current overlap segment starts at currentTime, then increase the
4695 // overlap duration so that it actually starts at the beginning of referenceRange
4696 // by including the difference between the two Range's durations
4697 // This is a work around for the way Flash has no buffer before currentTime
4698 if (adjustedIntersection.start(count) === currentTime) {
4699 adjustedOverlap += bufferMissingFromAdjusted;
4700 }
4701 }
4702
4703 count = referenceIntersection.length;
4704
4705 while (count--) {
4706 referenceOverlap += referenceIntersection.end(count) - referenceIntersection.start(count);
4707 }
4708
4709 // Use whichever value is larger for the percentage-buffered since that value
4710 // is likely more accurate because the only way
4711 return Math.max(adjustedOverlap, referenceOverlap) / referenceDuration * 100;
4712};
4713
4714/**
4715 * Return the amount of a range specified by the startOfSegment and segmentDuration
4716 * overlaps the current buffered content.
4717 *
4718 * @param {Number} startOfSegment - the time where the segment begins
4719 * @param {Number} segmentDuration - the duration of the segment in seconds
4720 * @param {Number} currentTime - time in seconds where the current playback
4721 * is at
4722 * @param {TimeRanges} buffered - the state of the buffer
4723 * @returns {Number} percentage of the segment's time range that is
4724 * already in `buffered`
4725 */
4726var getSegmentBufferedPercent = function getSegmentBufferedPercent(startOfSegment, segmentDuration, currentTime, buffered) {
4727 var endOfSegment = startOfSegment + segmentDuration;
4728
4729 // The entire time range of the segment
4730 var originalSegmentRange = _videoJs2['default'].createTimeRanges([[startOfSegment, endOfSegment]]);
4731
4732 // The adjusted segment time range that is setup such that it starts
4733 // no earlier than currentTime
4734 // Flash has no notion of a back-buffer so adjustedSegmentRange adjusts
4735 // for that and the function will still return 100% if a only half of a
4736 // segment is actually in the buffer as long as the currentTime is also
4737 // half-way through the segment
4738 var adjustedSegmentRange = _videoJs2['default'].createTimeRanges([[clamp(startOfSegment, [currentTime, endOfSegment]), endOfSegment]]);
4739
4740 // This condition happens when the currentTime is beyond the segment's
4741 // end time
4742 if (adjustedSegmentRange.start(0) === adjustedSegmentRange.end(0)) {
4743 return 0;
4744 }
4745
4746 var percent = calculateBufferedPercent(adjustedSegmentRange, originalSegmentRange, currentTime, buffered);
4747
4748 // If the segment is reported as having a zero duration, return 0%
4749 // since it is likely that we will need to fetch the segment
4750 if (isNaN(percent) || percent === Infinity || percent === -Infinity) {
4751 return 0;
4752 }
4753
4754 return percent;
4755};
4756
4757/**
4758 * Gets a human readable string for a TimeRange
4759 *
4760 * @param {TimeRange} range
4761 * @returns {String} a human readable string
4762 */
4763var printableRange = function printableRange(range) {
4764 var strArr = [];
4765
4766 if (!range || !range.length) {
4767 return '';
4768 }
4769
4770 for (var i = 0; i < range.length; i++) {
4771 strArr.push(range.start(i) + ' => ' + range.end(i));
4772 }
4773
4774 return strArr.join(', ');
4775};
4776
4777/**
4778 * Calculates the amount of time left in seconds until the player hits the end of the
4779 * buffer and causes a rebuffer
4780 *
4781 * @param {TimeRange} buffered
4782 * The state of the buffer
4783 * @param {Numnber} currentTime
4784 * The current time of the player
4785 * @param {Number} playbackRate
4786 * The current playback rate of the player. Defaults to 1.
4787 * @return {Number}
4788 * Time until the player has to start rebuffering in seconds.
4789 * @function timeUntilRebuffer
4790 */
4791var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime) {
4792 var playbackRate = arguments.length <= 2 || arguments[2] === undefined ? 1 : arguments[2];
4793
4794 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
4795
4796 return (bufferedEnd - currentTime) / playbackRate;
4797};
4798
4799exports['default'] = {
4800 findRange: findRange,
4801 findNextRange: findNextRange,
4802 findGaps: findGaps,
4803 findSoleUncommonTimeRangesEnd: findSoleUncommonTimeRangesEnd,
4804 getSegmentBufferedPercent: getSegmentBufferedPercent,
4805 TIME_FUDGE_FACTOR: TIME_FUDGE_FACTOR,
4806 printableRange: printableRange,
4807 timeUntilRebuffer: timeUntilRebuffer
4808};
4809module.exports = exports['default'];
4810}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
4811},{}],12:[function(require,module,exports){
4812(function (global){
4813'use strict';
4814
4815Object.defineProperty(exports, '__esModule', {
4816 value: true
4817});
4818
4819function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
4820
4821var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
4822
4823var _videoJs2 = _interopRequireDefault(_videoJs);
4824
4825var defaultOptions = {
4826 errorInterval: 30,
4827 getSource: function getSource(next) {
4828 var tech = this.tech({ IWillNotUseThisInPlugins: true });
4829 var sourceObj = tech.currentSource_;
4830
4831 return next(sourceObj);
4832 }
4833};
4834
4835/**
4836 * Main entry point for the plugin
4837 *
4838 * @param {Player} player a reference to a videojs Player instance
4839 * @param {Object} [options] an object with plugin options
4840 * @private
4841 */
4842var initPlugin = function initPlugin(player, options) {
4843 var lastCalled = 0;
4844 var seekTo = 0;
4845 var localOptions = _videoJs2['default'].mergeOptions(defaultOptions, options);
4846
4847 player.ready(function () {
4848 player.trigger({ type: 'usage', name: 'hls-error-reload-initialized' });
4849 });
4850
4851 /**
4852 * Player modifications to perform that must wait until `loadedmetadata`
4853 * has been triggered
4854 *
4855 * @private
4856 */
4857 var loadedMetadataHandler = function loadedMetadataHandler() {
4858 if (seekTo) {
4859 player.currentTime(seekTo);
4860 }
4861 };
4862
4863 /**
4864 * Set the source on the player element, play, and seek if necessary
4865 *
4866 * @param {Object} sourceObj An object specifying the source url and mime-type to play
4867 * @private
4868 */
4869 var setSource = function setSource(sourceObj) {
4870 if (sourceObj === null || sourceObj === undefined) {
4871 return;
4872 }
4873 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
4874
4875 player.one('loadedmetadata', loadedMetadataHandler);
4876
4877 player.src(sourceObj);
4878 player.trigger({ type: 'usage', name: 'hls-error-reload' });
4879 player.play();
4880 };
4881
4882 /**
4883 * Attempt to get a source from either the built-in getSource function
4884 * or a custom function provided via the options
4885 *
4886 * @private
4887 */
4888 var errorHandler = function errorHandler() {
4889 // Do not attempt to reload the source if a source-reload occurred before
4890 // 'errorInterval' time has elapsed since the last source-reload
4891 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
4892 player.trigger({ type: 'usage', name: 'hls-error-reload-canceled' });
4893 return;
4894 }
4895
4896 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
4897 _videoJs2['default'].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
4898 return;
4899 }
4900 lastCalled = Date.now();
4901
4902 return localOptions.getSource.call(player, setSource);
4903 };
4904
4905 /**
4906 * Unbind any event handlers that were bound by the plugin
4907 *
4908 * @private
4909 */
4910 var cleanupEvents = function cleanupEvents() {
4911 player.off('loadedmetadata', loadedMetadataHandler);
4912 player.off('error', errorHandler);
4913 player.off('dispose', cleanupEvents);
4914 };
4915
4916 /**
4917 * Cleanup before re-initializing the plugin
4918 *
4919 * @param {Object} [newOptions] an object with plugin options
4920 * @private
4921 */
4922 var reinitPlugin = function reinitPlugin(newOptions) {
4923 cleanupEvents();
4924 initPlugin(player, newOptions);
4925 };
4926
4927 player.on('error', errorHandler);
4928 player.on('dispose', cleanupEvents);
4929
4930 // Overwrite the plugin function so that we can correctly cleanup before
4931 // initializing the plugin
4932 player.reloadSourceOnError = reinitPlugin;
4933};
4934
4935/**
4936 * Reload the source when an error is detected as long as there
4937 * wasn't an error previously within the last 30 seconds
4938 *
4939 * @param {Object} [options] an object with plugin options
4940 */
4941var reloadSourceOnError = function reloadSourceOnError(options) {
4942 initPlugin(this, options);
4943};
4944
4945exports['default'] = reloadSourceOnError;
4946module.exports = exports['default'];
4947}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
4948},{}],13:[function(require,module,exports){
4949'use strict';
4950
4951Object.defineProperty(exports, '__esModule', {
4952 value: true
4953});
4954
4955function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
4956
4957var _playlistJs = require('./playlist.js');
4958
4959/**
4960 * Enable/disable playlist function. It is intended to have the first two
4961 * arguments partially-applied in order to create the final per-playlist
4962 * function.
4963 *
4964 * @param {PlaylistLoader} playlist - The rendition or media-playlist
4965 * @param {Function} changePlaylistFn - A function to be called after a
4966 * playlist's enabled-state has been changed. Will NOT be called if a
4967 * playlist's enabled-state is unchanged
4968 * @param {Boolean=} enable - Value to set the playlist enabled-state to
4969 * or if undefined returns the current enabled-state for the playlist
4970 * @return {Boolean} The current enabled-state of the playlist
4971 */
4972var enableFunction = function enableFunction(loader, playlistUri, changePlaylistFn, enable) {
4973 var playlist = loader.master.playlists[playlistUri];
4974 var blacklisted = (0, _playlistJs.isBlacklisted)(playlist);
4975 var currentlyEnabled = (0, _playlistJs.isEnabled)(playlist);
4976
4977 if (typeof enable === 'undefined') {
4978 return currentlyEnabled;
4979 }
4980
4981 if (enable) {
4982 delete playlist.disabled;
4983 } else {
4984 playlist.disabled = true;
4985 }
4986
4987 if (enable !== currentlyEnabled && !blacklisted) {
4988 // Ensure the outside world knows about our changes
4989 changePlaylistFn();
4990 if (enable) {
4991 loader.trigger('renditionenabled');
4992 } else {
4993 loader.trigger('renditiondisabled');
4994 }
4995 }
4996 return enable;
4997};
4998
4999/**
5000 * The representation object encapsulates the publicly visible information
5001 * in a media playlist along with a setter/getter-type function (enabled)
5002 * for changing the enabled-state of a particular playlist entry
5003 *
5004 * @class Representation
5005 */
5006
5007var Representation = function Representation(hlsHandler, playlist, id) {
5008 _classCallCheck(this, Representation);
5009
5010 // Get a reference to a bound version of fastQualityChange_
5011 var fastChangeFunction = hlsHandler.masterPlaylistController_.fastQualityChange_.bind(hlsHandler.masterPlaylistController_);
5012
5013 // Carefully descend into the playlist's attributes since most
5014 // properties are optional
5015 if (playlist.attributes) {
5016 var attributes = playlist.attributes;
5017
5018 if (attributes.RESOLUTION) {
5019 var resolution = attributes.RESOLUTION;
5020
5021 this.width = resolution.width;
5022 this.height = resolution.height;
5023 }
5024
5025 this.bandwidth = attributes.BANDWIDTH;
5026 }
5027
5028 // The id is simply the ordinality of the media playlist
5029 // within the master playlist
5030 this.id = id;
5031
5032 // Partially-apply the enableFunction to create a playlist-
5033 // specific variant
5034 this.enabled = enableFunction.bind(this, hlsHandler.playlists, playlist.uri, fastChangeFunction);
5035}
5036
5037/**
5038 * A mixin function that adds the `representations` api to an instance
5039 * of the HlsHandler class
5040 * @param {HlsHandler} hlsHandler - An instance of HlsHandler to add the
5041 * representation API into
5042 */
5043;
5044
5045var renditionSelectionMixin = function renditionSelectionMixin(hlsHandler) {
5046 var playlists = hlsHandler.playlists;
5047
5048 // Add a single API-specific function to the HlsHandler instance
5049 hlsHandler.representations = function () {
5050 return playlists.master.playlists.filter(function (media) {
5051 return !(0, _playlistJs.isBlacklisted)(media);
5052 }).map(function (e, i) {
5053 return new Representation(hlsHandler, e, e.uri);
5054 });
5055 };
5056};
5057
5058exports['default'] = renditionSelectionMixin;
5059module.exports = exports['default'];
5060},{"./playlist.js":10}],14:[function(require,module,exports){
5061/**
5062 * @file resolve-url.js
5063 */
5064
5065'use strict';
5066
5067Object.defineProperty(exports, '__esModule', {
5068 value: true
5069});
5070
5071function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
5072
5073var _urlToolkit = require('url-toolkit');
5074
5075var _urlToolkit2 = _interopRequireDefault(_urlToolkit);
5076
5077var _globalWindow = require('global/window');
5078
5079var _globalWindow2 = _interopRequireDefault(_globalWindow);
5080
5081var resolveUrl = function resolveUrl(baseURL, relativeURL) {
5082 // return early if we don't need to resolve
5083 if (/^[a-z]+:/i.test(relativeURL)) {
5084 return relativeURL;
5085 }
5086
5087 // if the base URL is relative then combine with the current location
5088 if (!/\/\//i.test(baseURL)) {
5089 baseURL = _urlToolkit2['default'].buildAbsoluteURL(_globalWindow2['default'].location.href, baseURL);
5090 }
5091
5092 return _urlToolkit2['default'].buildAbsoluteURL(baseURL, relativeURL);
5093};
5094
5095exports['default'] = resolveUrl;
5096module.exports = exports['default'];
5097},{"global/window":30,"url-toolkit":61}],15:[function(require,module,exports){
5098(function (global){
5099/**
5100 * @file segment-loader.js
5101 */
5102'use strict';
5103
5104Object.defineProperty(exports, '__esModule', {
5105 value: true
5106});
5107
5108var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
5109
5110var _get = function get(_x4, _x5, _x6) { var _again = true; _function: while (_again) { var object = _x4, property = _x5, receiver = _x6; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x4 = parent; _x5 = property; _x6 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
5111
5112function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
5113
5114function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
5115
5116function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
5117
5118var _playlist = require('./playlist');
5119
5120var _playlist2 = _interopRequireDefault(_playlist);
5121
5122var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
5123
5124var _videoJs2 = _interopRequireDefault(_videoJs);
5125
5126var _sourceUpdater = require('./source-updater');
5127
5128var _sourceUpdater2 = _interopRequireDefault(_sourceUpdater);
5129
5130var _config = require('./config');
5131
5132var _config2 = _interopRequireDefault(_config);
5133
5134var _globalWindow = require('global/window');
5135
5136var _globalWindow2 = _interopRequireDefault(_globalWindow);
5137
5138var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs = require('videojs-contrib-media-sources/es5/remove-cues-from-track.js');
5139
5140var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2 = _interopRequireDefault(_videojsContribMediaSourcesEs5RemoveCuesFromTrackJs);
5141
5142var _binUtils = require('./bin-utils');
5143
5144var _mediaSegmentRequest = require('./media-segment-request');
5145
5146var _ranges = require('./ranges');
5147
5148var _playlistSelectors = require('./playlist-selectors');
5149
5150// in ms
5151var CHECK_BUFFER_DELAY = 500;
5152
5153/**
5154 * Determines if we should call endOfStream on the media source based
5155 * on the state of the buffer or if appened segment was the final
5156 * segment in the playlist.
5157 *
5158 * @param {Object} playlist a media playlist object
5159 * @param {Object} mediaSource the MediaSource object
5160 * @param {Number} segmentIndex the index of segment we last appended
5161 * @returns {Boolean} do we need to call endOfStream on the MediaSource
5162 */
5163var detectEndOfStream = function detectEndOfStream(playlist, mediaSource, segmentIndex) {
5164 if (!playlist || !mediaSource) {
5165 return false;
5166 }
5167
5168 var segments = playlist.segments;
5169
5170 // determine a few boolean values to help make the branch below easier
5171 // to read
5172 var appendedLastSegment = segmentIndex === segments.length;
5173
5174 // if we've buffered to the end of the video, we need to call endOfStream
5175 // so that MediaSources can trigger the `ended` event when it runs out of
5176 // buffered data instead of waiting for me
5177 return playlist.endList && mediaSource.readyState === 'open' && appendedLastSegment;
5178};
5179
5180var finite = function finite(num) {
5181 return typeof num === 'number' && isFinite(num);
5182};
5183
5184/**
5185 * An object that manages segment loading and appending.
5186 *
5187 * @class SegmentLoader
5188 * @param {Object} options required and optional options
5189 * @extends videojs.EventTarget
5190 */
5191
5192var SegmentLoader = (function (_videojs$EventTarget) {
5193 _inherits(SegmentLoader, _videojs$EventTarget);
5194
5195 function SegmentLoader(settings) {
5196 var _this = this;
5197
5198 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
5199
5200 _classCallCheck(this, SegmentLoader);
5201
5202 _get(Object.getPrototypeOf(SegmentLoader.prototype), 'constructor', this).call(this);
5203 // check pre-conditions
5204 if (!settings) {
5205 throw new TypeError('Initialization settings are required');
5206 }
5207 if (typeof settings.currentTime !== 'function') {
5208 throw new TypeError('No currentTime getter specified');
5209 }
5210 if (!settings.mediaSource) {
5211 throw new TypeError('No MediaSource specified');
5212 }
5213 // public properties
5214 this.state = 'INIT';
5215 this.bandwidth = settings.bandwidth;
5216 this.throughput = { rate: 0, count: 0 };
5217 this.roundTrip = NaN;
5218 this.resetStats_();
5219 this.mediaIndex = null;
5220
5221 // private settings
5222 this.hasPlayed_ = settings.hasPlayed;
5223 this.currentTime_ = settings.currentTime;
5224 this.seekable_ = settings.seekable;
5225 this.seeking_ = settings.seeking;
5226 this.duration_ = settings.duration;
5227 this.mediaSource_ = settings.mediaSource;
5228 this.hls_ = settings.hls;
5229 this.loaderType_ = settings.loaderType;
5230 this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
5231 this.goalBufferLength_ = settings.goalBufferLength;
5232
5233 // private instance variables
5234 this.checkBufferTimeout_ = null;
5235 this.error_ = void 0;
5236 this.currentTimeline_ = -1;
5237 this.pendingSegment_ = null;
5238 this.mimeType_ = null;
5239 this.sourceUpdater_ = null;
5240 this.xhrOptions_ = null;
5241
5242 // Fragmented mp4 playback
5243 this.activeInitSegmentId_ = null;
5244 this.initSegments_ = {};
5245
5246 this.decrypter_ = settings.decrypter;
5247
5248 // Manages the tracking and generation of sync-points, mappings
5249 // between a time in the display time and a segment index within
5250 // a playlist
5251 this.syncController_ = settings.syncController;
5252 this.syncPoint_ = {
5253 segmentIndex: 0,
5254 time: 0
5255 };
5256
5257 this.syncController_.on('syncinfoupdate', function () {
5258 return _this.trigger('syncinfoupdate');
5259 });
5260
5261 this.mediaSource_.addEventListener('sourceopen', function () {
5262 return _this.ended_ = false;
5263 });
5264
5265 // ...for determining the fetch location
5266 this.fetchAtBuffer_ = false;
5267
5268 if (options.debug) {
5269 this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'segment-loader', this.loaderType_, '->');
5270 }
5271 }
5272
5273 /**
5274 * reset all of our media stats
5275 *
5276 * @private
5277 */
5278
5279 _createClass(SegmentLoader, [{
5280 key: 'resetStats_',
5281 value: function resetStats_() {
5282 this.mediaBytesTransferred = 0;
5283 this.mediaRequests = 0;
5284 this.mediaRequestsAborted = 0;
5285 this.mediaRequestsTimedout = 0;
5286 this.mediaRequestsErrored = 0;
5287 this.mediaTransferDuration = 0;
5288 this.mediaSecondsLoaded = 0;
5289 }
5290
5291 /**
5292 * dispose of the SegmentLoader and reset to the default state
5293 */
5294 }, {
5295 key: 'dispose',
5296 value: function dispose() {
5297 this.state = 'DISPOSED';
5298 this.pause();
5299 this.abort_();
5300 if (this.sourceUpdater_) {
5301 this.sourceUpdater_.dispose();
5302 }
5303 this.resetStats_();
5304 }
5305
5306 /**
5307 * abort anything that is currently doing on with the SegmentLoader
5308 * and reset to a default state
5309 */
5310 }, {
5311 key: 'abort',
5312 value: function abort() {
5313 if (this.state !== 'WAITING') {
5314 if (this.pendingSegment_) {
5315 this.pendingSegment_ = null;
5316 }
5317 return;
5318 }
5319
5320 this.abort_();
5321
5322 // We aborted the requests we were waiting on, so reset the loader's state to READY
5323 // since we are no longer "waiting" on any requests. XHR callback is not always run
5324 // when the request is aborted. This will prevent the loader from being stuck in the
5325 // WAITING state indefinitely.
5326 this.state = 'READY';
5327
5328 // don't wait for buffer check timeouts to begin fetching the
5329 // next segment
5330 if (!this.paused()) {
5331 this.monitorBuffer_();
5332 }
5333 }
5334
5335 /**
5336 * abort all pending xhr requests and null any pending segements
5337 *
5338 * @private
5339 */
5340 }, {
5341 key: 'abort_',
5342 value: function abort_() {
5343 if (this.pendingSegment_) {
5344 this.pendingSegment_.abortRequests();
5345 }
5346
5347 // clear out the segment being processed
5348 this.pendingSegment_ = null;
5349 }
5350
5351 /**
5352 * set an error on the segment loader and null out any pending segements
5353 *
5354 * @param {Error} error the error to set on the SegmentLoader
5355 * @return {Error} the error that was set or that is currently set
5356 */
5357 }, {
5358 key: 'error',
5359 value: function error(_error) {
5360 if (typeof _error !== 'undefined') {
5361 this.error_ = _error;
5362 }
5363
5364 this.pendingSegment_ = null;
5365 return this.error_;
5366 }
5367 }, {
5368 key: 'endOfStream',
5369 value: function endOfStream() {
5370 this.ended_ = true;
5371 this.pause();
5372 this.trigger('ended');
5373 }
5374
5375 /**
5376 * Indicates which time ranges are buffered
5377 *
5378 * @return {TimeRange}
5379 * TimeRange object representing the current buffered ranges
5380 */
5381 }, {
5382 key: 'buffered_',
5383 value: function buffered_() {
5384 if (!this.sourceUpdater_) {
5385 return _videoJs2['default'].createTimeRanges();
5386 }
5387
5388 return this.sourceUpdater_.buffered();
5389 }
5390
5391 /**
5392 * Gets and sets init segment for the provided map
5393 *
5394 * @param {Object} map
5395 * The map object representing the init segment to get or set
5396 * @param {Boolean=} set
5397 * If true, the init segment for the provided map should be saved
5398 * @return {Object}
5399 * map object for desired init segment
5400 */
5401 }, {
5402 key: 'initSegment',
5403 value: function initSegment(map) {
5404 var set = arguments.length <= 1 || arguments[1] === undefined ? false : arguments[1];
5405
5406 if (!map) {
5407 return null;
5408 }
5409
5410 var id = (0, _binUtils.initSegmentId)(map);
5411 var storedMap = this.initSegments_[id];
5412
5413 if (set && !storedMap && map.bytes) {
5414 this.initSegments_[id] = storedMap = {
5415 resolvedUri: map.resolvedUri,
5416 byterange: map.byterange,
5417 bytes: map.bytes
5418 };
5419 }
5420
5421 return storedMap || map;
5422 }
5423
5424 /**
5425 * Returns true if all configuration required for loading is present, otherwise false.
5426 *
5427 * @return {Boolean} True if the all configuration is ready for loading
5428 * @private
5429 */
5430 }, {
5431 key: 'couldBeginLoading_',
5432 value: function couldBeginLoading_() {
5433 return this.playlist_ && (
5434 // the source updater is created when init_ is called, so either having a
5435 // source updater or being in the INIT state with a mimeType is enough
5436 // to say we have all the needed configuration to start loading.
5437 this.sourceUpdater_ || this.mimeType_ && this.state === 'INIT') && !this.paused();
5438 }
5439
5440 /**
5441 * load a playlist and start to fill the buffer
5442 */
5443 }, {
5444 key: 'load',
5445 value: function load() {
5446 // un-pause
5447 this.monitorBuffer_();
5448
5449 // if we don't have a playlist yet, keep waiting for one to be
5450 // specified
5451 if (!this.playlist_) {
5452 return;
5453 }
5454
5455 // not sure if this is the best place for this
5456 this.syncController_.setDateTimeMapping(this.playlist_);
5457
5458 // if all the configuration is ready, initialize and begin loading
5459 if (this.state === 'INIT' && this.couldBeginLoading_()) {
5460 return this.init_();
5461 }
5462
5463 // if we're in the middle of processing a segment already, don't
5464 // kick off an additional segment request
5465 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
5466 return;
5467 }
5468
5469 this.state = 'READY';
5470 }
5471
5472 /**
5473 * Once all the starting parameters have been specified, begin
5474 * operation. This method should only be invoked from the INIT
5475 * state.
5476 *
5477 * @private
5478 */
5479 }, {
5480 key: 'init_',
5481 value: function init_() {
5482 this.state = 'READY';
5483 this.sourceUpdater_ = new _sourceUpdater2['default'](this.mediaSource_, this.mimeType_);
5484 this.resetEverything();
5485 return this.monitorBuffer_();
5486 }
5487
5488 /**
5489 * set a playlist on the segment loader
5490 *
5491 * @param {PlaylistLoader} media the playlist to set on the segment loader
5492 */
5493 }, {
5494 key: 'playlist',
5495 value: function playlist(newPlaylist) {
5496 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
5497
5498 if (!newPlaylist) {
5499 return;
5500 }
5501
5502 var oldPlaylist = this.playlist_;
5503 var segmentInfo = this.pendingSegment_;
5504
5505 this.playlist_ = newPlaylist;
5506 this.xhrOptions_ = options;
5507
5508 // when we haven't started playing yet, the start of a live playlist
5509 // is always our zero-time so force a sync update each time the playlist
5510 // is refreshed from the server
5511 if (!this.hasPlayed_()) {
5512 newPlaylist.syncInfo = {
5513 mediaSequence: newPlaylist.mediaSequence,
5514 time: 0
5515 };
5516 }
5517
5518 // in VOD, this is always a rendition switch (or we updated our syncInfo above)
5519 // in LIVE, we always want to update with new playlists (including refreshes)
5520 this.trigger('syncinfoupdate');
5521
5522 // if we were unpaused but waiting for a playlist, start
5523 // buffering now
5524 if (this.state === 'INIT' && this.couldBeginLoading_()) {
5525 return this.init_();
5526 }
5527
5528 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
5529 if (this.mediaIndex !== null) {
5530 // we must "resync" the segment loader when we switch renditions and
5531 // the segment loader is already synced to the previous rendition
5532 this.resyncLoader();
5533 }
5534
5535 // the rest of this function depends on `oldPlaylist` being defined
5536 return;
5537 }
5538
5539 // we reloaded the same playlist so we are in a live scenario
5540 // and we will likely need to adjust the mediaIndex
5541 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
5542
5543 this.logger_('mediaSequenceDiff', mediaSequenceDiff);
5544
5545 // update the mediaIndex on the SegmentLoader
5546 // this is important because we can abort a request and this value must be
5547 // equal to the last appended mediaIndex
5548 if (this.mediaIndex !== null) {
5549 this.mediaIndex -= mediaSequenceDiff;
5550 }
5551
5552 // update the mediaIndex on the SegmentInfo object
5553 // this is important because we will update this.mediaIndex with this value
5554 // in `handleUpdateEnd_` after the segment has been successfully appended
5555 if (segmentInfo) {
5556 segmentInfo.mediaIndex -= mediaSequenceDiff;
5557
5558 // we need to update the referenced segment so that timing information is
5559 // saved for the new playlist's segment, however, if the segment fell off the
5560 // playlist, we can leave the old reference and just lose the timing info
5561 if (segmentInfo.mediaIndex >= 0) {
5562 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
5563 }
5564 }
5565
5566 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
5567 }
5568
5569 /**
5570 * Prevent the loader from fetching additional segments. If there
5571 * is a segment request outstanding, it will finish processing
5572 * before the loader halts. A segment loader can be unpaused by
5573 * calling load().
5574 */
5575 }, {
5576 key: 'pause',
5577 value: function pause() {
5578 if (this.checkBufferTimeout_) {
5579 _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
5580
5581 this.checkBufferTimeout_ = null;
5582 }
5583 }
5584
5585 /**
5586 * Returns whether the segment loader is fetching additional
5587 * segments when given the opportunity. This property can be
5588 * modified through calls to pause() and load().
5589 */
5590 }, {
5591 key: 'paused',
5592 value: function paused() {
5593 return this.checkBufferTimeout_ === null;
5594 }
5595
5596 /**
5597 * create/set the following mimetype on the SourceBuffer through a
5598 * SourceUpdater
5599 *
5600 * @param {String} mimeType the mime type string to use
5601 */
5602 }, {
5603 key: 'mimeType',
5604 value: function mimeType(_mimeType) {
5605 if (this.mimeType_) {
5606 return;
5607 }
5608
5609 this.mimeType_ = _mimeType;
5610 // if we were unpaused but waiting for a sourceUpdater, start
5611 // buffering now
5612 if (this.state === 'INIT' && this.couldBeginLoading_()) {
5613 this.init_();
5614 }
5615 }
5616
5617 /**
5618 * Delete all the buffered data and reset the SegmentLoader
5619 */
5620 }, {
5621 key: 'resetEverything',
5622 value: function resetEverything() {
5623 this.ended_ = false;
5624 this.resetLoader();
5625 this.remove(0, Infinity);
5626 }
5627
5628 /**
5629 * Force the SegmentLoader to resync and start loading around the currentTime instead
5630 * of starting at the end of the buffer
5631 *
5632 * Useful for fast quality changes
5633 */
5634 }, {
5635 key: 'resetLoader',
5636 value: function resetLoader() {
5637 this.fetchAtBuffer_ = false;
5638 this.resyncLoader();
5639 }
5640
5641 /**
5642 * Force the SegmentLoader to restart synchronization and make a conservative guess
5643 * before returning to the simple walk-forward method
5644 */
5645 }, {
5646 key: 'resyncLoader',
5647 value: function resyncLoader() {
5648 this.mediaIndex = null;
5649 this.syncPoint_ = null;
5650 this.abort();
5651 }
5652
5653 /**
5654 * Remove any data in the source buffer between start and end times
5655 * @param {Number} start - the start time of the region to remove from the buffer
5656 * @param {Number} end - the end time of the region to remove from the buffer
5657 */
5658 }, {
5659 key: 'remove',
5660 value: function remove(start, end) {
5661 if (this.sourceUpdater_) {
5662 this.sourceUpdater_.remove(start, end);
5663 }
5664 (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.segmentMetadataTrack_);
5665 }
5666
5667 /**
5668 * (re-)schedule monitorBufferTick_ to run as soon as possible
5669 *
5670 * @private
5671 */
5672 }, {
5673 key: 'monitorBuffer_',
5674 value: function monitorBuffer_() {
5675 if (this.checkBufferTimeout_) {
5676 _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
5677 }
5678
5679 this.checkBufferTimeout_ = _globalWindow2['default'].setTimeout(this.monitorBufferTick_.bind(this), 1);
5680 }
5681
5682 /**
5683 * As long as the SegmentLoader is in the READY state, periodically
5684 * invoke fillBuffer_().
5685 *
5686 * @private
5687 */
5688 }, {
5689 key: 'monitorBufferTick_',
5690 value: function monitorBufferTick_() {
5691 if (this.state === 'READY') {
5692 this.fillBuffer_();
5693 }
5694
5695 if (this.checkBufferTimeout_) {
5696 _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
5697 }
5698
5699 this.checkBufferTimeout_ = _globalWindow2['default'].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
5700 }
5701
5702 /**
5703 * fill the buffer with segements unless the sourceBuffers are
5704 * currently updating
5705 *
5706 * Note: this function should only ever be called by monitorBuffer_
5707 * and never directly
5708 *
5709 * @private
5710 */
5711 }, {
5712 key: 'fillBuffer_',
5713 value: function fillBuffer_() {
5714 if (this.sourceUpdater_.updating()) {
5715 return;
5716 }
5717
5718 if (!this.syncPoint_) {
5719 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
5720 }
5721
5722 // see if we need to begin loading immediately
5723 var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
5724
5725 if (!segmentInfo) {
5726 return;
5727 }
5728
5729 var isEndOfStream = detectEndOfStream(this.playlist_, this.mediaSource_, segmentInfo.mediaIndex);
5730
5731 if (isEndOfStream) {
5732 this.endOfStream();
5733 return;
5734 }
5735
5736 if (segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
5737 return;
5738 }
5739
5740 // We will need to change timestampOffset of the sourceBuffer if either of
5741 // the following conditions are true:
5742 // - The segment.timeline !== this.currentTimeline
5743 // (we are crossing a discontinuity somehow)
5744 // - The "timestampOffset" for the start of this segment is less than
5745 // the currently set timestampOffset
5746 if (segmentInfo.timeline !== this.currentTimeline_ || segmentInfo.startOfSegment !== null && segmentInfo.startOfSegment < this.sourceUpdater_.timestampOffset()) {
5747 this.syncController_.reset();
5748 segmentInfo.timestampOffset = segmentInfo.startOfSegment;
5749 }
5750
5751 this.loadSegment_(segmentInfo);
5752 }
5753
5754 /**
5755 * Determines what segment request should be made, given current playback
5756 * state.
5757 *
5758 * @param {TimeRanges} buffered - the state of the buffer
5759 * @param {Object} playlist - the playlist object to fetch segments from
5760 * @param {Number} mediaIndex - the previous mediaIndex fetched or null
5761 * @param {Boolean} hasPlayed - a flag indicating whether we have played or not
5762 * @param {Number} currentTime - the playback position in seconds
5763 * @param {Object} syncPoint - a segment info object that describes the
5764 * @returns {Object} a segment request object that describes the segment to load
5765 */
5766 }, {
5767 key: 'checkBuffer_',
5768 value: function checkBuffer_(buffered, playlist, mediaIndex, hasPlayed, currentTime, syncPoint) {
5769 var lastBufferedEnd = 0;
5770 var startOfSegment = undefined;
5771
5772 if (buffered.length) {
5773 lastBufferedEnd = buffered.end(buffered.length - 1);
5774 }
5775
5776 var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
5777
5778 if (!playlist.segments.length) {
5779 return null;
5780 }
5781
5782 // if there is plenty of content buffered, and the video has
5783 // been played before relax for awhile
5784 if (bufferedTime >= this.goalBufferLength_()) {
5785 return null;
5786 }
5787
5788 // if the video has not yet played once, and we already have
5789 // one segment downloaded do nothing
5790 if (!hasPlayed && bufferedTime >= 1) {
5791 return null;
5792 }
5793
5794 this.logger_('checkBuffer_', 'mediaIndex:', mediaIndex, 'hasPlayed:', hasPlayed, 'currentTime:', currentTime, 'syncPoint:', syncPoint, 'fetchAtBuffer:', this.fetchAtBuffer_, 'bufferedTime:', bufferedTime);
5795
5796 // When the syncPoint is null, there is no way of determining a good
5797 // conservative segment index to fetch from
5798 // The best thing to do here is to get the kind of sync-point data by
5799 // making a request
5800 if (syncPoint === null) {
5801 mediaIndex = this.getSyncSegmentCandidate_(playlist);
5802 this.logger_('getSync', 'mediaIndex:', mediaIndex);
5803 return this.generateSegmentInfo_(playlist, mediaIndex, null, true);
5804 }
5805
5806 // Under normal playback conditions fetching is a simple walk forward
5807 if (mediaIndex !== null) {
5808 this.logger_('walkForward', 'mediaIndex:', mediaIndex + 1);
5809 var segment = playlist.segments[mediaIndex];
5810
5811 if (segment && segment.end) {
5812 startOfSegment = segment.end;
5813 } else {
5814 startOfSegment = lastBufferedEnd;
5815 }
5816 return this.generateSegmentInfo_(playlist, mediaIndex + 1, startOfSegment, false);
5817 }
5818
5819 // There is a sync-point but the lack of a mediaIndex indicates that
5820 // we need to make a good conservative guess about which segment to
5821 // fetch
5822 if (this.fetchAtBuffer_) {
5823 // Find the segment containing the end of the buffer
5824 var mediaSourceInfo = _playlist2['default'].getMediaInfoForTime(playlist, lastBufferedEnd, syncPoint.segmentIndex, syncPoint.time);
5825
5826 mediaIndex = mediaSourceInfo.mediaIndex;
5827 startOfSegment = mediaSourceInfo.startTime;
5828 } else {
5829 // Find the segment containing currentTime
5830 var mediaSourceInfo = _playlist2['default'].getMediaInfoForTime(playlist, currentTime, syncPoint.segmentIndex, syncPoint.time);
5831
5832 mediaIndex = mediaSourceInfo.mediaIndex;
5833 startOfSegment = mediaSourceInfo.startTime;
5834 }
5835 this.logger_('getMediaIndexForTime', 'mediaIndex:', mediaIndex, 'startOfSegment:', startOfSegment);
5836
5837 return this.generateSegmentInfo_(playlist, mediaIndex, startOfSegment, false);
5838 }
5839
5840 /**
5841 * The segment loader has no recourse except to fetch a segment in the
5842 * current playlist and use the internal timestamps in that segment to
5843 * generate a syncPoint. This function returns a good candidate index
5844 * for that process.
5845 *
5846 * @param {Object} playlist - the playlist object to look for a
5847 * @returns {Number} An index of a segment from the playlist to load
5848 */
5849 }, {
5850 key: 'getSyncSegmentCandidate_',
5851 value: function getSyncSegmentCandidate_(playlist) {
5852 var _this2 = this;
5853
5854 if (this.currentTimeline_ === -1) {
5855 return 0;
5856 }
5857
5858 var segmentIndexArray = playlist.segments.map(function (s, i) {
5859 return {
5860 timeline: s.timeline,
5861 segmentIndex: i
5862 };
5863 }).filter(function (s) {
5864 return s.timeline === _this2.currentTimeline_;
5865 });
5866
5867 if (segmentIndexArray.length) {
5868 return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
5869 }
5870
5871 return Math.max(playlist.segments.length - 1, 0);
5872 }
5873 }, {
5874 key: 'generateSegmentInfo_',
5875 value: function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest) {
5876 if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
5877 return null;
5878 }
5879
5880 var segment = playlist.segments[mediaIndex];
5881
5882 return {
5883 requestId: 'segment-loader-' + Math.random(),
5884 // resolve the segment URL relative to the playlist
5885 uri: segment.resolvedUri,
5886 // the segment's mediaIndex at the time it was requested
5887 mediaIndex: mediaIndex,
5888 // whether or not to update the SegmentLoader's state with this
5889 // segment's mediaIndex
5890 isSyncRequest: isSyncRequest,
5891 startOfSegment: startOfSegment,
5892 // the segment's playlist
5893 playlist: playlist,
5894 // unencrypted bytes of the segment
5895 bytes: null,
5896 // when a key is defined for this segment, the encrypted bytes
5897 encryptedBytes: null,
5898 // The target timestampOffset for this segment when we append it
5899 // to the source buffer
5900 timestampOffset: null,
5901 // The timeline that the segment is in
5902 timeline: segment.timeline,
5903 // The expected duration of the segment in seconds
5904 duration: segment.duration,
5905 // retain the segment in case the playlist updates while doing an async process
5906 segment: segment
5907 };
5908 }
5909
5910 /**
5911 * Determines if the network has enough bandwidth to complete the current segment
5912 * request in a timely manner. If not, the request will be aborted early and bandwidth
5913 * updated to trigger a playlist switch.
5914 *
5915 * @param {Object} stats
5916 * Object containing stats about the request timing and size
5917 * @return {Boolean} True if the request was aborted, false otherwise
5918 * @private
5919 */
5920 }, {
5921 key: 'abortRequestEarly_',
5922 value: function abortRequestEarly_(stats) {
5923 if (this.hls_.tech_.paused() ||
5924 // Don't abort if the current playlist is on the lowestEnabledRendition
5925 // TODO: Replace using timeout with a boolean indicating whether this playlist is
5926 // the lowestEnabledRendition.
5927 !this.xhrOptions_.timeout ||
5928 // Don't abort if we have no bandwidth information to estimate segment sizes
5929 !(this.playlist_.attributes && this.playlist_.attributes.BANDWIDTH)) {
5930 return false;
5931 }
5932
5933 // Wait at least 1 second since the first byte of data has been received before
5934 // using the calculated bandwidth from the progress event to allow the bitrate
5935 // to stabilize
5936 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
5937 return false;
5938 }
5939
5940 var currentTime = this.currentTime_();
5941 var measuredBandwidth = stats.bandwidth;
5942 var segmentDuration = this.pendingSegment_.duration;
5943
5944 var requestTimeRemaining = _playlist2['default'].estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived);
5945
5946 // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
5947 // if we are only left with less than 1 second when the request completes.
5948 // A negative timeUntilRebuffering indicates we are already rebuffering
5949 var timeUntilRebuffer = (0, _ranges.timeUntilRebuffer)(this.buffered_(), currentTime, this.hls_.tech_.playbackRate()) - 1;
5950
5951 // Only consider aborting early if the estimated time to finish the download
5952 // is larger than the estimated time until the player runs out of forward buffer
5953 if (requestTimeRemaining <= timeUntilRebuffer) {
5954 return false;
5955 }
5956
5957 var switchCandidate = (0, _playlistSelectors.minRebufferMaxBandwidthSelector)({
5958 master: this.hls_.playlists.master,
5959 currentTime: currentTime,
5960 bandwidth: measuredBandwidth,
5961 duration: this.duration_(),
5962 segmentDuration: segmentDuration,
5963 timeUntilRebuffer: timeUntilRebuffer,
5964 currentTimeline: this.currentTimeline_,
5965 syncController: this.syncController_
5966 });
5967
5968 if (!switchCandidate) {
5969 return;
5970 }
5971
5972 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer;
5973
5974 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
5975
5976 var minimumTimeSaving = 0.5;
5977
5978 // If we are already rebuffering, increase the amount of variance we add to the
5979 // potential round trip time of the new request so that we are not too aggressive
5980 // with switching to a playlist that might save us a fraction of a second.
5981 if (timeUntilRebuffer <= _ranges.TIME_FUDGE_FACTOR) {
5982 minimumTimeSaving = 1;
5983 }
5984
5985 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
5986 return false;
5987 }
5988
5989 // set the bandwidth to that of the desired playlist being sure to scale by
5990 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
5991 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * _config2['default'].BANDWIDTH_VARIANCE + 1;
5992 this.abort();
5993 this.trigger('bandwidthupdate');
5994 return true;
5995 }
5996
5997 /**
5998 * XHR `progress` event handler
5999 *
6000 * @param {Event}
6001 * The XHR `progress` event
6002 * @param {Object} simpleSegment
6003 * A simplified segment object copy
6004 * @private
6005 */
6006 }, {
6007 key: 'handleProgress_',
6008 value: function handleProgress_(event, simpleSegment) {
6009 if (!this.pendingSegment_ || simpleSegment.requestId !== this.pendingSegment_.requestId || this.abortRequestEarly_(simpleSegment.stats)) {
6010 return;
6011 }
6012
6013 this.trigger('progress');
6014 }
6015
6016 /**
6017 * load a specific segment from a request into the buffer
6018 *
6019 * @private
6020 */
6021 }, {
6022 key: 'loadSegment_',
6023 value: function loadSegment_(segmentInfo) {
6024 this.state = 'WAITING';
6025 this.pendingSegment_ = segmentInfo;
6026 this.trimBackBuffer_(segmentInfo);
6027
6028 segmentInfo.abortRequests = (0, _mediaSegmentRequest.mediaSegmentRequest)(this.hls_.xhr, this.xhrOptions_, this.decrypter_, this.createSimplifiedSegmentObj_(segmentInfo),
6029 // progress callback
6030 this.handleProgress_.bind(this), this.segmentRequestFinished_.bind(this));
6031 }
6032
6033 /**
6034 * trim the back buffer so that we don't have too much data
6035 * in the source buffer
6036 *
6037 * @private
6038 *
6039 * @param {Object} segmentInfo - the current segment
6040 */
6041 }, {
6042 key: 'trimBackBuffer_',
6043 value: function trimBackBuffer_(segmentInfo) {
6044 var seekable = this.seekable_();
6045 var currentTime = this.currentTime_();
6046 var removeToTime = 0;
6047
6048 // Chrome has a hard limit of 150MB of
6049 // buffer and a very conservative "garbage collector"
6050 // We manually clear out the old buffer to ensure
6051 // we don't trigger the QuotaExceeded error
6052 // on the source buffer during subsequent appends
6053
6054 // If we have a seekable range use that as the limit for what can be removed safely
6055 // otherwise remove anything older than 30 seconds before the current play head
6056 if (seekable.length && seekable.start(0) > 0 && seekable.start(0) < currentTime) {
6057 removeToTime = seekable.start(0);
6058 } else {
6059 removeToTime = currentTime - 30;
6060 }
6061
6062 if (removeToTime > 0) {
6063 this.remove(0, removeToTime);
6064 }
6065 }
6066
6067 /**
6068 * created a simplified copy of the segment object with just the
6069 * information necessary to perform the XHR and decryption
6070 *
6071 * @private
6072 *
6073 * @param {Object} segmentInfo - the current segment
6074 * @returns {Object} a simplified segment object copy
6075 */
6076 }, {
6077 key: 'createSimplifiedSegmentObj_',
6078 value: function createSimplifiedSegmentObj_(segmentInfo) {
6079 var segment = segmentInfo.segment;
6080 var simpleSegment = {
6081 resolvedUri: segment.resolvedUri,
6082 byterange: segment.byterange,
6083 requestId: segmentInfo.requestId
6084 };
6085
6086 if (segment.key) {
6087 // if the media sequence is greater than 2^32, the IV will be incorrect
6088 // assuming 10s segments, that would be about 1300 years
6089 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
6090
6091 simpleSegment.key = {
6092 resolvedUri: segment.key.resolvedUri,
6093 iv: iv
6094 };
6095 }
6096
6097 if (segment.map) {
6098 simpleSegment.map = this.initSegment(segment.map);
6099 }
6100
6101 return simpleSegment;
6102 }
6103
6104 /**
6105 * Handle the callback from the segmentRequest function and set the
6106 * associated SegmentLoader state and errors if necessary
6107 *
6108 * @private
6109 */
6110 }, {
6111 key: 'segmentRequestFinished_',
6112 value: function segmentRequestFinished_(error, simpleSegment) {
6113 // every request counts as a media request even if it has been aborted
6114 // or canceled due to a timeout
6115 this.mediaRequests += 1;
6116
6117 if (simpleSegment.stats) {
6118 this.mediaBytesTransferred += simpleSegment.stats.bytesReceived;
6119 this.mediaTransferDuration += simpleSegment.stats.roundTripTime;
6120 }
6121
6122 // The request was aborted and the SegmentLoader has already been reset
6123 if (!this.pendingSegment_) {
6124 this.mediaRequestsAborted += 1;
6125 return;
6126 }
6127
6128 // the request was aborted and the SegmentLoader has already started
6129 // another request. this can happen when the timeout for an aborted
6130 // request triggers due to a limitation in the XHR library
6131 // do not count this as any sort of request or we risk double-counting
6132 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
6133 return;
6134 }
6135
6136 // an error occurred from the active pendingSegment_ so reset everything
6137 if (error) {
6138 this.pendingSegment_ = null;
6139 this.state = 'READY';
6140
6141 // the requests were aborted just record the aborted stat and exit
6142 // this is not a true error condition and nothing corrective needs
6143 // to be done
6144 if (error.code === _mediaSegmentRequest.REQUEST_ERRORS.ABORTED) {
6145 this.mediaRequestsAborted += 1;
6146 return;
6147 }
6148
6149 this.pause();
6150
6151 // the error is really just that at least one of the requests timed-out
6152 // set the bandwidth to a very low value and trigger an ABR switch to
6153 // take emergency action
6154 if (error.code === _mediaSegmentRequest.REQUEST_ERRORS.TIMEOUT) {
6155 this.mediaRequestsTimedout += 1;
6156 this.bandwidth = 1;
6157 this.roundTrip = NaN;
6158 this.trigger('bandwidthupdate');
6159 return;
6160 }
6161
6162 // if control-flow has arrived here, then the error is real
6163 // emit an error event to blacklist the current playlist
6164 this.mediaRequestsErrored += 1;
6165 this.error(error);
6166 this.trigger('error');
6167 return;
6168 }
6169
6170 // the response was a success so set any bandwidth stats the request
6171 // generated for ABR purposes
6172 this.bandwidth = simpleSegment.stats.bandwidth;
6173 this.roundTrip = simpleSegment.stats.roundTripTime;
6174
6175 // if this request included an initialization segment, save that data
6176 // to the initSegment cache
6177 if (simpleSegment.map) {
6178 simpleSegment.map = this.initSegment(simpleSegment.map, true);
6179 }
6180
6181 this.processSegmentResponse_(simpleSegment);
6182 }
6183
6184 /**
6185 * Move any important data from the simplified segment object
6186 * back to the real segment object for future phases
6187 *
6188 * @private
6189 */
6190 }, {
6191 key: 'processSegmentResponse_',
6192 value: function processSegmentResponse_(simpleSegment) {
6193 var segmentInfo = this.pendingSegment_;
6194
6195 segmentInfo.bytes = simpleSegment.bytes;
6196 if (simpleSegment.map) {
6197 segmentInfo.segment.map.bytes = simpleSegment.map.bytes;
6198 }
6199
6200 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
6201 this.handleSegment_();
6202 }
6203
6204 /**
6205 * append a decrypted segement to the SourceBuffer through a SourceUpdater
6206 *
6207 * @private
6208 */
6209 }, {
6210 key: 'handleSegment_',
6211 value: function handleSegment_() {
6212 var _this3 = this;
6213
6214 if (!this.pendingSegment_) {
6215 this.state = 'READY';
6216 return;
6217 }
6218
6219 this.state = 'APPENDING';
6220
6221 var segmentInfo = this.pendingSegment_;
6222 var segment = segmentInfo.segment;
6223
6224 this.syncController_.probeSegmentInfo(segmentInfo);
6225
6226 if (segmentInfo.isSyncRequest) {
6227 this.trigger('syncinfoupdate');
6228 this.pendingSegment_ = null;
6229 this.state = 'READY';
6230 return;
6231 }
6232
6233 if (segmentInfo.timestampOffset !== null && segmentInfo.timestampOffset !== this.sourceUpdater_.timestampOffset()) {
6234 this.sourceUpdater_.timestampOffset(segmentInfo.timestampOffset);
6235 // fired when a timestamp offset is set in HLS (can also identify discontinuities)
6236 this.trigger('timestampoffset');
6237 }
6238
6239 // if the media initialization segment is changing, append it
6240 // before the content segment
6241 if (segment.map) {
6242 (function () {
6243 var initId = (0, _binUtils.initSegmentId)(segment.map);
6244
6245 if (!_this3.activeInitSegmentId_ || _this3.activeInitSegmentId_ !== initId) {
6246 var initSegment = _this3.initSegment(segment.map);
6247
6248 _this3.sourceUpdater_.appendBuffer(initSegment.bytes, function () {
6249 _this3.activeInitSegmentId_ = initId;
6250 });
6251 }
6252 })();
6253 }
6254
6255 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
6256 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
6257 this.mediaSecondsLoaded += segment.end - segment.start;
6258 } else {
6259 this.mediaSecondsLoaded += segment.duration;
6260 }
6261
6262 this.sourceUpdater_.appendBuffer(segmentInfo.bytes, this.handleUpdateEnd_.bind(this));
6263 }
6264
6265 /**
6266 * callback to run when appendBuffer is finished. detects if we are
6267 * in a good state to do things with the data we got, or if we need
6268 * to wait for more
6269 *
6270 * @private
6271 */
6272 }, {
6273 key: 'handleUpdateEnd_',
6274 value: function handleUpdateEnd_() {
6275 this.logger_('handleUpdateEnd_', 'segmentInfo:', this.pendingSegment_);
6276
6277 if (!this.pendingSegment_) {
6278 this.state = 'READY';
6279 if (!this.paused()) {
6280 this.monitorBuffer_();
6281 }
6282 return;
6283 }
6284
6285 var segmentInfo = this.pendingSegment_;
6286 var segment = segmentInfo.segment;
6287 var isWalkingForward = this.mediaIndex !== null;
6288
6289 this.pendingSegment_ = null;
6290 this.recordThroughput_(segmentInfo);
6291 this.addSegmentMetadataCue_(segmentInfo);
6292
6293 this.state = 'READY';
6294
6295 this.mediaIndex = segmentInfo.mediaIndex;
6296 this.fetchAtBuffer_ = true;
6297 this.currentTimeline_ = segmentInfo.timeline;
6298
6299 // We must update the syncinfo to recalculate the seekable range before
6300 // the following conditional otherwise it may consider this a bad "guess"
6301 // and attempt to resync when the post-update seekable window and live
6302 // point would mean that this was the perfect segment to fetch
6303 this.trigger('syncinfoupdate');
6304
6305 // If we previously appended a segment that ends more than 3 targetDurations before
6306 // the currentTime_ that means that our conservative guess was too conservative.
6307 // In that case, reset the loader state so that we try to use any information gained
6308 // from the previous request to create a new, more accurate, sync-point.
6309 if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
6310 this.resetEverything();
6311 return;
6312 }
6313
6314 // Don't do a rendition switch unless we have enough time to get a sync segment
6315 // and conservatively guess
6316 if (isWalkingForward) {
6317 this.trigger('bandwidthupdate');
6318 }
6319 this.trigger('progress');
6320
6321 // any time an update finishes and the last segment is in the
6322 // buffer, end the stream. this ensures the "ended" event will
6323 // fire if playback reaches that point.
6324 var isEndOfStream = detectEndOfStream(segmentInfo.playlist, this.mediaSource_, segmentInfo.mediaIndex + 1);
6325
6326 if (isEndOfStream) {
6327 this.endOfStream();
6328 }
6329
6330 if (!this.paused()) {
6331 this.monitorBuffer_();
6332 }
6333 }
6334
6335 /**
6336 * Records the current throughput of the decrypt, transmux, and append
6337 * portion of the semgment pipeline. `throughput.rate` is a the cumulative
6338 * moving average of the throughput. `throughput.count` is the number of
6339 * data points in the average.
6340 *
6341 * @private
6342 * @param {Object} segmentInfo the object returned by loadSegment
6343 */
6344 }, {
6345 key: 'recordThroughput_',
6346 value: function recordThroughput_(segmentInfo) {
6347 var rate = this.throughput.rate;
6348 // Add one to the time to ensure that we don't accidentally attempt to divide
6349 // by zero in the case where the throughput is ridiculously high
6350 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1;
6351 // Multiply by 8000 to convert from bytes/millisecond to bits/second
6352 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000);
6353
6354 // This is just a cumulative moving average calculation:
6355 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
6356 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
6357 }
6358
6359 /**
6360 * A debugging logger noop that is set to console.log only if debugging
6361 * is enabled globally
6362 *
6363 * @private
6364 */
6365 }, {
6366 key: 'logger_',
6367 value: function logger_() {}
6368
6369 /**
6370 * Adds a cue to the segment-metadata track with some metadata information about the
6371 * segment
6372 *
6373 * @private
6374 * @param {Object} segmentInfo
6375 * the object returned by loadSegment
6376 * @method addSegmentMetadataCue_
6377 */
6378 }, {
6379 key: 'addSegmentMetadataCue_',
6380 value: function addSegmentMetadataCue_(segmentInfo) {
6381 if (!this.segmentMetadataTrack_) {
6382 return;
6383 }
6384
6385 var segment = segmentInfo.segment;
6386 var start = segment.start;
6387 var end = segment.end;
6388
6389 // Do not try adding the cue if the start and end times are invalid.
6390 if (!finite(start) || !finite(end)) {
6391 return;
6392 }
6393
6394 (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.segmentMetadataTrack_);
6395
6396 var Cue = _globalWindow2['default'].WebKitDataCue || _globalWindow2['default'].VTTCue;
6397 var value = {
6398 uri: segmentInfo.uri,
6399 timeline: segmentInfo.timeline,
6400 playlist: segmentInfo.playlist.uri,
6401 start: start,
6402 end: end
6403 };
6404 var data = JSON.stringify(value);
6405 var cue = new Cue(start, end, data);
6406
6407 // Attach the metadata to the value property of the cue to keep consistency between
6408 // the differences of WebKitDataCue in safari and VTTCue in other browsers
6409 cue.value = value;
6410
6411 this.segmentMetadataTrack_.addCue(cue);
6412 }
6413 }]);
6414
6415 return SegmentLoader;
6416})(_videoJs2['default'].EventTarget);
6417
6418exports['default'] = SegmentLoader;
6419module.exports = exports['default'];
6420}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
6421},{"./bin-utils":2,"./config":3,"./media-segment-request":6,"./playlist":10,"./playlist-selectors":9,"./ranges":11,"./source-updater":16,"global/window":30,"videojs-contrib-media-sources/es5/remove-cues-from-track.js":71}],16:[function(require,module,exports){
6422(function (global){
6423/**
6424 * @file source-updater.js
6425 */
6426'use strict';
6427
6428Object.defineProperty(exports, '__esModule', {
6429 value: true
6430});
6431
6432var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
6433
6434function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
6435
6436function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
6437
6438var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
6439
6440var _videoJs2 = _interopRequireDefault(_videoJs);
6441
6442var noop = function noop() {};
6443
6444/**
6445 * A queue of callbacks to be serialized and applied when a
6446 * MediaSource and its associated SourceBuffers are not in the
6447 * updating state. It is used by the segment loader to update the
6448 * underlying SourceBuffers when new data is loaded, for instance.
6449 *
6450 * @class SourceUpdater
6451 * @param {MediaSource} mediaSource the MediaSource to create the
6452 * SourceBuffer from
6453 * @param {String} mimeType the desired MIME type of the underlying
6454 * SourceBuffer
6455 */
6456
6457var SourceUpdater = (function () {
6458 function SourceUpdater(mediaSource, mimeType) {
6459 var _this = this;
6460
6461 _classCallCheck(this, SourceUpdater);
6462
6463 var createSourceBuffer = function createSourceBuffer() {
6464 _this.sourceBuffer_ = mediaSource.addSourceBuffer(mimeType);
6465
6466 // run completion handlers and process callbacks as updateend
6467 // events fire
6468 _this.onUpdateendCallback_ = function () {
6469 var pendingCallback = _this.pendingCallback_;
6470
6471 _this.pendingCallback_ = null;
6472
6473 if (pendingCallback) {
6474 pendingCallback();
6475 }
6476
6477 _this.runCallback_();
6478 };
6479
6480 _this.sourceBuffer_.addEventListener('updateend', _this.onUpdateendCallback_);
6481
6482 _this.runCallback_();
6483 };
6484
6485 this.callbacks_ = [];
6486 this.pendingCallback_ = null;
6487 this.timestampOffset_ = 0;
6488 this.mediaSource = mediaSource;
6489 this.processedAppend_ = false;
6490
6491 if (mediaSource.readyState === 'closed') {
6492 mediaSource.addEventListener('sourceopen', createSourceBuffer);
6493 } else {
6494 createSourceBuffer();
6495 }
6496 }
6497
6498 /**
6499 * Aborts the current segment and resets the segment parser.
6500 *
6501 * @param {Function} done function to call when done
6502 * @see http://w3c.github.io/media-source/#widl-SourceBuffer-abort-void
6503 */
6504
6505 _createClass(SourceUpdater, [{
6506 key: 'abort',
6507 value: function abort(done) {
6508 var _this2 = this;
6509
6510 if (this.processedAppend_) {
6511 this.queueCallback_(function () {
6512 _this2.sourceBuffer_.abort();
6513 }, done);
6514 }
6515 }
6516
6517 /**
6518 * Queue an update to append an ArrayBuffer.
6519 *
6520 * @param {ArrayBuffer} bytes
6521 * @param {Function} done the function to call when done
6522 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
6523 */
6524 }, {
6525 key: 'appendBuffer',
6526 value: function appendBuffer(bytes, done) {
6527 var _this3 = this;
6528
6529 this.processedAppend_ = true;
6530
6531 this.queueCallback_(function () {
6532 _this3.sourceBuffer_.appendBuffer(bytes);
6533 }, done);
6534 }
6535
6536 /**
6537 * Indicates what TimeRanges are buffered in the managed SourceBuffer.
6538 *
6539 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-buffered
6540 */
6541 }, {
6542 key: 'buffered',
6543 value: function buffered() {
6544 if (!this.sourceBuffer_) {
6545 return _videoJs2['default'].createTimeRanges();
6546 }
6547 return this.sourceBuffer_.buffered;
6548 }
6549
6550 /**
6551 * Queue an update to remove a time range from the buffer.
6552 *
6553 * @param {Number} start where to start the removal
6554 * @param {Number} end where to end the removal
6555 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
6556 */
6557 }, {
6558 key: 'remove',
6559 value: function remove(start, end) {
6560 var _this4 = this;
6561
6562 if (this.processedAppend_) {
6563 this.queueCallback_(function () {
6564 _this4.sourceBuffer_.remove(start, end);
6565 }, noop);
6566 }
6567 }
6568
6569 /**
6570 * Whether the underlying sourceBuffer is updating or not
6571 *
6572 * @return {Boolean} the updating status of the SourceBuffer
6573 */
6574 }, {
6575 key: 'updating',
6576 value: function updating() {
6577 return !this.sourceBuffer_ || this.sourceBuffer_.updating || this.pendingCallback_;
6578 }
6579
6580 /**
6581 * Set/get the timestampoffset on the SourceBuffer
6582 *
6583 * @return {Number} the timestamp offset
6584 */
6585 }, {
6586 key: 'timestampOffset',
6587 value: function timestampOffset(offset) {
6588 var _this5 = this;
6589
6590 if (typeof offset !== 'undefined') {
6591 this.queueCallback_(function () {
6592 _this5.sourceBuffer_.timestampOffset = offset;
6593 });
6594 this.timestampOffset_ = offset;
6595 }
6596 return this.timestampOffset_;
6597 }
6598
6599 /**
6600 * Queue a callback to run
6601 */
6602 }, {
6603 key: 'queueCallback_',
6604 value: function queueCallback_(callback, done) {
6605 this.callbacks_.push([callback.bind(this), done]);
6606 this.runCallback_();
6607 }
6608
6609 /**
6610 * Run a queued callback
6611 */
6612 }, {
6613 key: 'runCallback_',
6614 value: function runCallback_() {
6615 var callbacks = undefined;
6616
6617 if (!this.updating() && this.callbacks_.length) {
6618 callbacks = this.callbacks_.shift();
6619 this.pendingCallback_ = callbacks[1];
6620 callbacks[0]();
6621 }
6622 }
6623
6624 /**
6625 * dispose of the source updater and the underlying sourceBuffer
6626 */
6627 }, {
6628 key: 'dispose',
6629 value: function dispose() {
6630 this.sourceBuffer_.removeEventListener('updateend', this.onUpdateendCallback_);
6631 if (this.sourceBuffer_ && this.mediaSource.readyState === 'open') {
6632 this.sourceBuffer_.abort();
6633 }
6634 }
6635 }]);
6636
6637 return SourceUpdater;
6638})();
6639
6640exports['default'] = SourceUpdater;
6641module.exports = exports['default'];
6642}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
6643},{}],17:[function(require,module,exports){
6644(function (global){
6645/**
6646 * @file sync-controller.js
6647 */
6648
6649'use strict';
6650
6651Object.defineProperty(exports, '__esModule', {
6652 value: true
6653});
6654
6655var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
6656
6657var _get = function get(_x2, _x3, _x4) { var _again = true; _function: while (_again) { var object = _x2, property = _x3, receiver = _x4; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x2 = parent; _x3 = property; _x4 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
6658
6659function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
6660
6661function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
6662
6663function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
6664
6665var _muxJsLibMp4Probe = require('mux.js/lib/mp4/probe');
6666
6667var _muxJsLibMp4Probe2 = _interopRequireDefault(_muxJsLibMp4Probe);
6668
6669var _muxJsLibToolsTsInspectorJs = require('mux.js/lib/tools/ts-inspector.js');
6670
6671var _playlist = require('./playlist');
6672
6673var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
6674
6675var _videoJs2 = _interopRequireDefault(_videoJs);
6676
6677var syncPointStrategies = [
6678// Stategy "VOD": Handle the VOD-case where the sync-point is *always*
6679// the equivalence display-time 0 === segment-index 0
6680{
6681 name: 'VOD',
6682 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
6683 if (duration !== Infinity) {
6684 var syncPoint = {
6685 time: 0,
6686 segmentIndex: 0
6687 };
6688
6689 return syncPoint;
6690 }
6691 return null;
6692 }
6693},
6694// Stategy "ProgramDateTime": We have a program-date-time tag in this playlist
6695{
6696 name: 'ProgramDateTime',
6697 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
6698 if (syncController.datetimeToDisplayTime && playlist.dateTimeObject) {
6699 var playlistTime = playlist.dateTimeObject.getTime() / 1000;
6700 var playlistStart = playlistTime + syncController.datetimeToDisplayTime;
6701 var syncPoint = {
6702 time: playlistStart,
6703 segmentIndex: 0
6704 };
6705
6706 return syncPoint;
6707 }
6708 return null;
6709 }
6710},
6711// Stategy "Segment": We have a known time mapping for a timeline and a
6712// segment in the current timeline with timing data
6713{
6714 name: 'Segment',
6715 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
6716 var segments = playlist.segments || [];
6717 var syncPoint = null;
6718 var lastDistance = null;
6719
6720 currentTime = currentTime || 0;
6721
6722 for (var i = 0; i < segments.length; i++) {
6723 var segment = segments[i];
6724
6725 if (segment.timeline === currentTimeline && typeof segment.start !== 'undefined') {
6726 var distance = Math.abs(currentTime - segment.start);
6727
6728 // Once the distance begins to increase, we have passed
6729 // currentTime and can stop looking for better candidates
6730 if (lastDistance !== null && lastDistance < distance) {
6731 break;
6732 }
6733
6734 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
6735 lastDistance = distance;
6736 syncPoint = {
6737 time: segment.start,
6738 segmentIndex: i
6739 };
6740 }
6741 }
6742 }
6743 return syncPoint;
6744 }
6745},
6746// Stategy "Discontinuity": We have a discontinuity with a known
6747// display-time
6748{
6749 name: 'Discontinuity',
6750 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
6751 var syncPoint = null;
6752
6753 currentTime = currentTime || 0;
6754
6755 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
6756 var lastDistance = null;
6757
6758 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
6759 var segmentIndex = playlist.discontinuityStarts[i];
6760 var discontinuity = playlist.discontinuitySequence + i + 1;
6761 var discontinuitySync = syncController.discontinuities[discontinuity];
6762
6763 if (discontinuitySync) {
6764 var distance = Math.abs(currentTime - discontinuitySync.time);
6765
6766 // Once the distance begins to increase, we have passed
6767 // currentTime and can stop looking for better candidates
6768 if (lastDistance !== null && lastDistance < distance) {
6769 break;
6770 }
6771
6772 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
6773 lastDistance = distance;
6774 syncPoint = {
6775 time: discontinuitySync.time,
6776 segmentIndex: segmentIndex
6777 };
6778 }
6779 }
6780 }
6781 }
6782 return syncPoint;
6783 }
6784},
6785// Stategy "Playlist": We have a playlist with a known mapping of
6786// segment index to display time
6787{
6788 name: 'Playlist',
6789 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
6790 if (playlist.syncInfo) {
6791 var syncPoint = {
6792 time: playlist.syncInfo.time,
6793 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
6794 };
6795
6796 return syncPoint;
6797 }
6798 return null;
6799 }
6800}];
6801
6802exports.syncPointStrategies = syncPointStrategies;
6803
6804var SyncController = (function (_videojs$EventTarget) {
6805 _inherits(SyncController, _videojs$EventTarget);
6806
6807 function SyncController() {
6808 var options = arguments.length <= 0 || arguments[0] === undefined ? {} : arguments[0];
6809
6810 _classCallCheck(this, SyncController);
6811
6812 _get(Object.getPrototypeOf(SyncController.prototype), 'constructor', this).call(this);
6813 // Segment Loader state variables...
6814 // ...for synching across variants
6815 this.inspectCache_ = undefined;
6816
6817 // ...for synching across variants
6818 this.timelines = [];
6819 this.discontinuities = [];
6820 this.datetimeToDisplayTime = null;
6821
6822 if (options.debug) {
6823 this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'sync-controller ->');
6824 }
6825 }
6826
6827 /**
6828 * Find a sync-point for the playlist specified
6829 *
6830 * A sync-point is defined as a known mapping from display-time to
6831 * a segment-index in the current playlist.
6832 *
6833 * @param {Playlist} playlist
6834 * The playlist that needs a sync-point
6835 * @param {Number} duration
6836 * Duration of the MediaSource (Infinite if playing a live source)
6837 * @param {Number} currentTimeline
6838 * The last timeline from which a segment was loaded
6839 * @returns {Object}
6840 * A sync-point object
6841 */
6842
6843 _createClass(SyncController, [{
6844 key: 'getSyncPoint',
6845 value: function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
6846 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
6847
6848 if (!syncPoints.length) {
6849 // Signal that we need to attempt to get a sync-point manually
6850 // by fetching a segment in the playlist and constructing
6851 // a sync-point from that information
6852 return null;
6853 }
6854
6855 // Now find the sync-point that is closest to the currentTime because
6856 // that should result in the most accurate guess about which segment
6857 // to fetch
6858 return this.selectSyncPoint_(syncPoints, { key: 'time', value: currentTime });
6859 }
6860
6861 /**
6862 * Calculate the amount of time that has expired off the playlist during playback
6863 *
6864 * @param {Playlist} playlist
6865 * Playlist object to calculate expired from
6866 * @param {Number} duration
6867 * Duration of the MediaSource (Infinity if playling a live source)
6868 * @returns {Number|null}
6869 * The amount of time that has expired off the playlist during playback. Null
6870 * if no sync-points for the playlist can be found.
6871 */
6872 }, {
6873 key: 'getExpiredTime',
6874 value: function getExpiredTime(playlist, duration) {
6875 if (!playlist || !playlist.segments) {
6876 return null;
6877 }
6878
6879 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0);
6880
6881 // Without sync-points, there is not enough information to determine the expired time
6882 if (!syncPoints.length) {
6883 return null;
6884 }
6885
6886 var syncPoint = this.selectSyncPoint_(syncPoints, {
6887 key: 'segmentIndex',
6888 value: 0
6889 });
6890
6891 // If the sync-point is beyond the start of the playlist, we want to subtract the
6892 // duration from index 0 to syncPoint.segmentIndex instead of adding.
6893 if (syncPoint.segmentIndex > 0) {
6894 syncPoint.time *= -1;
6895 }
6896
6897 return Math.abs(syncPoint.time + (0, _playlist.sumDurations)(playlist, syncPoint.segmentIndex, 0));
6898 }
6899
6900 /**
6901 * Runs each sync-point strategy and returns a list of sync-points returned by the
6902 * strategies
6903 *
6904 * @private
6905 * @param {Playlist} playlist
6906 * The playlist that needs a sync-point
6907 * @param {Number} duration
6908 * Duration of the MediaSource (Infinity if playing a live source)
6909 * @param {Number} currentTimeline
6910 * The last timeline from which a segment was loaded
6911 * @returns {Array}
6912 * A list of sync-point objects
6913 */
6914 }, {
6915 key: 'runStrategies_',
6916 value: function runStrategies_(playlist, duration, currentTimeline, currentTime) {
6917 var syncPoints = [];
6918
6919 // Try to find a sync-point in by utilizing various strategies...
6920 for (var i = 0; i < syncPointStrategies.length; i++) {
6921 var strategy = syncPointStrategies[i];
6922 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
6923
6924 if (syncPoint) {
6925 syncPoint.strategy = strategy.name;
6926 syncPoints.push({
6927 strategy: strategy.name,
6928 syncPoint: syncPoint
6929 });
6930 this.logger_('syncPoint found via <' + strategy.name + '>:', syncPoint);
6931 }
6932 }
6933
6934 return syncPoints;
6935 }
6936
6937 /**
6938 * Selects the sync-point nearest the specified target
6939 *
6940 * @private
6941 * @param {Array} syncPoints
6942 * List of sync-points to select from
6943 * @param {Object} target
6944 * Object specifying the property and value we are targeting
6945 * @param {String} target.key
6946 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
6947 * @param {Number} target.value
6948 * The value to target for the specified key.
6949 * @returns {Object}
6950 * The sync-point nearest the target
6951 */
6952 }, {
6953 key: 'selectSyncPoint_',
6954 value: function selectSyncPoint_(syncPoints, target) {
6955 var bestSyncPoint = syncPoints[0].syncPoint;
6956 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
6957 var bestStrategy = syncPoints[0].strategy;
6958
6959 for (var i = 1; i < syncPoints.length; i++) {
6960 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
6961
6962 if (newDistance < bestDistance) {
6963 bestDistance = newDistance;
6964 bestSyncPoint = syncPoints[i].syncPoint;
6965 bestStrategy = syncPoints[i].strategy;
6966 }
6967 }
6968
6969 this.logger_('syncPoint with strategy <' + bestStrategy + '> chosen: ', bestSyncPoint);
6970 return bestSyncPoint;
6971 }
6972
6973 /**
6974 * Save any meta-data present on the segments when segments leave
6975 * the live window to the playlist to allow for synchronization at the
6976 * playlist level later.
6977 *
6978 * @param {Playlist} oldPlaylist - The previous active playlist
6979 * @param {Playlist} newPlaylist - The updated and most current playlist
6980 */
6981 }, {
6982 key: 'saveExpiredSegmentInfo',
6983 value: function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
6984 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
6985
6986 // When a segment expires from the playlist and it has a start time
6987 // save that information as a possible sync-point reference in future
6988 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
6989 var lastRemovedSegment = oldPlaylist.segments[i];
6990
6991 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
6992 newPlaylist.syncInfo = {
6993 mediaSequence: oldPlaylist.mediaSequence + i,
6994 time: lastRemovedSegment.start
6995 };
6996 this.logger_('playlist sync:', newPlaylist.syncInfo);
6997 this.trigger('syncinfoupdate');
6998 break;
6999 }
7000 }
7001 }
7002
7003 /**
7004 * Save the mapping from playlist's ProgramDateTime to display. This should
7005 * only ever happen once at the start of playback.
7006 *
7007 * @param {Playlist} playlist - The currently active playlist
7008 */
7009 }, {
7010 key: 'setDateTimeMapping',
7011 value: function setDateTimeMapping(playlist) {
7012 if (!this.datetimeToDisplayTime && playlist.dateTimeObject) {
7013 var playlistTimestamp = playlist.dateTimeObject.getTime() / 1000;
7014
7015 this.datetimeToDisplayTime = -playlistTimestamp;
7016 }
7017 }
7018
7019 /**
7020 * Reset the state of the inspection cache when we do a rendition
7021 * switch
7022 */
7023 }, {
7024 key: 'reset',
7025 value: function reset() {
7026 this.inspectCache_ = undefined;
7027 }
7028
7029 /**
7030 * Probe or inspect a fmp4 or an mpeg2-ts segment to determine the start
7031 * and end of the segment in it's internal "media time". Used to generate
7032 * mappings from that internal "media time" to the display time that is
7033 * shown on the player.
7034 *
7035 * @param {SegmentInfo} segmentInfo - The current active request information
7036 */
7037 }, {
7038 key: 'probeSegmentInfo',
7039 value: function probeSegmentInfo(segmentInfo) {
7040 var segment = segmentInfo.segment;
7041 var timingInfo = undefined;
7042
7043 if (segment.map) {
7044 timingInfo = this.probeMp4Segment_(segmentInfo);
7045 } else {
7046 timingInfo = this.probeTsSegment_(segmentInfo);
7047 }
7048
7049 if (timingInfo) {
7050 if (this.calculateSegmentTimeMapping_(segmentInfo, timingInfo)) {
7051 this.saveDiscontinuitySyncInfo_(segmentInfo);
7052 }
7053 }
7054 }
7055
7056 /**
7057 * Probe an fmp4 or an mpeg2-ts segment to determine the start of the segment
7058 * in it's internal "media time".
7059 *
7060 * @private
7061 * @param {SegmentInfo} segmentInfo - The current active request information
7062 * @return {object} The start and end time of the current segment in "media time"
7063 */
7064 }, {
7065 key: 'probeMp4Segment_',
7066 value: function probeMp4Segment_(segmentInfo) {
7067 var segment = segmentInfo.segment;
7068 var timescales = _muxJsLibMp4Probe2['default'].timescale(segment.map.bytes);
7069 var startTime = _muxJsLibMp4Probe2['default'].startTime(timescales, segmentInfo.bytes);
7070
7071 if (segmentInfo.timestampOffset !== null) {
7072 segmentInfo.timestampOffset -= startTime;
7073 }
7074
7075 return {
7076 start: startTime,
7077 end: startTime + segment.duration
7078 };
7079 }
7080
7081 /**
7082 * Probe an mpeg2-ts segment to determine the start and end of the segment
7083 * in it's internal "media time".
7084 *
7085 * @private
7086 * @param {SegmentInfo} segmentInfo - The current active request information
7087 * @return {object} The start and end time of the current segment in "media time"
7088 */
7089 }, {
7090 key: 'probeTsSegment_',
7091 value: function probeTsSegment_(segmentInfo) {
7092 var timeInfo = (0, _muxJsLibToolsTsInspectorJs.inspect)(segmentInfo.bytes, this.inspectCache_);
7093 var segmentStartTime = undefined;
7094 var segmentEndTime = undefined;
7095
7096 if (!timeInfo) {
7097 return null;
7098 }
7099
7100 if (timeInfo.video && timeInfo.video.length === 2) {
7101 this.inspectCache_ = timeInfo.video[1].dts;
7102 segmentStartTime = timeInfo.video[0].dtsTime;
7103 segmentEndTime = timeInfo.video[1].dtsTime;
7104 } else if (timeInfo.audio && timeInfo.audio.length === 2) {
7105 this.inspectCache_ = timeInfo.audio[1].dts;
7106 segmentStartTime = timeInfo.audio[0].dtsTime;
7107 segmentEndTime = timeInfo.audio[1].dtsTime;
7108 }
7109
7110 return {
7111 start: segmentStartTime,
7112 end: segmentEndTime
7113 };
7114 }
7115 }, {
7116 key: 'timestampOffsetForTimeline',
7117 value: function timestampOffsetForTimeline(timeline) {
7118 if (typeof this.timelines[timeline] === 'undefined') {
7119 return null;
7120 }
7121 return this.timelines[timeline].time;
7122 }
7123
7124 /**
7125 * Use the "media time" for a segment to generate a mapping to "display time" and
7126 * save that display time to the segment.
7127 *
7128 * @private
7129 * @param {SegmentInfo} segmentInfo
7130 * The current active request information
7131 * @param {object} timingInfo
7132 * The start and end time of the current segment in "media time"
7133 * @returns {Boolean}
7134 * Returns false if segment time mapping could not be calculated
7135 */
7136 }, {
7137 key: 'calculateSegmentTimeMapping_',
7138 value: function calculateSegmentTimeMapping_(segmentInfo, timingInfo) {
7139 var segment = segmentInfo.segment;
7140 var mappingObj = this.timelines[segmentInfo.timeline];
7141
7142 if (segmentInfo.timestampOffset !== null) {
7143 this.logger_('tsO:', segmentInfo.timestampOffset);
7144
7145 mappingObj = {
7146 time: segmentInfo.startOfSegment,
7147 mapping: segmentInfo.startOfSegment - timingInfo.start
7148 };
7149 this.timelines[segmentInfo.timeline] = mappingObj;
7150 this.trigger('timestampoffset');
7151
7152 segment.start = segmentInfo.startOfSegment;
7153 segment.end = timingInfo.end + mappingObj.mapping;
7154 } else if (mappingObj) {
7155 segment.start = timingInfo.start + mappingObj.mapping;
7156 segment.end = timingInfo.end + mappingObj.mapping;
7157 } else {
7158 return false;
7159 }
7160
7161 return true;
7162 }
7163
7164 /**
7165 * Each time we have discontinuity in the playlist, attempt to calculate the location
7166 * in display of the start of the discontinuity and save that. We also save an accuracy
7167 * value so that we save values with the most accuracy (closest to 0.)
7168 *
7169 * @private
7170 * @param {SegmentInfo} segmentInfo - The current active request information
7171 */
7172 }, {
7173 key: 'saveDiscontinuitySyncInfo_',
7174 value: function saveDiscontinuitySyncInfo_(segmentInfo) {
7175 var playlist = segmentInfo.playlist;
7176 var segment = segmentInfo.segment;
7177
7178 // If the current segment is a discontinuity then we know exactly where
7179 // the start of the range and it's accuracy is 0 (greater accuracy values
7180 // mean more approximation)
7181 if (segment.discontinuity) {
7182 this.discontinuities[segment.timeline] = {
7183 time: segment.start,
7184 accuracy: 0
7185 };
7186 } else if (playlist.discontinuityStarts.length) {
7187 // Search for future discontinuities that we can provide better timing
7188 // information for and save that information for sync purposes
7189 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
7190 var segmentIndex = playlist.discontinuityStarts[i];
7191 var discontinuity = playlist.discontinuitySequence + i + 1;
7192 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
7193 var accuracy = Math.abs(mediaIndexDiff);
7194
7195 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
7196 var time = undefined;
7197
7198 if (mediaIndexDiff < 0) {
7199 time = segment.start - (0, _playlist.sumDurations)(playlist, segmentInfo.mediaIndex, segmentIndex);
7200 } else {
7201 time = segment.end + (0, _playlist.sumDurations)(playlist, segmentInfo.mediaIndex + 1, segmentIndex);
7202 }
7203
7204 this.discontinuities[discontinuity] = {
7205 time: time,
7206 accuracy: accuracy
7207 };
7208 }
7209 }
7210 }
7211 }
7212
7213 /**
7214 * A debugging logger noop that is set to console.log only if debugging
7215 * is enabled globally
7216 *
7217 * @private
7218 */
7219 }, {
7220 key: 'logger_',
7221 value: function logger_() {}
7222 }]);
7223
7224 return SyncController;
7225})(_videoJs2['default'].EventTarget);
7226
7227exports['default'] = SyncController;
7228}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
7229},{"./playlist":10,"mux.js/lib/mp4/probe":55,"mux.js/lib/tools/ts-inspector.js":57}],18:[function(require,module,exports){
7230(function (global){
7231/**
7232 * @file vtt-segment-loader.js
7233 */
7234'use strict';
7235
7236Object.defineProperty(exports, '__esModule', {
7237 value: true
7238});
7239
7240var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
7241
7242var _get = function get(_x3, _x4, _x5) { var _again = true; _function: while (_again) { var object = _x3, property = _x4, receiver = _x5; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x3 = parent; _x4 = property; _x5 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
7243
7244function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
7245
7246function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
7247
7248function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
7249
7250var _segmentLoader = require('./segment-loader');
7251
7252var _segmentLoader2 = _interopRequireDefault(_segmentLoader);
7253
7254var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
7255
7256var _videoJs2 = _interopRequireDefault(_videoJs);
7257
7258var _globalWindow = require('global/window');
7259
7260var _globalWindow2 = _interopRequireDefault(_globalWindow);
7261
7262var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs = require('videojs-contrib-media-sources/es5/remove-cues-from-track.js');
7263
7264var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2 = _interopRequireDefault(_videojsContribMediaSourcesEs5RemoveCuesFromTrackJs);
7265
7266var _binUtils = require('./bin-utils');
7267
7268var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
7269 return char.charCodeAt(0);
7270}));
7271
7272var uintToString = function uintToString(uintArray) {
7273 return String.fromCharCode.apply(null, uintArray);
7274};
7275
7276/**
7277 * An object that manages segment loading and appending.
7278 *
7279 * @class VTTSegmentLoader
7280 * @param {Object} options required and optional options
7281 * @extends videojs.EventTarget
7282 */
7283
7284var VTTSegmentLoader = (function (_SegmentLoader) {
7285 _inherits(VTTSegmentLoader, _SegmentLoader);
7286
7287 function VTTSegmentLoader(settings) {
7288 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
7289
7290 _classCallCheck(this, VTTSegmentLoader);
7291
7292 _get(Object.getPrototypeOf(VTTSegmentLoader.prototype), 'constructor', this).call(this, settings, options);
7293
7294 // SegmentLoader requires a MediaSource be specified or it will throw an error;
7295 // however, VTTSegmentLoader has no need of a media source, so delete the reference
7296 this.mediaSource_ = null;
7297
7298 this.subtitlesTrack_ = null;
7299 }
7300
7301 /**
7302 * Indicates which time ranges are buffered
7303 *
7304 * @return {TimeRange}
7305 * TimeRange object representing the current buffered ranges
7306 */
7307
7308 _createClass(VTTSegmentLoader, [{
7309 key: 'buffered_',
7310 value: function buffered_() {
7311 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues.length) {
7312 return _videoJs2['default'].createTimeRanges();
7313 }
7314
7315 var cues = this.subtitlesTrack_.cues;
7316 var start = cues[0].startTime;
7317 var end = cues[cues.length - 1].startTime;
7318
7319 return _videoJs2['default'].createTimeRanges([[start, end]]);
7320 }
7321
7322 /**
7323 * Gets and sets init segment for the provided map
7324 *
7325 * @param {Object} map
7326 * The map object representing the init segment to get or set
7327 * @param {Boolean=} set
7328 * If true, the init segment for the provided map should be saved
7329 * @return {Object}
7330 * map object for desired init segment
7331 */
7332 }, {
7333 key: 'initSegment',
7334 value: function initSegment(map) {
7335 var set = arguments.length <= 1 || arguments[1] === undefined ? false : arguments[1];
7336
7337 if (!map) {
7338 return null;
7339 }
7340
7341 var id = (0, _binUtils.initSegmentId)(map);
7342 var storedMap = this.initSegments_[id];
7343
7344 if (set && !storedMap && map.bytes) {
7345 // append WebVTT line terminators to the media initialization segment if it exists
7346 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
7347 // requires two or more WebVTT line terminators between the WebVTT header and the
7348 // rest of the file
7349 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
7350 var combinedSegment = new Uint8Array(combinedByteLength);
7351
7352 combinedSegment.set(map.bytes);
7353 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
7354
7355 this.initSegments_[id] = storedMap = {
7356 resolvedUri: map.resolvedUri,
7357 byterange: map.byterange,
7358 bytes: combinedSegment
7359 };
7360 }
7361
7362 return storedMap || map;
7363 }
7364
7365 /**
7366 * Returns true if all configuration required for loading is present, otherwise false.
7367 *
7368 * @return {Boolean} True if the all configuration is ready for loading
7369 * @private
7370 */
7371 }, {
7372 key: 'couldBeginLoading_',
7373 value: function couldBeginLoading_() {
7374 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
7375 }
7376
7377 /**
7378 * Once all the starting parameters have been specified, begin
7379 * operation. This method should only be invoked from the INIT
7380 * state.
7381 *
7382 * @private
7383 */
7384 }, {
7385 key: 'init_',
7386 value: function init_() {
7387 this.state = 'READY';
7388 this.resetEverything();
7389 return this.monitorBuffer_();
7390 }
7391
7392 /**
7393 * Set a subtitle track on the segment loader to add subtitles to
7394 *
7395 * @param {TextTrack} track
7396 * The text track to add loaded subtitles to
7397 */
7398 }, {
7399 key: 'track',
7400 value: function track(_track) {
7401 this.subtitlesTrack_ = _track;
7402
7403 // if we were unpaused but waiting for a sourceUpdater, start
7404 // buffering now
7405 if (this.state === 'INIT' && this.couldBeginLoading_()) {
7406 this.init_();
7407 }
7408 }
7409
7410 /**
7411 * Remove any data in the source buffer between start and end times
7412 * @param {Number} start - the start time of the region to remove from the buffer
7413 * @param {Number} end - the end time of the region to remove from the buffer
7414 */
7415 }, {
7416 key: 'remove',
7417 value: function remove(start, end) {
7418 (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.subtitlesTrack_);
7419 }
7420
7421 /**
7422 * fill the buffer with segements unless the sourceBuffers are
7423 * currently updating
7424 *
7425 * Note: this function should only ever be called by monitorBuffer_
7426 * and never directly
7427 *
7428 * @private
7429 */
7430 }, {
7431 key: 'fillBuffer_',
7432 value: function fillBuffer_() {
7433 var _this = this;
7434
7435 if (!this.syncPoint_) {
7436 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
7437 }
7438
7439 // see if we need to begin loading immediately
7440 var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
7441
7442 segmentInfo = this.skipEmptySegments_(segmentInfo);
7443
7444 if (!segmentInfo) {
7445 return;
7446 }
7447
7448 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
7449 // We don't have the timestamp offset that we need to sync subtitles.
7450 // Rerun on a timestamp offset or user interaction.
7451 var checkTimestampOffset = function checkTimestampOffset() {
7452 _this.state = 'READY';
7453 if (!_this.paused()) {
7454 // if not paused, queue a buffer check as soon as possible
7455 _this.monitorBuffer_();
7456 }
7457 };
7458
7459 this.syncController_.one('timestampoffset', checkTimestampOffset);
7460 this.state = 'WAITING_ON_TIMELINE';
7461 return;
7462 }
7463
7464 this.loadSegment_(segmentInfo);
7465 }
7466
7467 /**
7468 * Prevents the segment loader from requesting segments we know contain no subtitles
7469 * by walking forward until we find the next segment that we don't know whether it is
7470 * empty or not.
7471 *
7472 * @param {Object} segmentInfo
7473 * a segment info object that describes the current segment
7474 * @return {Object}
7475 * a segment info object that describes the current segment
7476 */
7477 }, {
7478 key: 'skipEmptySegments_',
7479 value: function skipEmptySegments_(segmentInfo) {
7480 while (segmentInfo && segmentInfo.segment.empty) {
7481 segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
7482 }
7483 return segmentInfo;
7484 }
7485
7486 /**
7487 * append a decrypted segement to the SourceBuffer through a SourceUpdater
7488 *
7489 * @private
7490 */
7491 }, {
7492 key: 'handleSegment_',
7493 value: function handleSegment_() {
7494 var _this2 = this;
7495
7496 if (!this.pendingSegment_) {
7497 this.state = 'READY';
7498 return;
7499 }
7500
7501 this.state = 'APPENDING';
7502
7503 var segmentInfo = this.pendingSegment_;
7504 var segment = segmentInfo.segment;
7505
7506 // Make sure that vttjs has loaded, otherwise, wait till it finished loading
7507 if (typeof _globalWindow2['default'].WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
7508 var _ret = (function () {
7509
7510 var loadHandler = function loadHandler() {
7511 _this2.handleSegment_();
7512 };
7513
7514 _this2.state = 'WAITING_ON_VTTJS';
7515 _this2.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
7516 _this2.subtitlesTrack_.tech_.one('vttjserror', function () {
7517 _this2.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
7518 _this2.error({
7519 message: 'Error loading vtt.js'
7520 });
7521 _this2.state = 'READY';
7522 _this2.pause();
7523 _this2.trigger('error');
7524 });
7525
7526 return {
7527 v: undefined
7528 };
7529 })();
7530
7531 if (typeof _ret === 'object') return _ret.v;
7532 }
7533
7534 segment.requested = true;
7535
7536 try {
7537 this.parseVTTCues_(segmentInfo);
7538 } catch (e) {
7539 this.error({
7540 message: e.message
7541 });
7542 this.state = 'READY';
7543 this.pause();
7544 return this.trigger('error');
7545 }
7546
7547 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
7548
7549 if (segmentInfo.isSyncRequest) {
7550 this.trigger('syncinfoupdate');
7551 this.pendingSegment_ = null;
7552 this.state = 'READY';
7553 return;
7554 }
7555
7556 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
7557
7558 this.mediaSecondsLoaded += segment.duration;
7559
7560 segmentInfo.cues.forEach(function (cue) {
7561 _this2.subtitlesTrack_.addCue(cue);
7562 });
7563
7564 this.handleUpdateEnd_();
7565 }
7566
7567 /**
7568 * Uses the WebVTT parser to parse the segment response
7569 *
7570 * @param {Object} segmentInfo
7571 * a segment info object that describes the current segment
7572 * @private
7573 */
7574 }, {
7575 key: 'parseVTTCues_',
7576 value: function parseVTTCues_(segmentInfo) {
7577 var decoder = undefined;
7578 var decodeBytesToString = false;
7579
7580 if (typeof _globalWindow2['default'].TextDecoder === 'function') {
7581 decoder = new _globalWindow2['default'].TextDecoder('utf8');
7582 } else {
7583 decoder = _globalWindow2['default'].WebVTT.StringDecoder();
7584 decodeBytesToString = true;
7585 }
7586
7587 var parser = new _globalWindow2['default'].WebVTT.Parser(_globalWindow2['default'], _globalWindow2['default'].vttjs, decoder);
7588
7589 segmentInfo.cues = [];
7590 segmentInfo.timestampmap = { MPEGTS: 0, LOCAL: 0 };
7591
7592 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
7593 parser.ontimestampmap = function (map) {
7594 return segmentInfo.timestampmap = map;
7595 };
7596 parser.onparsingerror = function (error) {
7597 _videoJs2['default'].log.warn('Error encountered when parsing cues: ' + error.message);
7598 };
7599
7600 if (segmentInfo.segment.map) {
7601 var mapData = segmentInfo.segment.map.bytes;
7602
7603 if (decodeBytesToString) {
7604 mapData = uintToString(mapData);
7605 }
7606
7607 parser.parse(mapData);
7608 }
7609
7610 var segmentData = segmentInfo.bytes;
7611
7612 if (decodeBytesToString) {
7613 segmentData = uintToString(segmentData);
7614 }
7615
7616 parser.parse(segmentData);
7617 parser.flush();
7618 }
7619
7620 /**
7621 * Updates the start and end times of any cues parsed by the WebVTT parser using
7622 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
7623 * from the SyncController
7624 *
7625 * @param {Object} segmentInfo
7626 * a segment info object that describes the current segment
7627 * @param {Object} mappingObj
7628 * object containing a mapping from TS to media time
7629 * @param {Object} playlist
7630 * the playlist object containing the segment
7631 * @private
7632 */
7633 }, {
7634 key: 'updateTimeMapping_',
7635 value: function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
7636 var segment = segmentInfo.segment;
7637
7638 if (!mappingObj) {
7639 // If the sync controller does not have a mapping of TS to Media Time for the
7640 // timeline, then we don't have enough information to update the cue
7641 // start/end times
7642 return;
7643 }
7644
7645 if (!segmentInfo.cues.length) {
7646 // If there are no cues, we also do not have enough information to figure out
7647 // segment timing. Mark that the segment contains no cues so we don't re-request
7648 // an empty segment.
7649 segment.empty = true;
7650 return;
7651 }
7652
7653 var timestampmap = segmentInfo.timestampmap;
7654 var diff = timestampmap.MPEGTS / 90000 - timestampmap.LOCAL + mappingObj.mapping;
7655
7656 segmentInfo.cues.forEach(function (cue) {
7657 // First convert cue time to TS time using the timestamp-map provided within the vtt
7658 cue.startTime += diff;
7659 cue.endTime += diff;
7660 });
7661
7662 if (!playlist.syncInfo) {
7663 var firstStart = segmentInfo.cues[0].startTime;
7664 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
7665
7666 playlist.syncInfo = {
7667 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
7668 time: Math.min(firstStart, lastStart - segment.duration)
7669 };
7670 }
7671 }
7672 }]);
7673
7674 return VTTSegmentLoader;
7675})(_segmentLoader2['default']);
7676
7677exports['default'] = VTTSegmentLoader;
7678module.exports = exports['default'];
7679}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
7680},{"./bin-utils":2,"./segment-loader":15,"global/window":30,"videojs-contrib-media-sources/es5/remove-cues-from-track.js":71}],19:[function(require,module,exports){
7681(function (global){
7682/**
7683 * @file xhr.js
7684 */
7685
7686/**
7687 * A wrapper for videojs.xhr that tracks bandwidth.
7688 *
7689 * @param {Object} options options for the XHR
7690 * @param {Function} callback the callback to call when done
7691 * @return {Request} the xhr request that is going to be made
7692 */
7693'use strict';
7694
7695Object.defineProperty(exports, '__esModule', {
7696 value: true
7697});
7698
7699function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
7700
7701var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
7702
7703var _videoJs2 = _interopRequireDefault(_videoJs);
7704
7705var xhrFactory = function xhrFactory() {
7706 var xhr = function XhrFunction(options, callback) {
7707 // Add a default timeout for all hls requests
7708 options = (0, _videoJs.mergeOptions)({
7709 timeout: 45e3
7710 }, options);
7711
7712 // Allow an optional user-specified function to modify the option
7713 // object before we construct the xhr request
7714 var beforeRequest = XhrFunction.beforeRequest || _videoJs2['default'].Hls.xhr.beforeRequest;
7715
7716 if (beforeRequest && typeof beforeRequest === 'function') {
7717 var newOptions = beforeRequest(options);
7718
7719 if (newOptions) {
7720 options = newOptions;
7721 }
7722 }
7723
7724 var request = (0, _videoJs.xhr)(options, function (error, response) {
7725 var reqResponse = request.response;
7726
7727 if (!error && reqResponse) {
7728 request.responseTime = Date.now();
7729 request.roundTripTime = request.responseTime - request.requestTime;
7730 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
7731 if (!request.bandwidth) {
7732 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
7733 }
7734 }
7735
7736 // videojs.xhr now uses a specific code on the error
7737 // object to signal that a request has timed out instead
7738 // of setting a boolean on the request object
7739 if (error && error.code === 'ETIMEDOUT') {
7740 request.timedout = true;
7741 }
7742
7743 // videojs.xhr no longer considers status codes outside of 200 and 0
7744 // (for file uris) to be errors, but the old XHR did, so emulate that
7745 // behavior. Status 206 may be used in response to byterange requests.
7746 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
7747 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
7748 }
7749
7750 callback(error, request);
7751 });
7752 var originalAbort = request.abort;
7753
7754 request.abort = function () {
7755 request.aborted = true;
7756 return originalAbort.apply(request, arguments);
7757 };
7758 request.uri = options.uri;
7759 request.requestTime = Date.now();
7760 return request;
7761 };
7762
7763 return xhr;
7764};
7765
7766exports['default'] = xhrFactory;
7767module.exports = exports['default'];
7768}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
7769},{}],20:[function(require,module,exports){
7770/**
7771 * @file aes.js
7772 *
7773 * This file contains an adaptation of the AES decryption algorithm
7774 * from the Standford Javascript Cryptography Library. That work is
7775 * covered by the following copyright and permissions notice:
7776 *
7777 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
7778 * All rights reserved.
7779 *
7780 * Redistribution and use in source and binary forms, with or without
7781 * modification, are permitted provided that the following conditions are
7782 * met:
7783 *
7784 * 1. Redistributions of source code must retain the above copyright
7785 * notice, this list of conditions and the following disclaimer.
7786 *
7787 * 2. Redistributions in binary form must reproduce the above
7788 * copyright notice, this list of conditions and the following
7789 * disclaimer in the documentation and/or other materials provided
7790 * with the distribution.
7791 *
7792 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
7793 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
7794 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
7795 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
7796 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
7797 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
7798 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
7799 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
7800 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
7801 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
7802 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
7803 *
7804 * The views and conclusions contained in the software and documentation
7805 * are those of the authors and should not be interpreted as representing
7806 * official policies, either expressed or implied, of the authors.
7807 */
7808
7809/**
7810 * Expand the S-box tables.
7811 *
7812 * @private
7813 */
7814'use strict';
7815
7816Object.defineProperty(exports, '__esModule', {
7817 value: true
7818});
7819
7820var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
7821
7822function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
7823
7824var precompute = function precompute() {
7825 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
7826 var encTable = tables[0];
7827 var decTable = tables[1];
7828 var sbox = encTable[4];
7829 var sboxInv = decTable[4];
7830 var i = undefined;
7831 var x = undefined;
7832 var xInv = undefined;
7833 var d = [];
7834 var th = [];
7835 var x2 = undefined;
7836 var x4 = undefined;
7837 var x8 = undefined;
7838 var s = undefined;
7839 var tEnc = undefined;
7840 var tDec = undefined;
7841
7842 // Compute double and third tables
7843 for (i = 0; i < 256; i++) {
7844 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
7845 }
7846
7847 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
7848 // Compute sbox
7849 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
7850 s = s >> 8 ^ s & 255 ^ 99;
7851 sbox[x] = s;
7852 sboxInv[s] = x;
7853
7854 // Compute MixColumns
7855 x8 = d[x4 = d[x2 = d[x]]];
7856 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
7857 tEnc = d[s] * 0x101 ^ s * 0x1010100;
7858
7859 for (i = 0; i < 4; i++) {
7860 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
7861 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
7862 }
7863 }
7864
7865 // Compactify. Considerable speedup on Firefox.
7866 for (i = 0; i < 5; i++) {
7867 encTable[i] = encTable[i].slice(0);
7868 decTable[i] = decTable[i].slice(0);
7869 }
7870 return tables;
7871};
7872var aesTables = null;
7873
7874/**
7875 * Schedule out an AES key for both encryption and decryption. This
7876 * is a low-level class. Use a cipher mode to do bulk encryption.
7877 *
7878 * @class AES
7879 * @param key {Array} The key as an array of 4, 6 or 8 words.
7880 */
7881
7882var AES = (function () {
7883 function AES(key) {
7884 _classCallCheck(this, AES);
7885
7886 /**
7887 * The expanded S-box and inverse S-box tables. These will be computed
7888 * on the client so that we don't have to send them down the wire.
7889 *
7890 * There are two tables, _tables[0] is for encryption and
7891 * _tables[1] is for decryption.
7892 *
7893 * The first 4 sub-tables are the expanded S-box with MixColumns. The
7894 * last (_tables[01][4]) is the S-box itself.
7895 *
7896 * @private
7897 */
7898 // if we have yet to precompute the S-box tables
7899 // do so now
7900 if (!aesTables) {
7901 aesTables = precompute();
7902 }
7903 // then make a copy of that object for use
7904 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
7905 var i = undefined;
7906 var j = undefined;
7907 var tmp = undefined;
7908 var encKey = undefined;
7909 var decKey = undefined;
7910 var sbox = this._tables[0][4];
7911 var decTable = this._tables[1];
7912 var keyLen = key.length;
7913 var rcon = 1;
7914
7915 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
7916 throw new Error('Invalid aes key size');
7917 }
7918
7919 encKey = key.slice(0);
7920 decKey = [];
7921 this._key = [encKey, decKey];
7922
7923 // schedule encryption keys
7924 for (i = keyLen; i < 4 * keyLen + 28; i++) {
7925 tmp = encKey[i - 1];
7926
7927 // apply sbox
7928 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
7929 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255];
7930
7931 // shift rows and add rcon
7932 if (i % keyLen === 0) {
7933 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
7934 rcon = rcon << 1 ^ (rcon >> 7) * 283;
7935 }
7936 }
7937
7938 encKey[i] = encKey[i - keyLen] ^ tmp;
7939 }
7940
7941 // schedule decryption keys
7942 for (j = 0; i; j++, i--) {
7943 tmp = encKey[j & 3 ? i : i - 4];
7944 if (i <= 4 || j < 4) {
7945 decKey[j] = tmp;
7946 } else {
7947 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
7948 }
7949 }
7950 }
7951
7952 /**
7953 * Decrypt 16 bytes, specified as four 32-bit words.
7954 *
7955 * @param {Number} encrypted0 the first word to decrypt
7956 * @param {Number} encrypted1 the second word to decrypt
7957 * @param {Number} encrypted2 the third word to decrypt
7958 * @param {Number} encrypted3 the fourth word to decrypt
7959 * @param {Int32Array} out the array to write the decrypted words
7960 * into
7961 * @param {Number} offset the offset into the output array to start
7962 * writing results
7963 * @return {Array} The plaintext.
7964 */
7965
7966 _createClass(AES, [{
7967 key: 'decrypt',
7968 value: function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
7969 var key = this._key[1];
7970 // state variables a,b,c,d are loaded with pre-whitened data
7971 var a = encrypted0 ^ key[0];
7972 var b = encrypted3 ^ key[1];
7973 var c = encrypted2 ^ key[2];
7974 var d = encrypted1 ^ key[3];
7975 var a2 = undefined;
7976 var b2 = undefined;
7977 var c2 = undefined;
7978
7979 // key.length === 2 ?
7980 var nInnerRounds = key.length / 4 - 2;
7981 var i = undefined;
7982 var kIndex = 4;
7983 var table = this._tables[1];
7984
7985 // load up the tables
7986 var table0 = table[0];
7987 var table1 = table[1];
7988 var table2 = table[2];
7989 var table3 = table[3];
7990 var sbox = table[4];
7991
7992 // Inner rounds. Cribbed from OpenSSL.
7993 for (i = 0; i < nInnerRounds; i++) {
7994 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
7995 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
7996 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
7997 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
7998 kIndex += 4;
7999 a = a2;b = b2;c = c2;
8000 }
8001
8002 // Last round.
8003 for (i = 0; i < 4; i++) {
8004 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
8005 a2 = a;a = b;b = c;c = d;d = a2;
8006 }
8007 }
8008 }]);
8009
8010 return AES;
8011})();
8012
8013exports['default'] = AES;
8014module.exports = exports['default'];
8015},{}],21:[function(require,module,exports){
8016/**
8017 * @file async-stream.js
8018 */
8019'use strict';
8020
8021Object.defineProperty(exports, '__esModule', {
8022 value: true
8023});
8024
8025var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
8026
8027var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
8028
8029function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8030
8031function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
8032
8033function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
8034
8035var _stream = require('./stream');
8036
8037var _stream2 = _interopRequireDefault(_stream);
8038
8039/**
8040 * A wrapper around the Stream class to use setTiemout
8041 * and run stream "jobs" Asynchronously
8042 *
8043 * @class AsyncStream
8044 * @extends Stream
8045 */
8046
8047var AsyncStream = (function (_Stream) {
8048 _inherits(AsyncStream, _Stream);
8049
8050 function AsyncStream() {
8051 _classCallCheck(this, AsyncStream);
8052
8053 _get(Object.getPrototypeOf(AsyncStream.prototype), 'constructor', this).call(this, _stream2['default']);
8054 this.jobs = [];
8055 this.delay = 1;
8056 this.timeout_ = null;
8057 }
8058
8059 /**
8060 * process an async job
8061 *
8062 * @private
8063 */
8064
8065 _createClass(AsyncStream, [{
8066 key: 'processJob_',
8067 value: function processJob_() {
8068 this.jobs.shift()();
8069 if (this.jobs.length) {
8070 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
8071 } else {
8072 this.timeout_ = null;
8073 }
8074 }
8075
8076 /**
8077 * push a job into the stream
8078 *
8079 * @param {Function} job the job to push into the stream
8080 */
8081 }, {
8082 key: 'push',
8083 value: function push(job) {
8084 this.jobs.push(job);
8085 if (!this.timeout_) {
8086 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
8087 }
8088 }
8089 }]);
8090
8091 return AsyncStream;
8092})(_stream2['default']);
8093
8094exports['default'] = AsyncStream;
8095module.exports = exports['default'];
8096},{"./stream":24}],22:[function(require,module,exports){
8097/**
8098 * @file decrypter.js
8099 *
8100 * An asynchronous implementation of AES-128 CBC decryption with
8101 * PKCS#7 padding.
8102 */
8103
8104'use strict';
8105
8106Object.defineProperty(exports, '__esModule', {
8107 value: true
8108});
8109
8110var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
8111
8112function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8113
8114function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
8115
8116var _aes = require('./aes');
8117
8118var _aes2 = _interopRequireDefault(_aes);
8119
8120var _asyncStream = require('./async-stream');
8121
8122var _asyncStream2 = _interopRequireDefault(_asyncStream);
8123
8124var _pkcs7 = require('pkcs7');
8125
8126/**
8127 * Convert network-order (big-endian) bytes into their little-endian
8128 * representation.
8129 */
8130var ntoh = function ntoh(word) {
8131 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
8132};
8133
8134/**
8135 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
8136 *
8137 * @param {Uint8Array} encrypted the encrypted bytes
8138 * @param {Uint32Array} key the bytes of the decryption key
8139 * @param {Uint32Array} initVector the initialization vector (IV) to
8140 * use for the first round of CBC.
8141 * @return {Uint8Array} the decrypted bytes
8142 *
8143 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
8144 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
8145 * @see https://tools.ietf.org/html/rfc2315
8146 */
8147var decrypt = function decrypt(encrypted, key, initVector) {
8148 // word-level access to the encrypted bytes
8149 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
8150
8151 var decipher = new _aes2['default'](Array.prototype.slice.call(key));
8152
8153 // byte and word-level access for the decrypted output
8154 var decrypted = new Uint8Array(encrypted.byteLength);
8155 var decrypted32 = new Int32Array(decrypted.buffer);
8156
8157 // temporary variables for working with the IV, encrypted, and
8158 // decrypted data
8159 var init0 = undefined;
8160 var init1 = undefined;
8161 var init2 = undefined;
8162 var init3 = undefined;
8163 var encrypted0 = undefined;
8164 var encrypted1 = undefined;
8165 var encrypted2 = undefined;
8166 var encrypted3 = undefined;
8167
8168 // iteration variable
8169 var wordIx = undefined;
8170
8171 // pull out the words of the IV to ensure we don't modify the
8172 // passed-in reference and easier access
8173 init0 = initVector[0];
8174 init1 = initVector[1];
8175 init2 = initVector[2];
8176 init3 = initVector[3];
8177
8178 // decrypt four word sequences, applying cipher-block chaining (CBC)
8179 // to each decrypted block
8180 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
8181 // convert big-endian (network order) words into little-endian
8182 // (javascript order)
8183 encrypted0 = ntoh(encrypted32[wordIx]);
8184 encrypted1 = ntoh(encrypted32[wordIx + 1]);
8185 encrypted2 = ntoh(encrypted32[wordIx + 2]);
8186 encrypted3 = ntoh(encrypted32[wordIx + 3]);
8187
8188 // decrypt the block
8189 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx);
8190
8191 // XOR with the IV, and restore network byte-order to obtain the
8192 // plaintext
8193 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
8194 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
8195 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
8196 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);
8197
8198 // setup the IV for the next round
8199 init0 = encrypted0;
8200 init1 = encrypted1;
8201 init2 = encrypted2;
8202 init3 = encrypted3;
8203 }
8204
8205 return decrypted;
8206};
8207
8208exports.decrypt = decrypt;
8209/**
8210 * The `Decrypter` class that manages decryption of AES
8211 * data through `AsyncStream` objects and the `decrypt`
8212 * function
8213 *
8214 * @param {Uint8Array} encrypted the encrypted bytes
8215 * @param {Uint32Array} key the bytes of the decryption key
8216 * @param {Uint32Array} initVector the initialization vector (IV) to
8217 * @param {Function} done the function to run when done
8218 * @class Decrypter
8219 */
8220
8221var Decrypter = (function () {
8222 function Decrypter(encrypted, key, initVector, done) {
8223 _classCallCheck(this, Decrypter);
8224
8225 var step = Decrypter.STEP;
8226 var encrypted32 = new Int32Array(encrypted.buffer);
8227 var decrypted = new Uint8Array(encrypted.byteLength);
8228 var i = 0;
8229
8230 this.asyncStream_ = new _asyncStream2['default']();
8231
8232 // split up the encryption job and do the individual chunks asynchronously
8233 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
8234 for (i = step; i < encrypted32.length; i += step) {
8235 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
8236 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
8237 }
8238 // invoke the done() callback when everything is finished
8239 this.asyncStream_.push(function () {
8240 // remove pkcs#7 padding from the decrypted bytes
8241 done(null, (0, _pkcs7.unpad)(decrypted));
8242 });
8243 }
8244
8245 /**
8246 * a getter for step the maximum number of bytes to process at one time
8247 *
8248 * @return {Number} the value of step 32000
8249 */
8250
8251 _createClass(Decrypter, [{
8252 key: 'decryptChunk_',
8253
8254 /**
8255 * @private
8256 */
8257 value: function decryptChunk_(encrypted, key, initVector, decrypted) {
8258 return function () {
8259 var bytes = decrypt(encrypted, key, initVector);
8260
8261 decrypted.set(bytes, encrypted.byteOffset);
8262 };
8263 }
8264 }], [{
8265 key: 'STEP',
8266 get: function get() {
8267 // 4 * 8000;
8268 return 32000;
8269 }
8270 }]);
8271
8272 return Decrypter;
8273})();
8274
8275exports.Decrypter = Decrypter;
8276exports['default'] = {
8277 Decrypter: Decrypter,
8278 decrypt: decrypt
8279};
8280},{"./aes":20,"./async-stream":21,"pkcs7":26}],23:[function(require,module,exports){
8281/**
8282 * @file index.js
8283 *
8284 * Index module to easily import the primary components of AES-128
8285 * decryption. Like this:
8286 *
8287 * ```js
8288 * import {Decrypter, decrypt, AsyncStream} from 'aes-decrypter';
8289 * ```
8290 */
8291'use strict';
8292
8293Object.defineProperty(exports, '__esModule', {
8294 value: true
8295});
8296
8297function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8298
8299var _decrypter = require('./decrypter');
8300
8301var _asyncStream = require('./async-stream');
8302
8303var _asyncStream2 = _interopRequireDefault(_asyncStream);
8304
8305exports['default'] = {
8306 decrypt: _decrypter.decrypt,
8307 Decrypter: _decrypter.Decrypter,
8308 AsyncStream: _asyncStream2['default']
8309};
8310module.exports = exports['default'];
8311},{"./async-stream":21,"./decrypter":22}],24:[function(require,module,exports){
8312/**
8313 * @file stream.js
8314 */
8315/**
8316 * A lightweight readable stream implemention that handles event dispatching.
8317 *
8318 * @class Stream
8319 */
8320'use strict';
8321
8322Object.defineProperty(exports, '__esModule', {
8323 value: true
8324});
8325
8326var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
8327
8328function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
8329
8330var Stream = (function () {
8331 function Stream() {
8332 _classCallCheck(this, Stream);
8333
8334 this.listeners = {};
8335 }
8336
8337 /**
8338 * Add a listener for a specified event type.
8339 *
8340 * @param {String} type the event name
8341 * @param {Function} listener the callback to be invoked when an event of
8342 * the specified type occurs
8343 */
8344
8345 _createClass(Stream, [{
8346 key: 'on',
8347 value: function on(type, listener) {
8348 if (!this.listeners[type]) {
8349 this.listeners[type] = [];
8350 }
8351 this.listeners[type].push(listener);
8352 }
8353
8354 /**
8355 * Remove a listener for a specified event type.
8356 *
8357 * @param {String} type the event name
8358 * @param {Function} listener a function previously registered for this
8359 * type of event through `on`
8360 * @return {Boolean} if we could turn it off or not
8361 */
8362 }, {
8363 key: 'off',
8364 value: function off(type, listener) {
8365 var index = undefined;
8366
8367 if (!this.listeners[type]) {
8368 return false;
8369 }
8370 index = this.listeners[type].indexOf(listener);
8371 this.listeners[type].splice(index, 1);
8372 return index > -1;
8373 }
8374
8375 /**
8376 * Trigger an event of the specified type on this stream. Any additional
8377 * arguments to this function are passed as parameters to event listeners.
8378 *
8379 * @param {String} type the event name
8380 */
8381 }, {
8382 key: 'trigger',
8383 value: function trigger(type) {
8384 var callbacks = undefined;
8385 var i = undefined;
8386 var length = undefined;
8387 var args = undefined;
8388
8389 callbacks = this.listeners[type];
8390 if (!callbacks) {
8391 return;
8392 }
8393 // Slicing the arguments on every invocation of this method
8394 // can add a significant amount of overhead. Avoid the
8395 // intermediate object creation for the common case of a
8396 // single callback argument
8397 if (arguments.length === 2) {
8398 length = callbacks.length;
8399 for (i = 0; i < length; ++i) {
8400 callbacks[i].call(this, arguments[1]);
8401 }
8402 } else {
8403 args = Array.prototype.slice.call(arguments, 1);
8404 length = callbacks.length;
8405 for (i = 0; i < length; ++i) {
8406 callbacks[i].apply(this, args);
8407 }
8408 }
8409 }
8410
8411 /**
8412 * Destroys the stream and cleans up.
8413 */
8414 }, {
8415 key: 'dispose',
8416 value: function dispose() {
8417 this.listeners = {};
8418 }
8419
8420 /**
8421 * Forwards all `data` events on this stream to the destination stream. The
8422 * destination stream should provide a method `push` to receive the data
8423 * events as they arrive.
8424 *
8425 * @param {Stream} destination the stream that will receive all `data` events
8426 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
8427 */
8428 }, {
8429 key: 'pipe',
8430 value: function pipe(destination) {
8431 this.on('data', function (data) {
8432 destination.push(data);
8433 });
8434 }
8435 }]);
8436
8437 return Stream;
8438})();
8439
8440exports['default'] = Stream;
8441module.exports = exports['default'];
8442},{}],25:[function(require,module,exports){
8443/*
8444 * pkcs7.pad
8445 * https://github.com/brightcove/pkcs7
8446 *
8447 * Copyright (c) 2014 Brightcove
8448 * Licensed under the apache2 license.
8449 */
8450
8451'use strict';
8452
8453var PADDING;
8454
8455/**
8456 * Returns a new Uint8Array that is padded with PKCS#7 padding.
8457 * @param plaintext {Uint8Array} the input bytes before encryption
8458 * @return {Uint8Array} the padded bytes
8459 * @see http://tools.ietf.org/html/rfc5652
8460 */
8461module.exports = function pad(plaintext) {
8462 var padding = PADDING[(plaintext.byteLength % 16) || 0],
8463 result = new Uint8Array(plaintext.byteLength + padding.length);
8464 result.set(plaintext);
8465 result.set(padding, plaintext.byteLength);
8466 return result;
8467};
8468
8469// pre-define the padding values
8470PADDING = [
8471 [16, 16, 16, 16,
8472 16, 16, 16, 16,
8473 16, 16, 16, 16,
8474 16, 16, 16, 16],
8475
8476 [15, 15, 15, 15,
8477 15, 15, 15, 15,
8478 15, 15, 15, 15,
8479 15, 15, 15],
8480
8481 [14, 14, 14, 14,
8482 14, 14, 14, 14,
8483 14, 14, 14, 14,
8484 14, 14],
8485
8486 [13, 13, 13, 13,
8487 13, 13, 13, 13,
8488 13, 13, 13, 13,
8489 13],
8490
8491 [12, 12, 12, 12,
8492 12, 12, 12, 12,
8493 12, 12, 12, 12],
8494
8495 [11, 11, 11, 11,
8496 11, 11, 11, 11,
8497 11, 11, 11],
8498
8499 [10, 10, 10, 10,
8500 10, 10, 10, 10,
8501 10, 10],
8502
8503 [9, 9, 9, 9,
8504 9, 9, 9, 9,
8505 9],
8506
8507 [8, 8, 8, 8,
8508 8, 8, 8, 8],
8509
8510 [7, 7, 7, 7,
8511 7, 7, 7],
8512
8513 [6, 6, 6, 6,
8514 6, 6],
8515
8516 [5, 5, 5, 5,
8517 5],
8518
8519 [4, 4, 4, 4],
8520
8521 [3, 3, 3],
8522
8523 [2, 2],
8524
8525 [1]
8526];
8527
8528},{}],26:[function(require,module,exports){
8529/*
8530 * pkcs7
8531 * https://github.com/brightcove/pkcs7
8532 *
8533 * Copyright (c) 2014 Brightcove
8534 * Licensed under the apache2 license.
8535 */
8536
8537'use strict';
8538
8539exports.pad = require('./pad.js');
8540exports.unpad = require('./unpad.js');
8541
8542},{"./pad.js":25,"./unpad.js":27}],27:[function(require,module,exports){
8543/*
8544 * pkcs7.unpad
8545 * https://github.com/brightcove/pkcs7
8546 *
8547 * Copyright (c) 2014 Brightcove
8548 * Licensed under the apache2 license.
8549 */
8550
8551'use strict';
8552
8553/**
8554 * Returns the subarray of a Uint8Array without PKCS#7 padding.
8555 * @param padded {Uint8Array} unencrypted bytes that have been padded
8556 * @return {Uint8Array} the unpadded bytes
8557 * @see http://tools.ietf.org/html/rfc5652
8558 */
8559module.exports = function unpad(padded) {
8560 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
8561};
8562
8563},{}],28:[function(require,module,exports){
8564
8565},{}],29:[function(require,module,exports){
8566(function (global){
8567var topLevel = typeof global !== 'undefined' ? global :
8568 typeof window !== 'undefined' ? window : {}
8569var minDoc = require('min-document');
8570
8571var doccy;
8572
8573if (typeof document !== 'undefined') {
8574 doccy = document;
8575} else {
8576 doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];
8577
8578 if (!doccy) {
8579 doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;
8580 }
8581}
8582
8583module.exports = doccy;
8584
8585}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
8586},{"min-document":28}],30:[function(require,module,exports){
8587(function (global){
8588var win;
8589
8590if (typeof window !== "undefined") {
8591 win = window;
8592} else if (typeof global !== "undefined") {
8593 win = global;
8594} else if (typeof self !== "undefined"){
8595 win = self;
8596} else {
8597 win = {};
8598}
8599
8600module.exports = win;
8601
8602}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
8603},{}],31:[function(require,module,exports){
8604'use strict';
8605
8606var _lineStream = require('./line-stream');
8607
8608var _lineStream2 = _interopRequireDefault(_lineStream);
8609
8610var _parseStream = require('./parse-stream');
8611
8612var _parseStream2 = _interopRequireDefault(_parseStream);
8613
8614var _parser = require('./parser');
8615
8616var _parser2 = _interopRequireDefault(_parser);
8617
8618function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8619
8620module.exports = {
8621 LineStream: _lineStream2['default'],
8622 ParseStream: _parseStream2['default'],
8623 Parser: _parser2['default']
8624}; /**
8625 * @file m3u8/index.js
8626 *
8627 * Utilities for parsing M3U8 files. If the entire manifest is available,
8628 * `Parser` will create an object representation with enough detail for managing
8629 * playback. `ParseStream` and `LineStream` are lower-level parsing primitives
8630 * that do not assume the entirety of the manifest is ready and expose a
8631 * ReadableStream-like interface.
8632 */
8633},{"./line-stream":32,"./parse-stream":33,"./parser":34}],32:[function(require,module,exports){
8634'use strict';
8635
8636Object.defineProperty(exports, "__esModule", {
8637 value: true
8638});
8639
8640var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
8641
8642var _stream = require('./stream');
8643
8644var _stream2 = _interopRequireDefault(_stream);
8645
8646function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8647
8648function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
8649
8650function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
8651
8652function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
8653 * @file m3u8/line-stream.js
8654 */
8655
8656
8657/**
8658 * A stream that buffers string input and generates a `data` event for each
8659 * line.
8660 *
8661 * @class LineStream
8662 * @extends Stream
8663 */
8664var LineStream = function (_Stream) {
8665 _inherits(LineStream, _Stream);
8666
8667 function LineStream() {
8668 _classCallCheck(this, LineStream);
8669
8670 var _this = _possibleConstructorReturn(this, (LineStream.__proto__ || Object.getPrototypeOf(LineStream)).call(this));
8671
8672 _this.buffer = '';
8673 return _this;
8674 }
8675
8676 /**
8677 * Add new data to be parsed.
8678 *
8679 * @param {String} data the text to process
8680 */
8681
8682
8683 _createClass(LineStream, [{
8684 key: 'push',
8685 value: function push(data) {
8686 var nextNewline = void 0;
8687
8688 this.buffer += data;
8689 nextNewline = this.buffer.indexOf('\n');
8690
8691 for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {
8692 this.trigger('data', this.buffer.substring(0, nextNewline));
8693 this.buffer = this.buffer.substring(nextNewline + 1);
8694 }
8695 }
8696 }]);
8697
8698 return LineStream;
8699}(_stream2['default']);
8700
8701exports['default'] = LineStream;
8702},{"./stream":35}],33:[function(require,module,exports){
8703'use strict';
8704
8705Object.defineProperty(exports, "__esModule", {
8706 value: true
8707});
8708
8709var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
8710
8711var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
8712
8713var _stream = require('./stream');
8714
8715var _stream2 = _interopRequireDefault(_stream);
8716
8717function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8718
8719function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
8720
8721function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
8722
8723function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
8724 * @file m3u8/parse-stream.js
8725 */
8726
8727
8728/**
8729 * "forgiving" attribute list psuedo-grammar:
8730 * attributes -> keyvalue (',' keyvalue)*
8731 * keyvalue -> key '=' value
8732 * key -> [^=]*
8733 * value -> '"' [^"]* '"' | [^,]*
8734 */
8735var attributeSeparator = function attributeSeparator() {
8736 var key = '[^=]*';
8737 var value = '"[^"]*"|[^,]*';
8738 var keyvalue = '(?:' + key + ')=(?:' + value + ')';
8739
8740 return new RegExp('(?:^|,)(' + keyvalue + ')');
8741};
8742
8743/**
8744 * Parse attributes from a line given the seperator
8745 *
8746 * @param {String} attributes the attibute line to parse
8747 */
8748var parseAttributes = function parseAttributes(attributes) {
8749 // split the string using attributes as the separator
8750 var attrs = attributes.split(attributeSeparator());
8751 var result = {};
8752 var i = attrs.length;
8753 var attr = void 0;
8754
8755 while (i--) {
8756 // filter out unmatched portions of the string
8757 if (attrs[i] === '') {
8758 continue;
8759 }
8760
8761 // split the key and value
8762 attr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1);
8763 // trim whitespace and remove optional quotes around the value
8764 attr[0] = attr[0].replace(/^\s+|\s+$/g, '');
8765 attr[1] = attr[1].replace(/^\s+|\s+$/g, '');
8766 attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');
8767 result[attr[0]] = attr[1];
8768 }
8769 return result;
8770};
8771
8772/**
8773 * A line-level M3U8 parser event stream. It expects to receive input one
8774 * line at a time and performs a context-free parse of its contents. A stream
8775 * interpretation of a manifest can be useful if the manifest is expected to
8776 * be too large to fit comfortably into memory or the entirety of the input
8777 * is not immediately available. Otherwise, it's probably much easier to work
8778 * with a regular `Parser` object.
8779 *
8780 * Produces `data` events with an object that captures the parser's
8781 * interpretation of the input. That object has a property `tag` that is one
8782 * of `uri`, `comment`, or `tag`. URIs only have a single additional
8783 * property, `line`, which captures the entirety of the input without
8784 * interpretation. Comments similarly have a single additional property
8785 * `text` which is the input without the leading `#`.
8786 *
8787 * Tags always have a property `tagType` which is the lower-cased version of
8788 * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
8789 * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
8790 * tags are given the tag type `unknown` and a single additional property
8791 * `data` with the remainder of the input.
8792 *
8793 * @class ParseStream
8794 * @extends Stream
8795 */
8796
8797var ParseStream = function (_Stream) {
8798 _inherits(ParseStream, _Stream);
8799
8800 function ParseStream() {
8801 _classCallCheck(this, ParseStream);
8802
8803 return _possibleConstructorReturn(this, (ParseStream.__proto__ || Object.getPrototypeOf(ParseStream)).call(this));
8804 }
8805
8806 /**
8807 * Parses an additional line of input.
8808 *
8809 * @param {String} line a single line of an M3U8 file to parse
8810 */
8811
8812
8813 _createClass(ParseStream, [{
8814 key: 'push',
8815 value: function push(line) {
8816 var match = void 0;
8817 var event = void 0;
8818
8819 // strip whitespace
8820 line = line.replace(/^[\u0000\s]+|[\u0000\s]+$/g, '');
8821 if (line.length === 0) {
8822 // ignore empty lines
8823 return;
8824 }
8825
8826 // URIs
8827 if (line[0] !== '#') {
8828 this.trigger('data', {
8829 type: 'uri',
8830 uri: line
8831 });
8832 return;
8833 }
8834
8835 // Comments
8836 if (line.indexOf('#EXT') !== 0) {
8837 this.trigger('data', {
8838 type: 'comment',
8839 text: line.slice(1)
8840 });
8841 return;
8842 }
8843
8844 // strip off any carriage returns here so the regex matching
8845 // doesn't have to account for them.
8846 line = line.replace('\r', '');
8847
8848 // Tags
8849 match = /^#EXTM3U/.exec(line);
8850 if (match) {
8851 this.trigger('data', {
8852 type: 'tag',
8853 tagType: 'm3u'
8854 });
8855 return;
8856 }
8857 match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(line);
8858 if (match) {
8859 event = {
8860 type: 'tag',
8861 tagType: 'inf'
8862 };
8863 if (match[1]) {
8864 event.duration = parseFloat(match[1]);
8865 }
8866 if (match[2]) {
8867 event.title = match[2];
8868 }
8869 this.trigger('data', event);
8870 return;
8871 }
8872 match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(line);
8873 if (match) {
8874 event = {
8875 type: 'tag',
8876 tagType: 'targetduration'
8877 };
8878 if (match[1]) {
8879 event.duration = parseInt(match[1], 10);
8880 }
8881 this.trigger('data', event);
8882 return;
8883 }
8884 match = /^#ZEN-TOTAL-DURATION:?([0-9.]*)?/.exec(line);
8885 if (match) {
8886 event = {
8887 type: 'tag',
8888 tagType: 'totalduration'
8889 };
8890 if (match[1]) {
8891 event.duration = parseInt(match[1], 10);
8892 }
8893 this.trigger('data', event);
8894 return;
8895 }
8896 match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(line);
8897 if (match) {
8898 event = {
8899 type: 'tag',
8900 tagType: 'version'
8901 };
8902 if (match[1]) {
8903 event.version = parseInt(match[1], 10);
8904 }
8905 this.trigger('data', event);
8906 return;
8907 }
8908 match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(line);
8909 if (match) {
8910 event = {
8911 type: 'tag',
8912 tagType: 'media-sequence'
8913 };
8914 if (match[1]) {
8915 event.number = parseInt(match[1], 10);
8916 }
8917 this.trigger('data', event);
8918 return;
8919 }
8920 match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(line);
8921 if (match) {
8922 event = {
8923 type: 'tag',
8924 tagType: 'discontinuity-sequence'
8925 };
8926 if (match[1]) {
8927 event.number = parseInt(match[1], 10);
8928 }
8929 this.trigger('data', event);
8930 return;
8931 }
8932 match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(line);
8933 if (match) {
8934 event = {
8935 type: 'tag',
8936 tagType: 'playlist-type'
8937 };
8938 if (match[1]) {
8939 event.playlistType = match[1];
8940 }
8941 this.trigger('data', event);
8942 return;
8943 }
8944 match = /^#EXT-X-BYTERANGE:?([0-9.]*)?@?([0-9.]*)?/.exec(line);
8945 if (match) {
8946 event = {
8947 type: 'tag',
8948 tagType: 'byterange'
8949 };
8950 if (match[1]) {
8951 event.length = parseInt(match[1], 10);
8952 }
8953 if (match[2]) {
8954 event.offset = parseInt(match[2], 10);
8955 }
8956 this.trigger('data', event);
8957 return;
8958 }
8959 match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(line);
8960 if (match) {
8961 event = {
8962 type: 'tag',
8963 tagType: 'allow-cache'
8964 };
8965 if (match[1]) {
8966 event.allowed = !/NO/.test(match[1]);
8967 }
8968 this.trigger('data', event);
8969 return;
8970 }
8971 match = /^#EXT-X-MAP:?(.*)$/.exec(line);
8972 if (match) {
8973 event = {
8974 type: 'tag',
8975 tagType: 'map'
8976 };
8977
8978 if (match[1]) {
8979 var attributes = parseAttributes(match[1]);
8980
8981 if (attributes.URI) {
8982 event.uri = attributes.URI;
8983 }
8984 if (attributes.BYTERANGE) {
8985 var _attributes$BYTERANGE = attributes.BYTERANGE.split('@'),
8986 _attributes$BYTERANGE2 = _slicedToArray(_attributes$BYTERANGE, 2),
8987 length = _attributes$BYTERANGE2[0],
8988 offset = _attributes$BYTERANGE2[1];
8989
8990 event.byterange = {};
8991 if (length) {
8992 event.byterange.length = parseInt(length, 10);
8993 }
8994 if (offset) {
8995 event.byterange.offset = parseInt(offset, 10);
8996 }
8997 }
8998 }
8999
9000 this.trigger('data', event);
9001 return;
9002 }
9003 match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(line);
9004 if (match) {
9005 event = {
9006 type: 'tag',
9007 tagType: 'stream-inf'
9008 };
9009 if (match[1]) {
9010 event.attributes = parseAttributes(match[1]);
9011
9012 if (event.attributes.RESOLUTION) {
9013 var split = event.attributes.RESOLUTION.split('x');
9014 var resolution = {};
9015
9016 if (split[0]) {
9017 resolution.width = parseInt(split[0], 10);
9018 }
9019 if (split[1]) {
9020 resolution.height = parseInt(split[1], 10);
9021 }
9022 event.attributes.RESOLUTION = resolution;
9023 }
9024 if (event.attributes.BANDWIDTH) {
9025 event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
9026 }
9027 if (event.attributes['PROGRAM-ID']) {
9028 event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
9029 }
9030 }
9031 this.trigger('data', event);
9032 return;
9033 }
9034 match = /^#EXT-X-MEDIA:?(.*)$/.exec(line);
9035 if (match) {
9036 event = {
9037 type: 'tag',
9038 tagType: 'media'
9039 };
9040 if (match[1]) {
9041 event.attributes = parseAttributes(match[1]);
9042 }
9043 this.trigger('data', event);
9044 return;
9045 }
9046 match = /^#EXT-X-ENDLIST/.exec(line);
9047 if (match) {
9048 this.trigger('data', {
9049 type: 'tag',
9050 tagType: 'endlist'
9051 });
9052 return;
9053 }
9054 match = /^#EXT-X-DISCONTINUITY/.exec(line);
9055 if (match) {
9056 this.trigger('data', {
9057 type: 'tag',
9058 tagType: 'discontinuity'
9059 });
9060 return;
9061 }
9062 match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(line);
9063 if (match) {
9064 event = {
9065 type: 'tag',
9066 tagType: 'program-date-time'
9067 };
9068 if (match[1]) {
9069 event.dateTimeString = match[1];
9070 event.dateTimeObject = new Date(match[1]);
9071 }
9072 this.trigger('data', event);
9073 return;
9074 }
9075 match = /^#EXT-X-KEY:?(.*)$/.exec(line);
9076 if (match) {
9077 event = {
9078 type: 'tag',
9079 tagType: 'key'
9080 };
9081 if (match[1]) {
9082 event.attributes = parseAttributes(match[1]);
9083 // parse the IV string into a Uint32Array
9084 if (event.attributes.IV) {
9085 if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
9086 event.attributes.IV = event.attributes.IV.substring(2);
9087 }
9088
9089 event.attributes.IV = event.attributes.IV.match(/.{8}/g);
9090 event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
9091 event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
9092 event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
9093 event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
9094 event.attributes.IV = new Uint32Array(event.attributes.IV);
9095 }
9096 }
9097 this.trigger('data', event);
9098 return;
9099 }
9100 match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(line);
9101 if (match) {
9102 event = {
9103 type: 'tag',
9104 tagType: 'cue-out-cont'
9105 };
9106 if (match[1]) {
9107 event.data = match[1];
9108 } else {
9109 event.data = '';
9110 }
9111 this.trigger('data', event);
9112 return;
9113 }
9114 match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(line);
9115 if (match) {
9116 event = {
9117 type: 'tag',
9118 tagType: 'cue-out'
9119 };
9120 if (match[1]) {
9121 event.data = match[1];
9122 } else {
9123 event.data = '';
9124 }
9125 this.trigger('data', event);
9126 return;
9127 }
9128 match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(line);
9129 if (match) {
9130 event = {
9131 type: 'tag',
9132 tagType: 'cue-in'
9133 };
9134 if (match[1]) {
9135 event.data = match[1];
9136 } else {
9137 event.data = '';
9138 }
9139 this.trigger('data', event);
9140 return;
9141 }
9142
9143 // unknown tag type
9144 this.trigger('data', {
9145 type: 'tag',
9146 data: line.slice(4)
9147 });
9148 }
9149 }]);
9150
9151 return ParseStream;
9152}(_stream2['default']);
9153
9154exports['default'] = ParseStream;
9155},{"./stream":35}],34:[function(require,module,exports){
9156'use strict';
9157
9158Object.defineProperty(exports, "__esModule", {
9159 value: true
9160});
9161
9162var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
9163
9164var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
9165
9166var _stream = require('./stream');
9167
9168var _stream2 = _interopRequireDefault(_stream);
9169
9170var _lineStream = require('./line-stream');
9171
9172var _lineStream2 = _interopRequireDefault(_lineStream);
9173
9174var _parseStream = require('./parse-stream');
9175
9176var _parseStream2 = _interopRequireDefault(_parseStream);
9177
9178function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
9179
9180function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
9181
9182function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
9183
9184function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
9185 * @file m3u8/parser.js
9186 */
9187
9188
9189/**
9190 * A parser for M3U8 files. The current interpretation of the input is
9191 * exposed as a property `manifest` on parser objects. It's just two lines to
9192 * create and parse a manifest once you have the contents available as a string:
9193 *
9194 * ```js
9195 * var parser = new m3u8.Parser();
9196 * parser.push(xhr.responseText);
9197 * ```
9198 *
9199 * New input can later be applied to update the manifest object by calling
9200 * `push` again.
9201 *
9202 * The parser attempts to create a usable manifest object even if the
9203 * underlying input is somewhat nonsensical. It emits `info` and `warning`
9204 * events during the parse if it encounters input that seems invalid or
9205 * requires some property of the manifest object to be defaulted.
9206 *
9207 * @class Parser
9208 * @extends Stream
9209 */
9210var Parser = function (_Stream) {
9211 _inherits(Parser, _Stream);
9212
9213 function Parser() {
9214 _classCallCheck(this, Parser);
9215
9216 var _this = _possibleConstructorReturn(this, (Parser.__proto__ || Object.getPrototypeOf(Parser)).call(this));
9217
9218 _this.lineStream = new _lineStream2['default']();
9219 _this.parseStream = new _parseStream2['default']();
9220 _this.lineStream.pipe(_this.parseStream);
9221 /* eslint-disable consistent-this */
9222 var self = _this;
9223 /* eslint-enable consistent-this */
9224 var uris = [];
9225 var currentUri = {};
9226 // if specified, the active EXT-X-MAP definition
9227 var currentMap = void 0;
9228 // if specified, the active decryption key
9229 var _key = void 0;
9230 var noop = function noop() {};
9231 var defaultMediaGroups = {
9232 'AUDIO': {},
9233 'VIDEO': {},
9234 'CLOSED-CAPTIONS': {},
9235 'SUBTITLES': {}
9236 };
9237 // group segments into numbered timelines delineated by discontinuities
9238 var currentTimeline = 0;
9239
9240 // the manifest is empty until the parse stream begins delivering data
9241 _this.manifest = {
9242 allowCache: true,
9243 discontinuityStarts: [],
9244 segments: []
9245 };
9246
9247 // update the manifest with the m3u8 entry from the parse stream
9248 _this.parseStream.on('data', function (entry) {
9249 var mediaGroup = void 0;
9250 var rendition = void 0;
9251
9252 ({
9253 tag: function tag() {
9254 // switch based on the tag type
9255 (({
9256 'allow-cache': function allowCache() {
9257 this.manifest.allowCache = entry.allowed;
9258 if (!('allowed' in entry)) {
9259 this.trigger('info', {
9260 message: 'defaulting allowCache to YES'
9261 });
9262 this.manifest.allowCache = true;
9263 }
9264 },
9265 byterange: function byterange() {
9266 var byterange = {};
9267
9268 if ('length' in entry) {
9269 currentUri.byterange = byterange;
9270 byterange.length = entry.length;
9271
9272 if (!('offset' in entry)) {
9273 this.trigger('info', {
9274 message: 'defaulting offset to zero'
9275 });
9276 entry.offset = 0;
9277 }
9278 }
9279 if ('offset' in entry) {
9280 currentUri.byterange = byterange;
9281 byterange.offset = entry.offset;
9282 }
9283 },
9284 endlist: function endlist() {
9285 this.manifest.endList = true;
9286 },
9287 inf: function inf() {
9288 if (!('mediaSequence' in this.manifest)) {
9289 this.manifest.mediaSequence = 0;
9290 this.trigger('info', {
9291 message: 'defaulting media sequence to zero'
9292 });
9293 }
9294 if (!('discontinuitySequence' in this.manifest)) {
9295 this.manifest.discontinuitySequence = 0;
9296 this.trigger('info', {
9297 message: 'defaulting discontinuity sequence to zero'
9298 });
9299 }
9300 if (entry.duration > 0) {
9301 currentUri.duration = entry.duration;
9302 }
9303
9304 if (entry.duration === 0) {
9305 currentUri.duration = 0.01;
9306 this.trigger('info', {
9307 message: 'updating zero segment duration to a small value'
9308 });
9309 }
9310
9311 this.manifest.segments = uris;
9312 },
9313 key: function key() {
9314 if (!entry.attributes) {
9315 this.trigger('warn', {
9316 message: 'ignoring key declaration without attribute list'
9317 });
9318 return;
9319 }
9320 // clear the active encryption key
9321 if (entry.attributes.METHOD === 'NONE') {
9322 _key = null;
9323 return;
9324 }
9325 if (!entry.attributes.URI) {
9326 this.trigger('warn', {
9327 message: 'ignoring key declaration without URI'
9328 });
9329 return;
9330 }
9331 if (!entry.attributes.METHOD) {
9332 this.trigger('warn', {
9333 message: 'defaulting key method to AES-128'
9334 });
9335 }
9336
9337 // setup an encryption key for upcoming segments
9338 _key = {
9339 method: entry.attributes.METHOD || 'AES-128',
9340 uri: entry.attributes.URI
9341 };
9342
9343 if (typeof entry.attributes.IV !== 'undefined') {
9344 _key.iv = entry.attributes.IV;
9345 }
9346 },
9347 'media-sequence': function mediaSequence() {
9348 if (!isFinite(entry.number)) {
9349 this.trigger('warn', {
9350 message: 'ignoring invalid media sequence: ' + entry.number
9351 });
9352 return;
9353 }
9354 this.manifest.mediaSequence = entry.number;
9355 },
9356 'discontinuity-sequence': function discontinuitySequence() {
9357 if (!isFinite(entry.number)) {
9358 this.trigger('warn', {
9359 message: 'ignoring invalid discontinuity sequence: ' + entry.number
9360 });
9361 return;
9362 }
9363 this.manifest.discontinuitySequence = entry.number;
9364 currentTimeline = entry.number;
9365 },
9366 'playlist-type': function playlistType() {
9367 if (!/VOD|EVENT/.test(entry.playlistType)) {
9368 this.trigger('warn', {
9369 message: 'ignoring unknown playlist type: ' + entry.playlist
9370 });
9371 return;
9372 }
9373 this.manifest.playlistType = entry.playlistType;
9374 },
9375 map: function map() {
9376 currentMap = {};
9377 if (entry.uri) {
9378 currentMap.uri = entry.uri;
9379 }
9380 if (entry.byterange) {
9381 currentMap.byterange = entry.byterange;
9382 }
9383 },
9384 'stream-inf': function streamInf() {
9385 this.manifest.playlists = uris;
9386 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
9387
9388 if (!entry.attributes) {
9389 this.trigger('warn', {
9390 message: 'ignoring empty stream-inf attributes'
9391 });
9392 return;
9393 }
9394
9395 if (!currentUri.attributes) {
9396 currentUri.attributes = {};
9397 }
9398 _extends(currentUri.attributes, entry.attributes);
9399 },
9400 media: function media() {
9401 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
9402
9403 if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
9404 this.trigger('warn', {
9405 message: 'ignoring incomplete or missing media group'
9406 });
9407 return;
9408 }
9409
9410 // find the media group, creating defaults as necessary
9411 var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
9412
9413 mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
9414 mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']];
9415
9416 // collect the rendition metadata
9417 rendition = {
9418 'default': /yes/i.test(entry.attributes.DEFAULT)
9419 };
9420 if (rendition['default']) {
9421 rendition.autoselect = true;
9422 } else {
9423 rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
9424 }
9425 if (entry.attributes.LANGUAGE) {
9426 rendition.language = entry.attributes.LANGUAGE;
9427 }
9428 if (entry.attributes.URI) {
9429 rendition.uri = entry.attributes.URI;
9430 }
9431 if (entry.attributes['INSTREAM-ID']) {
9432 rendition.instreamId = entry.attributes['INSTREAM-ID'];
9433 }
9434 if (entry.attributes.CHARACTERISTICS) {
9435 rendition.characteristics = entry.attributes.CHARACTERISTICS;
9436 }
9437 if (entry.attributes.FORCED) {
9438 rendition.forced = /yes/i.test(entry.attributes.FORCED);
9439 }
9440
9441 // insert the new rendition
9442 mediaGroup[entry.attributes.NAME] = rendition;
9443 },
9444 discontinuity: function discontinuity() {
9445 currentTimeline += 1;
9446 currentUri.discontinuity = true;
9447 this.manifest.discontinuityStarts.push(uris.length);
9448 },
9449 'program-date-time': function programDateTime() {
9450 this.manifest.dateTimeString = entry.dateTimeString;
9451 this.manifest.dateTimeObject = entry.dateTimeObject;
9452 },
9453 targetduration: function targetduration() {
9454 if (!isFinite(entry.duration) || entry.duration < 0) {
9455 this.trigger('warn', {
9456 message: 'ignoring invalid target duration: ' + entry.duration
9457 });
9458 return;
9459 }
9460 this.manifest.targetDuration = entry.duration;
9461 },
9462 totalduration: function totalduration() {
9463 if (!isFinite(entry.duration) || entry.duration < 0) {
9464 this.trigger('warn', {
9465 message: 'ignoring invalid total duration: ' + entry.duration
9466 });
9467 return;
9468 }
9469 this.manifest.totalDuration = entry.duration;
9470 },
9471 'cue-out': function cueOut() {
9472 currentUri.cueOut = entry.data;
9473 },
9474 'cue-out-cont': function cueOutCont() {
9475 currentUri.cueOutCont = entry.data;
9476 },
9477 'cue-in': function cueIn() {
9478 currentUri.cueIn = entry.data;
9479 }
9480 })[entry.tagType] || noop).call(self);
9481 },
9482 uri: function uri() {
9483 currentUri.uri = entry.uri;
9484 uris.push(currentUri);
9485
9486 // if no explicit duration was declared, use the target duration
9487 if (this.manifest.targetDuration && !('duration' in currentUri)) {
9488 this.trigger('warn', {
9489 message: 'defaulting segment duration to the target duration'
9490 });
9491 currentUri.duration = this.manifest.targetDuration;
9492 }
9493 // annotate with encryption information, if necessary
9494 if (_key) {
9495 currentUri.key = _key;
9496 }
9497 currentUri.timeline = currentTimeline;
9498 // annotate with initialization segment information, if necessary
9499 if (currentMap) {
9500 currentUri.map = currentMap;
9501 }
9502
9503 // prepare for the next URI
9504 currentUri = {};
9505 },
9506 comment: function comment() {
9507 // comments are not important for playback
9508 }
9509 })[entry.type].call(self);
9510 });
9511
9512 return _this;
9513 }
9514
9515 /**
9516 * Parse the input string and update the manifest object.
9517 *
9518 * @param {String} chunk a potentially incomplete portion of the manifest
9519 */
9520
9521
9522 _createClass(Parser, [{
9523 key: 'push',
9524 value: function push(chunk) {
9525 this.lineStream.push(chunk);
9526 }
9527
9528 /**
9529 * Flush any remaining input. This can be handy if the last line of an M3U8
9530 * manifest did not contain a trailing newline but the file has been
9531 * completely received.
9532 */
9533
9534 }, {
9535 key: 'end',
9536 value: function end() {
9537 // flush any buffered input
9538 this.lineStream.push('\n');
9539 }
9540 }]);
9541
9542 return Parser;
9543}(_stream2['default']);
9544
9545exports['default'] = Parser;
9546},{"./line-stream":32,"./parse-stream":33,"./stream":35}],35:[function(require,module,exports){
9547'use strict';
9548
9549Object.defineProperty(exports, "__esModule", {
9550 value: true
9551});
9552
9553var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
9554
9555function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
9556
9557/**
9558 * @file stream.js
9559 */
9560/**
9561 * A lightweight readable stream implemention that handles event dispatching.
9562 *
9563 * @class Stream
9564 */
9565var Stream = function () {
9566 function Stream() {
9567 _classCallCheck(this, Stream);
9568
9569 this.listeners = {};
9570 }
9571
9572 /**
9573 * Add a listener for a specified event type.
9574 *
9575 * @param {String} type the event name
9576 * @param {Function} listener the callback to be invoked when an event of
9577 * the specified type occurs
9578 */
9579
9580
9581 _createClass(Stream, [{
9582 key: 'on',
9583 value: function on(type, listener) {
9584 if (!this.listeners[type]) {
9585 this.listeners[type] = [];
9586 }
9587 this.listeners[type].push(listener);
9588 }
9589
9590 /**
9591 * Remove a listener for a specified event type.
9592 *
9593 * @param {String} type the event name
9594 * @param {Function} listener a function previously registered for this
9595 * type of event through `on`
9596 * @return {Boolean} if we could turn it off or not
9597 */
9598
9599 }, {
9600 key: 'off',
9601 value: function off(type, listener) {
9602 if (!this.listeners[type]) {
9603 return false;
9604 }
9605
9606 var index = this.listeners[type].indexOf(listener);
9607
9608 this.listeners[type].splice(index, 1);
9609 return index > -1;
9610 }
9611
9612 /**
9613 * Trigger an event of the specified type on this stream. Any additional
9614 * arguments to this function are passed as parameters to event listeners.
9615 *
9616 * @param {String} type the event name
9617 */
9618
9619 }, {
9620 key: 'trigger',
9621 value: function trigger(type) {
9622 var callbacks = this.listeners[type];
9623 var i = void 0;
9624 var length = void 0;
9625 var args = void 0;
9626
9627 if (!callbacks) {
9628 return;
9629 }
9630 // Slicing the arguments on every invocation of this method
9631 // can add a significant amount of overhead. Avoid the
9632 // intermediate object creation for the common case of a
9633 // single callback argument
9634 if (arguments.length === 2) {
9635 length = callbacks.length;
9636 for (i = 0; i < length; ++i) {
9637 callbacks[i].call(this, arguments[1]);
9638 }
9639 } else {
9640 args = Array.prototype.slice.call(arguments, 1);
9641 length = callbacks.length;
9642 for (i = 0; i < length; ++i) {
9643 callbacks[i].apply(this, args);
9644 }
9645 }
9646 }
9647
9648 /**
9649 * Destroys the stream and cleans up.
9650 */
9651
9652 }, {
9653 key: 'dispose',
9654 value: function dispose() {
9655 this.listeners = {};
9656 }
9657 /**
9658 * Forwards all `data` events on this stream to the destination stream. The
9659 * destination stream should provide a method `push` to receive the data
9660 * events as they arrive.
9661 *
9662 * @param {Stream} destination the stream that will receive all `data` events
9663 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
9664 */
9665
9666 }, {
9667 key: 'pipe',
9668 value: function pipe(destination) {
9669 this.on('data', function (data) {
9670 destination.push(data);
9671 });
9672 }
9673 }]);
9674
9675 return Stream;
9676}();
9677
9678exports['default'] = Stream;
9679},{}],36:[function(require,module,exports){
9680/**
9681 * mux.js
9682 *
9683 * Copyright (c) 2016 Brightcove
9684 * All rights reserved.
9685 *
9686 * A stream-based aac to mp4 converter. This utility can be used to
9687 * deliver mp4s to a SourceBuffer on platforms that support native
9688 * Media Source Extensions.
9689 */
9690'use strict';
9691var Stream = require('../utils/stream.js');
9692
9693// Constants
9694var AacStream;
9695
9696/**
9697 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
9698 */
9699
9700AacStream = function() {
9701 var
9702 everything = new Uint8Array(),
9703 timeStamp = 0;
9704
9705 AacStream.prototype.init.call(this);
9706
9707 this.setTimestamp = function(timestamp) {
9708 timeStamp = timestamp;
9709 };
9710
9711 this.parseId3TagSize = function(header, byteIndex) {
9712 var
9713 returnSize = (header[byteIndex + 6] << 21) |
9714 (header[byteIndex + 7] << 14) |
9715 (header[byteIndex + 8] << 7) |
9716 (header[byteIndex + 9]),
9717 flags = header[byteIndex + 5],
9718 footerPresent = (flags & 16) >> 4;
9719
9720 if (footerPresent) {
9721 return returnSize + 20;
9722 }
9723 return returnSize + 10;
9724 };
9725
9726 this.parseAdtsSize = function(header, byteIndex) {
9727 var
9728 lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
9729 middle = header[byteIndex + 4] << 3,
9730 highTwo = header[byteIndex + 3] & 0x3 << 11;
9731
9732 return (highTwo | middle) | lowThree;
9733 };
9734
9735 this.push = function(bytes) {
9736 var
9737 frameSize = 0,
9738 byteIndex = 0,
9739 bytesLeft,
9740 chunk,
9741 packet,
9742 tempLength;
9743
9744 // If there are bytes remaining from the last segment, prepend them to the
9745 // bytes that were pushed in
9746 if (everything.length) {
9747 tempLength = everything.length;
9748 everything = new Uint8Array(bytes.byteLength + tempLength);
9749 everything.set(everything.subarray(0, tempLength));
9750 everything.set(bytes, tempLength);
9751 } else {
9752 everything = bytes;
9753 }
9754
9755 while (everything.length - byteIndex >= 3) {
9756 if ((everything[byteIndex] === 'I'.charCodeAt(0)) &&
9757 (everything[byteIndex + 1] === 'D'.charCodeAt(0)) &&
9758 (everything[byteIndex + 2] === '3'.charCodeAt(0))) {
9759
9760 // Exit early because we don't have enough to parse
9761 // the ID3 tag header
9762 if (everything.length - byteIndex < 10) {
9763 break;
9764 }
9765
9766 // check framesize
9767 frameSize = this.parseId3TagSize(everything, byteIndex);
9768
9769 // Exit early if we don't have enough in the buffer
9770 // to emit a full packet
9771 if (frameSize > everything.length) {
9772 break;
9773 }
9774 chunk = {
9775 type: 'timed-metadata',
9776 data: everything.subarray(byteIndex, byteIndex + frameSize)
9777 };
9778 this.trigger('data', chunk);
9779 byteIndex += frameSize;
9780 continue;
9781 } else if ((everything[byteIndex] & 0xff === 0xff) &&
9782 ((everything[byteIndex + 1] & 0xf0) === 0xf0)) {
9783
9784 // Exit early because we don't have enough to parse
9785 // the ADTS frame header
9786 if (everything.length - byteIndex < 7) {
9787 break;
9788 }
9789
9790 frameSize = this.parseAdtsSize(everything, byteIndex);
9791
9792 // Exit early if we don't have enough in the buffer
9793 // to emit a full packet
9794 if (frameSize > everything.length) {
9795 break;
9796 }
9797
9798 packet = {
9799 type: 'audio',
9800 data: everything.subarray(byteIndex, byteIndex + frameSize),
9801 pts: timeStamp,
9802 dts: timeStamp
9803 };
9804 this.trigger('data', packet);
9805 byteIndex += frameSize;
9806 continue;
9807 }
9808 byteIndex++;
9809 }
9810 bytesLeft = everything.length - byteIndex;
9811
9812 if (bytesLeft > 0) {
9813 everything = everything.subarray(byteIndex);
9814 } else {
9815 everything = new Uint8Array();
9816 }
9817 };
9818};
9819
9820AacStream.prototype = new Stream();
9821
9822module.exports = AacStream;
9823
9824},{"../utils/stream.js":60}],37:[function(require,module,exports){
9825/**
9826 * mux.js
9827 *
9828 * Copyright (c) 2016 Brightcove
9829 * All rights reserved.
9830 *
9831 * Utilities to detect basic properties and metadata about Aac data.
9832 */
9833'use strict';
9834
9835var ADTS_SAMPLING_FREQUENCIES = [
9836 96000,
9837 88200,
9838 64000,
9839 48000,
9840 44100,
9841 32000,
9842 24000,
9843 22050,
9844 16000,
9845 12000,
9846 11025,
9847 8000,
9848 7350
9849];
9850
9851var parseSyncSafeInteger = function(data) {
9852 return (data[0] << 21) |
9853 (data[1] << 14) |
9854 (data[2] << 7) |
9855 (data[3]);
9856};
9857
9858// return a percent-encoded representation of the specified byte range
9859// @see http://en.wikipedia.org/wiki/Percent-encoding
9860var percentEncode = function(bytes, start, end) {
9861 var i, result = '';
9862 for (i = start; i < end; i++) {
9863 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
9864 }
9865 return result;
9866};
9867
9868// return the string representation of the specified byte range,
9869// interpreted as ISO-8859-1.
9870var parseIso88591 = function(bytes, start, end) {
9871 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
9872};
9873
9874var parseId3TagSize = function(header, byteIndex) {
9875 var
9876 returnSize = (header[byteIndex + 6] << 21) |
9877 (header[byteIndex + 7] << 14) |
9878 (header[byteIndex + 8] << 7) |
9879 (header[byteIndex + 9]),
9880 flags = header[byteIndex + 5],
9881 footerPresent = (flags & 16) >> 4;
9882
9883 if (footerPresent) {
9884 return returnSize + 20;
9885 }
9886 return returnSize + 10;
9887};
9888
9889var parseAdtsSize = function(header, byteIndex) {
9890 var
9891 lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
9892 middle = header[byteIndex + 4] << 3,
9893 highTwo = header[byteIndex + 3] & 0x3 << 11;
9894
9895 return (highTwo | middle) | lowThree;
9896};
9897
9898var parseType = function(header, byteIndex) {
9899 if ((header[byteIndex] === 'I'.charCodeAt(0)) &&
9900 (header[byteIndex + 1] === 'D'.charCodeAt(0)) &&
9901 (header[byteIndex + 2] === '3'.charCodeAt(0))) {
9902 return 'timed-metadata';
9903 } else if ((header[byteIndex] & 0xff === 0xff) &&
9904 ((header[byteIndex + 1] & 0xf0) === 0xf0)) {
9905 return 'audio';
9906 }
9907 return null;
9908};
9909
9910var parseSampleRate = function(packet) {
9911 var i = 0;
9912
9913 while (i + 5 < packet.length) {
9914 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
9915 // If a valid header was not found, jump one forward and attempt to
9916 // find a valid ADTS header starting at the next byte
9917 i++;
9918 continue;
9919 }
9920 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
9921 }
9922
9923 return null;
9924};
9925
9926var parseAacTimestamp = function(packet) {
9927 var frameStart, frameSize, frame, frameHeader;
9928
9929 // find the start of the first frame and the end of the tag
9930 frameStart = 10;
9931 if (packet[5] & 0x40) {
9932 // advance the frame start past the extended header
9933 frameStart += 4; // header size field
9934 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
9935 }
9936
9937 // parse one or more ID3 frames
9938 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
9939 do {
9940 // determine the number of bytes in this frame
9941 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
9942 if (frameSize < 1) {
9943 return null;
9944 }
9945 frameHeader = String.fromCharCode(packet[frameStart],
9946 packet[frameStart + 1],
9947 packet[frameStart + 2],
9948 packet[frameStart + 3]);
9949
9950 if (frameHeader === 'PRIV') {
9951 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
9952
9953 for (var i = 0; i < frame.byteLength; i++) {
9954 if (frame[i] === 0) {
9955 var owner = parseIso88591(frame, 0, i);
9956 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
9957 var d = frame.subarray(i + 1);
9958 var size = ((d[3] & 0x01) << 30) |
9959 (d[4] << 22) |
9960 (d[5] << 14) |
9961 (d[6] << 6) |
9962 (d[7] >>> 2);
9963 size *= 4;
9964 size += d[7] & 0x03;
9965
9966 return size;
9967 }
9968 break;
9969 }
9970 }
9971 }
9972
9973 frameStart += 10; // advance past the frame header
9974 frameStart += frameSize; // advance past the frame body
9975 } while (frameStart < packet.byteLength);
9976 return null;
9977};
9978
9979module.exports = {
9980 parseId3TagSize: parseId3TagSize,
9981 parseAdtsSize: parseAdtsSize,
9982 parseType: parseType,
9983 parseSampleRate: parseSampleRate,
9984 parseAacTimestamp: parseAacTimestamp
9985};
9986
9987},{}],38:[function(require,module,exports){
9988'use strict';
9989
9990var Stream = require('../utils/stream.js');
9991
9992var AdtsStream;
9993
9994var
9995 ADTS_SAMPLING_FREQUENCIES = [
9996 96000,
9997 88200,
9998 64000,
9999 48000,
10000 44100,
10001 32000,
10002 24000,
10003 22050,
10004 16000,
10005 12000,
10006 11025,
10007 8000,
10008 7350
10009 ];
10010
10011/*
10012 * Accepts a ElementaryStream and emits data events with parsed
10013 * AAC Audio Frames of the individual packets. Input audio in ADTS
10014 * format is unpacked and re-emitted as AAC frames.
10015 *
10016 * @see http://wiki.multimedia.cx/index.php?title=ADTS
10017 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
10018 */
10019AdtsStream = function() {
10020 var buffer;
10021
10022 AdtsStream.prototype.init.call(this);
10023
10024 this.push = function(packet) {
10025 var
10026 i = 0,
10027 frameNum = 0,
10028 frameLength,
10029 protectionSkipBytes,
10030 frameEnd,
10031 oldBuffer,
10032 sampleCount,
10033 adtsFrameDuration;
10034
10035 if (packet.type !== 'audio') {
10036 // ignore non-audio data
10037 return;
10038 }
10039
10040 // Prepend any data in the buffer to the input data so that we can parse
10041 // aac frames the cross a PES packet boundary
10042 if (buffer) {
10043 oldBuffer = buffer;
10044 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
10045 buffer.set(oldBuffer);
10046 buffer.set(packet.data, oldBuffer.byteLength);
10047 } else {
10048 buffer = packet.data;
10049 }
10050
10051 // unpack any ADTS frames which have been fully received
10052 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
10053 while (i + 5 < buffer.length) {
10054
10055 // Loook for the start of an ADTS header..
10056 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
10057 // If a valid header was not found, jump one forward and attempt to
10058 // find a valid ADTS header starting at the next byte
10059 i++;
10060 continue;
10061 }
10062
10063 // The protection skip bit tells us if we have 2 bytes of CRC data at the
10064 // end of the ADTS header
10065 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;
10066
10067 // Frame length is a 13 bit integer starting 16 bits from the
10068 // end of the sync sequence
10069 frameLength = ((buffer[i + 3] & 0x03) << 11) |
10070 (buffer[i + 4] << 3) |
10071 ((buffer[i + 5] & 0xe0) >> 5);
10072
10073 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
10074 adtsFrameDuration = (sampleCount * 90000) /
10075 ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];
10076
10077 frameEnd = i + frameLength;
10078
10079 // If we don't have enough data to actually finish this ADTS frame, return
10080 // and wait for more data
10081 if (buffer.byteLength < frameEnd) {
10082 return;
10083 }
10084
10085 // Otherwise, deliver the complete AAC frame
10086 this.trigger('data', {
10087 pts: packet.pts + (frameNum * adtsFrameDuration),
10088 dts: packet.dts + (frameNum * adtsFrameDuration),
10089 sampleCount: sampleCount,
10090 audioobjecttype: ((buffer[i + 2] >>> 6) & 0x03) + 1,
10091 channelcount: ((buffer[i + 2] & 1) << 2) |
10092 ((buffer[i + 3] & 0xc0) >>> 6),
10093 samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
10094 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
10095 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
10096 samplesize: 16,
10097 data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
10098 });
10099
10100 // If the buffer is empty, clear it and return
10101 if (buffer.byteLength === frameEnd) {
10102 buffer = undefined;
10103 return;
10104 }
10105
10106 frameNum++;
10107
10108 // Remove the finished frame from the buffer and start the process again
10109 buffer = buffer.subarray(frameEnd);
10110 }
10111 };
10112 this.flush = function() {
10113 this.trigger('done');
10114 };
10115};
10116
10117AdtsStream.prototype = new Stream();
10118
10119module.exports = AdtsStream;
10120
10121},{"../utils/stream.js":60}],39:[function(require,module,exports){
10122'use strict';
10123
10124var Stream = require('../utils/stream.js');
10125var ExpGolomb = require('../utils/exp-golomb.js');
10126
10127var H264Stream, NalByteStream;
10128var PROFILES_WITH_OPTIONAL_SPS_DATA;
10129
10130/**
10131 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
10132 */
10133NalByteStream = function() {
10134 var
10135 syncPoint = 0,
10136 i,
10137 buffer;
10138 NalByteStream.prototype.init.call(this);
10139
10140 this.push = function(data) {
10141 var swapBuffer;
10142
10143 if (!buffer) {
10144 buffer = data.data;
10145 } else {
10146 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
10147 swapBuffer.set(buffer);
10148 swapBuffer.set(data.data, buffer.byteLength);
10149 buffer = swapBuffer;
10150 }
10151
10152 // Rec. ITU-T H.264, Annex B
10153 // scan for NAL unit boundaries
10154
10155 // a match looks like this:
10156 // 0 0 1 .. NAL .. 0 0 1
10157 // ^ sync point ^ i
10158 // or this:
10159 // 0 0 1 .. NAL .. 0 0 0
10160 // ^ sync point ^ i
10161
10162 // advance the sync point to a NAL start, if necessary
10163 for (; syncPoint < buffer.byteLength - 3; syncPoint++) {
10164 if (buffer[syncPoint + 2] === 1) {
10165 // the sync point is properly aligned
10166 i = syncPoint + 5;
10167 break;
10168 }
10169 }
10170
10171 while (i < buffer.byteLength) {
10172 // look at the current byte to determine if we've hit the end of
10173 // a NAL unit boundary
10174 switch (buffer[i]) {
10175 case 0:
10176 // skip past non-sync sequences
10177 if (buffer[i - 1] !== 0) {
10178 i += 2;
10179 break;
10180 } else if (buffer[i - 2] !== 0) {
10181 i++;
10182 break;
10183 }
10184
10185 // deliver the NAL unit if it isn't empty
10186 if (syncPoint + 3 !== i - 2) {
10187 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
10188 }
10189
10190 // drop trailing zeroes
10191 do {
10192 i++;
10193 } while (buffer[i] !== 1 && i < buffer.length);
10194 syncPoint = i - 2;
10195 i += 3;
10196 break;
10197 case 1:
10198 // skip past non-sync sequences
10199 if (buffer[i - 1] !== 0 ||
10200 buffer[i - 2] !== 0) {
10201 i += 3;
10202 break;
10203 }
10204
10205 // deliver the NAL unit
10206 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
10207 syncPoint = i - 2;
10208 i += 3;
10209 break;
10210 default:
10211 // the current byte isn't a one or zero, so it cannot be part
10212 // of a sync sequence
10213 i += 3;
10214 break;
10215 }
10216 }
10217 // filter out the NAL units that were delivered
10218 buffer = buffer.subarray(syncPoint);
10219 i -= syncPoint;
10220 syncPoint = 0;
10221 };
10222
10223 this.flush = function() {
10224 // deliver the last buffered NAL unit
10225 if (buffer && buffer.byteLength > 3) {
10226 this.trigger('data', buffer.subarray(syncPoint + 3));
10227 }
10228 // reset the stream state
10229 buffer = null;
10230 syncPoint = 0;
10231 this.trigger('done');
10232 };
10233};
10234NalByteStream.prototype = new Stream();
10235
10236// values of profile_idc that indicate additional fields are included in the SPS
10237// see Recommendation ITU-T H.264 (4/2013),
10238// 7.3.2.1.1 Sequence parameter set data syntax
10239PROFILES_WITH_OPTIONAL_SPS_DATA = {
10240 100: true,
10241 110: true,
10242 122: true,
10243 244: true,
10244 44: true,
10245 83: true,
10246 86: true,
10247 118: true,
10248 128: true,
10249 138: true,
10250 139: true,
10251 134: true
10252};
10253
10254/**
10255 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
10256 * events.
10257 */
10258H264Stream = function() {
10259 var
10260 nalByteStream = new NalByteStream(),
10261 self,
10262 trackId,
10263 currentPts,
10264 currentDts,
10265
10266 discardEmulationPreventionBytes,
10267 readSequenceParameterSet,
10268 skipScalingList;
10269
10270 H264Stream.prototype.init.call(this);
10271 self = this;
10272
10273 this.push = function(packet) {
10274 if (packet.type !== 'video') {
10275 return;
10276 }
10277 trackId = packet.trackId;
10278 currentPts = packet.pts;
10279 currentDts = packet.dts;
10280
10281 nalByteStream.push(packet);
10282 };
10283
10284 nalByteStream.on('data', function(data) {
10285 var
10286 event = {
10287 trackId: trackId,
10288 pts: currentPts,
10289 dts: currentDts,
10290 data: data
10291 };
10292
10293 switch (data[0] & 0x1f) {
10294 case 0x05:
10295 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
10296 break;
10297 case 0x06:
10298 event.nalUnitType = 'sei_rbsp';
10299 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
10300 break;
10301 case 0x07:
10302 event.nalUnitType = 'seq_parameter_set_rbsp';
10303 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
10304 event.config = readSequenceParameterSet(event.escapedRBSP);
10305 break;
10306 case 0x08:
10307 event.nalUnitType = 'pic_parameter_set_rbsp';
10308 break;
10309 case 0x09:
10310 event.nalUnitType = 'access_unit_delimiter_rbsp';
10311 break;
10312
10313 default:
10314 break;
10315 }
10316 self.trigger('data', event);
10317 });
10318 nalByteStream.on('done', function() {
10319 self.trigger('done');
10320 });
10321
10322 this.flush = function() {
10323 nalByteStream.flush();
10324 };
10325
10326 /**
10327 * Advance the ExpGolomb decoder past a scaling list. The scaling
10328 * list is optionally transmitted as part of a sequence parameter
10329 * set and is not relevant to transmuxing.
10330 * @param count {number} the number of entries in this scaling list
10331 * @param expGolombDecoder {object} an ExpGolomb pointed to the
10332 * start of a scaling list
10333 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
10334 */
10335 skipScalingList = function(count, expGolombDecoder) {
10336 var
10337 lastScale = 8,
10338 nextScale = 8,
10339 j,
10340 deltaScale;
10341
10342 for (j = 0; j < count; j++) {
10343 if (nextScale !== 0) {
10344 deltaScale = expGolombDecoder.readExpGolomb();
10345 nextScale = (lastScale + deltaScale + 256) % 256;
10346 }
10347
10348 lastScale = (nextScale === 0) ? lastScale : nextScale;
10349 }
10350 };
10351
10352 /**
10353 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
10354 * Sequence Payload"
10355 * @param data {Uint8Array} the bytes of a RBSP from a NAL
10356 * unit
10357 * @return {Uint8Array} the RBSP without any Emulation
10358 * Prevention Bytes
10359 */
10360 discardEmulationPreventionBytes = function(data) {
10361 var
10362 length = data.byteLength,
10363 emulationPreventionBytesPositions = [],
10364 i = 1,
10365 newLength, newData;
10366
10367 // Find all `Emulation Prevention Bytes`
10368 while (i < length - 2) {
10369 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
10370 emulationPreventionBytesPositions.push(i + 2);
10371 i += 2;
10372 } else {
10373 i++;
10374 }
10375 }
10376
10377 // If no Emulation Prevention Bytes were found just return the original
10378 // array
10379 if (emulationPreventionBytesPositions.length === 0) {
10380 return data;
10381 }
10382
10383 // Create a new array to hold the NAL unit data
10384 newLength = length - emulationPreventionBytesPositions.length;
10385 newData = new Uint8Array(newLength);
10386 var sourceIndex = 0;
10387
10388 for (i = 0; i < newLength; sourceIndex++, i++) {
10389 if (sourceIndex === emulationPreventionBytesPositions[0]) {
10390 // Skip this byte
10391 sourceIndex++;
10392 // Remove this position index
10393 emulationPreventionBytesPositions.shift();
10394 }
10395 newData[i] = data[sourceIndex];
10396 }
10397
10398 return newData;
10399 };
10400
10401 /**
10402 * Read a sequence parameter set and return some interesting video
10403 * properties. A sequence parameter set is the H264 metadata that
10404 * describes the properties of upcoming video frames.
10405 * @param data {Uint8Array} the bytes of a sequence parameter set
10406 * @return {object} an object with configuration parsed from the
10407 * sequence parameter set, including the dimensions of the
10408 * associated video frames.
10409 */
10410 readSequenceParameterSet = function(data) {
10411 var
10412 frameCropLeftOffset = 0,
10413 frameCropRightOffset = 0,
10414 frameCropTopOffset = 0,
10415 frameCropBottomOffset = 0,
10416 sarScale = 1,
10417 expGolombDecoder, profileIdc, levelIdc, profileCompatibility,
10418 chromaFormatIdc, picOrderCntType,
10419 numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
10420 picHeightInMapUnitsMinus1,
10421 frameMbsOnlyFlag,
10422 scalingListCount,
10423 sarRatio,
10424 aspectRatioIdc,
10425 i;
10426
10427 expGolombDecoder = new ExpGolomb(data);
10428 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
10429 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
10430 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
10431 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
10432
10433 // some profiles have more optional data we don't need
10434 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
10435 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
10436 if (chromaFormatIdc === 3) {
10437 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
10438 }
10439 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
10440 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
10441 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
10442 if (expGolombDecoder.readBoolean()) { // seq_scaling_matrix_present_flag
10443 scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
10444 for (i = 0; i < scalingListCount; i++) {
10445 if (expGolombDecoder.readBoolean()) { // seq_scaling_list_present_flag[ i ]
10446 if (i < 6) {
10447 skipScalingList(16, expGolombDecoder);
10448 } else {
10449 skipScalingList(64, expGolombDecoder);
10450 }
10451 }
10452 }
10453 }
10454 }
10455
10456 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
10457 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
10458
10459 if (picOrderCntType === 0) {
10460 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
10461 } else if (picOrderCntType === 1) {
10462 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
10463 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
10464 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
10465 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
10466 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
10467 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
10468 }
10469 }
10470
10471 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
10472 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
10473
10474 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
10475 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
10476
10477 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
10478 if (frameMbsOnlyFlag === 0) {
10479 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
10480 }
10481
10482 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
10483 if (expGolombDecoder.readBoolean()) { // frame_cropping_flag
10484 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
10485 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
10486 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
10487 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
10488 }
10489 if (expGolombDecoder.readBoolean()) {
10490 // vui_parameters_present_flag
10491 if (expGolombDecoder.readBoolean()) {
10492 // aspect_ratio_info_present_flag
10493 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
10494 switch (aspectRatioIdc) {
10495 case 1: sarRatio = [1, 1]; break;
10496 case 2: sarRatio = [12, 11]; break;
10497 case 3: sarRatio = [10, 11]; break;
10498 case 4: sarRatio = [16, 11]; break;
10499 case 5: sarRatio = [40, 33]; break;
10500 case 6: sarRatio = [24, 11]; break;
10501 case 7: sarRatio = [20, 11]; break;
10502 case 8: sarRatio = [32, 11]; break;
10503 case 9: sarRatio = [80, 33]; break;
10504 case 10: sarRatio = [18, 11]; break;
10505 case 11: sarRatio = [15, 11]; break;
10506 case 12: sarRatio = [64, 33]; break;
10507 case 13: sarRatio = [160, 99]; break;
10508 case 14: sarRatio = [4, 3]; break;
10509 case 15: sarRatio = [3, 2]; break;
10510 case 16: sarRatio = [2, 1]; break;
10511 case 255: {
10512 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 |
10513 expGolombDecoder.readUnsignedByte(),
10514 expGolombDecoder.readUnsignedByte() << 8 |
10515 expGolombDecoder.readUnsignedByte() ];
10516 break;
10517 }
10518 }
10519 if (sarRatio) {
10520 sarScale = sarRatio[0] / sarRatio[1];
10521 }
10522 }
10523 }
10524 return {
10525 profileIdc: profileIdc,
10526 levelIdc: levelIdc,
10527 profileCompatibility: profileCompatibility,
10528 width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
10529 height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - (frameCropTopOffset * 2) - (frameCropBottomOffset * 2)
10530 };
10531 };
10532
10533};
10534H264Stream.prototype = new Stream();
10535
10536module.exports = {
10537 H264Stream: H264Stream,
10538 NalByteStream: NalByteStream
10539};
10540
10541},{"../utils/exp-golomb.js":59,"../utils/stream.js":60}],40:[function(require,module,exports){
10542var highPrefix = [33, 16, 5, 32, 164, 27];
10543var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
10544var zeroFill = function(count) {
10545 var a = [];
10546 while (count--) {
10547 a.push(0);
10548 }
10549 return a;
10550};
10551
10552var makeTable = function(metaTable) {
10553 return Object.keys(metaTable).reduce(function(obj, key) {
10554 obj[key] = new Uint8Array(metaTable[key].reduce(function(arr, part) {
10555 return arr.concat(part);
10556 }, []));
10557 return obj;
10558 }, {});
10559};
10560
10561// Frames-of-silence to use for filling in missing AAC frames
10562var coneOfSilence = {
10563 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
10564 88200: [highPrefix, [231], zeroFill(170), [56]],
10565 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
10566 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
10567 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
10568 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
10569 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
10570 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
10571 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
10572 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
10573 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
10574};
10575
10576module.exports = makeTable(coneOfSilence);
10577
10578},{}],41:[function(require,module,exports){
10579'use strict';
10580
10581var Stream = require('../utils/stream.js');
10582
10583/**
10584 * The final stage of the transmuxer that emits the flv tags
10585 * for audio, video, and metadata. Also tranlates in time and
10586 * outputs caption data and id3 cues.
10587 */
10588var CoalesceStream = function(options) {
10589 // Number of Tracks per output segment
10590 // If greater than 1, we combine multiple
10591 // tracks into a single segment
10592 this.numberOfTracks = 0;
10593 this.metadataStream = options.metadataStream;
10594
10595 this.videoTags = [];
10596 this.audioTags = [];
10597 this.videoTrack = null;
10598 this.audioTrack = null;
10599 this.pendingCaptions = [];
10600 this.pendingMetadata = [];
10601 this.pendingTracks = 0;
10602 this.processedTracks = 0;
10603
10604 CoalesceStream.prototype.init.call(this);
10605
10606 // Take output from multiple
10607 this.push = function(output) {
10608 // buffer incoming captions until the associated video segment
10609 // finishes
10610 if (output.text) {
10611 return this.pendingCaptions.push(output);
10612 }
10613 // buffer incoming id3 tags until the final flush
10614 if (output.frames) {
10615 return this.pendingMetadata.push(output);
10616 }
10617
10618 if (output.track.type === 'video') {
10619 this.videoTrack = output.track;
10620 this.videoTags = output.tags;
10621 this.pendingTracks++;
10622 }
10623 if (output.track.type === 'audio') {
10624 this.audioTrack = output.track;
10625 this.audioTags = output.tags;
10626 this.pendingTracks++;
10627 }
10628 };
10629};
10630
10631CoalesceStream.prototype = new Stream();
10632CoalesceStream.prototype.flush = function(flushSource) {
10633 var
10634 id3,
10635 caption,
10636 i,
10637 timelineStartPts,
10638 event = {
10639 tags: {},
10640 captions: [],
10641 metadata: []
10642 };
10643
10644 if (this.pendingTracks < this.numberOfTracks) {
10645 if (flushSource !== 'VideoSegmentStream' &&
10646 flushSource !== 'AudioSegmentStream') {
10647 // Return because we haven't received a flush from a data-generating
10648 // portion of the segment (meaning that we have only recieved meta-data
10649 // or captions.)
10650 return;
10651 } else if (this.pendingTracks === 0) {
10652 // In the case where we receive a flush without any data having been
10653 // received we consider it an emitted track for the purposes of coalescing
10654 // `done` events.
10655 // We do this for the case where there is an audio and video track in the
10656 // segment but no audio data. (seen in several playlists with alternate
10657 // audio tracks and no audio present in the main TS segments.)
10658 this.processedTracks++;
10659
10660 if (this.processedTracks < this.numberOfTracks) {
10661 return;
10662 }
10663 }
10664 }
10665
10666 this.processedTracks += this.pendingTracks;
10667 this.pendingTracks = 0;
10668
10669 if (this.processedTracks < this.numberOfTracks) {
10670 return;
10671 }
10672
10673 if (this.videoTrack) {
10674 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
10675 } else if (this.audioTrack) {
10676 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
10677 }
10678
10679 event.tags.videoTags = this.videoTags;
10680 event.tags.audioTags = this.audioTags;
10681
10682 // Translate caption PTS times into second offsets into the
10683 // video timeline for the segment
10684 for (i = 0; i < this.pendingCaptions.length; i++) {
10685 caption = this.pendingCaptions[i];
10686 caption.startTime = caption.startPts - timelineStartPts;
10687 caption.startTime /= 90e3;
10688 caption.endTime = caption.endPts - timelineStartPts;
10689 caption.endTime /= 90e3;
10690 event.captions.push(caption);
10691 }
10692
10693 // Translate ID3 frame PTS times into second offsets into the
10694 // video timeline for the segment
10695 for (i = 0; i < this.pendingMetadata.length; i++) {
10696 id3 = this.pendingMetadata[i];
10697 id3.cueTime = id3.pts - timelineStartPts;
10698 id3.cueTime /= 90e3;
10699 event.metadata.push(id3);
10700 }
10701 // We add this to every single emitted segment even though we only need
10702 // it for the first
10703 event.metadata.dispatchType = this.metadataStream.dispatchType;
10704
10705 // Reset stream state
10706 this.videoTrack = null;
10707 this.audioTrack = null;
10708 this.videoTags = [];
10709 this.audioTags = [];
10710 this.pendingCaptions.length = 0;
10711 this.pendingMetadata.length = 0;
10712 this.pendingTracks = 0;
10713 this.processedTracks = 0;
10714
10715 // Emit the final segment
10716 this.trigger('data', event);
10717
10718 this.trigger('done');
10719};
10720
10721module.exports = CoalesceStream;
10722
10723},{"../utils/stream.js":60}],42:[function(require,module,exports){
10724'use strict';
10725
10726var FlvTag = require('./flv-tag.js');
10727
10728// For information on the FLV format, see
10729// http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf.
10730// Technically, this function returns the header and a metadata FLV tag
10731// if duration is greater than zero
10732// duration in seconds
10733// @return {object} the bytes of the FLV header as a Uint8Array
10734var getFlvHeader = function(duration, audio, video) { // :ByteArray {
10735 var
10736 headBytes = new Uint8Array(3 + 1 + 1 + 4),
10737 head = new DataView(headBytes.buffer),
10738 metadata,
10739 result,
10740 metadataLength;
10741
10742 // default arguments
10743 duration = duration || 0;
10744 audio = audio === undefined ? true : audio;
10745 video = video === undefined ? true : video;
10746
10747 // signature
10748 head.setUint8(0, 0x46); // 'F'
10749 head.setUint8(1, 0x4c); // 'L'
10750 head.setUint8(2, 0x56); // 'V'
10751
10752 // version
10753 head.setUint8(3, 0x01);
10754
10755 // flags
10756 head.setUint8(4, (audio ? 0x04 : 0x00) | (video ? 0x01 : 0x00));
10757
10758 // data offset, should be 9 for FLV v1
10759 head.setUint32(5, headBytes.byteLength);
10760
10761 // init the first FLV tag
10762 if (duration <= 0) {
10763 // no duration available so just write the first field of the first
10764 // FLV tag
10765 result = new Uint8Array(headBytes.byteLength + 4);
10766 result.set(headBytes);
10767 result.set([0, 0, 0, 0], headBytes.byteLength);
10768 return result;
10769 }
10770
10771 // write out the duration metadata tag
10772 metadata = new FlvTag(FlvTag.METADATA_TAG);
10773 metadata.pts = metadata.dts = 0;
10774 metadata.writeMetaDataDouble('duration', duration);
10775 metadataLength = metadata.finalize().length;
10776 result = new Uint8Array(headBytes.byteLength + metadataLength);
10777 result.set(headBytes);
10778 result.set(head.byteLength, metadataLength);
10779
10780 return result;
10781};
10782
10783module.exports = getFlvHeader;
10784
10785},{"./flv-tag.js":43}],43:[function(require,module,exports){
10786/**
10787 * An object that stores the bytes of an FLV tag and methods for
10788 * querying and manipulating that data.
10789 * @see http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf
10790 */
10791'use strict';
10792
10793var FlvTag;
10794
10795// (type:uint, extraData:Boolean = false) extends ByteArray
10796FlvTag = function(type, extraData) {
10797 var
10798 // Counter if this is a metadata tag, nal start marker if this is a video
10799 // tag. unused if this is an audio tag
10800 adHoc = 0, // :uint
10801
10802 // The default size is 16kb but this is not enough to hold iframe
10803 // data and the resizing algorithm costs a bit so we create a larger
10804 // starting buffer for video tags
10805 bufferStartSize = 16384,
10806
10807 // checks whether the FLV tag has enough capacity to accept the proposed
10808 // write and re-allocates the internal buffers if necessary
10809 prepareWrite = function(flv, count) {
10810 var
10811 bytes,
10812 minLength = flv.position + count;
10813 if (minLength < flv.bytes.byteLength) {
10814 // there's enough capacity so do nothing
10815 return;
10816 }
10817
10818 // allocate a new buffer and copy over the data that will not be modified
10819 bytes = new Uint8Array(minLength * 2);
10820 bytes.set(flv.bytes.subarray(0, flv.position), 0);
10821 flv.bytes = bytes;
10822 flv.view = new DataView(flv.bytes.buffer);
10823 },
10824
10825 // commonly used metadata properties
10826 widthBytes = FlvTag.widthBytes || new Uint8Array('width'.length),
10827 heightBytes = FlvTag.heightBytes || new Uint8Array('height'.length),
10828 videocodecidBytes = FlvTag.videocodecidBytes || new Uint8Array('videocodecid'.length),
10829 i;
10830
10831 if (!FlvTag.widthBytes) {
10832 // calculating the bytes of common metadata names ahead of time makes the
10833 // corresponding writes faster because we don't have to loop over the
10834 // characters
10835 // re-test with test/perf.html if you're planning on changing this
10836 for (i = 0; i < 'width'.length; i++) {
10837 widthBytes[i] = 'width'.charCodeAt(i);
10838 }
10839 for (i = 0; i < 'height'.length; i++) {
10840 heightBytes[i] = 'height'.charCodeAt(i);
10841 }
10842 for (i = 0; i < 'videocodecid'.length; i++) {
10843 videocodecidBytes[i] = 'videocodecid'.charCodeAt(i);
10844 }
10845
10846 FlvTag.widthBytes = widthBytes;
10847 FlvTag.heightBytes = heightBytes;
10848 FlvTag.videocodecidBytes = videocodecidBytes;
10849 }
10850
10851 this.keyFrame = false; // :Boolean
10852
10853 switch (type) {
10854 case FlvTag.VIDEO_TAG:
10855 this.length = 16;
10856 // Start the buffer at 256k
10857 bufferStartSize *= 6;
10858 break;
10859 case FlvTag.AUDIO_TAG:
10860 this.length = 13;
10861 this.keyFrame = true;
10862 break;
10863 case FlvTag.METADATA_TAG:
10864 this.length = 29;
10865 this.keyFrame = true;
10866 break;
10867 default:
10868 throw new Error('Unknown FLV tag type');
10869 }
10870
10871 this.bytes = new Uint8Array(bufferStartSize);
10872 this.view = new DataView(this.bytes.buffer);
10873 this.bytes[0] = type;
10874 this.position = this.length;
10875 this.keyFrame = extraData; // Defaults to false
10876
10877 // presentation timestamp
10878 this.pts = 0;
10879 // decoder timestamp
10880 this.dts = 0;
10881
10882 // ByteArray#writeBytes(bytes:ByteArray, offset:uint = 0, length:uint = 0)
10883 this.writeBytes = function(bytes, offset, length) {
10884 var
10885 start = offset || 0,
10886 end;
10887 length = length || bytes.byteLength;
10888 end = start + length;
10889
10890 prepareWrite(this, length);
10891 this.bytes.set(bytes.subarray(start, end), this.position);
10892
10893 this.position += length;
10894 this.length = Math.max(this.length, this.position);
10895 };
10896
10897 // ByteArray#writeByte(value:int):void
10898 this.writeByte = function(byte) {
10899 prepareWrite(this, 1);
10900 this.bytes[this.position] = byte;
10901 this.position++;
10902 this.length = Math.max(this.length, this.position);
10903 };
10904
10905 // ByteArray#writeShort(value:int):void
10906 this.writeShort = function(short) {
10907 prepareWrite(this, 2);
10908 this.view.setUint16(this.position, short);
10909 this.position += 2;
10910 this.length = Math.max(this.length, this.position);
10911 };
10912
10913 // Negative index into array
10914 // (pos:uint):int
10915 this.negIndex = function(pos) {
10916 return this.bytes[this.length - pos];
10917 };
10918
10919 // The functions below ONLY work when this[0] == VIDEO_TAG.
10920 // We are not going to check for that because we dont want the overhead
10921 // (nal:ByteArray = null):int
10922 this.nalUnitSize = function() {
10923 if (adHoc === 0) {
10924 return 0;
10925 }
10926
10927 return this.length - (adHoc + 4);
10928 };
10929
10930 this.startNalUnit = function() {
10931 // remember position and add 4 bytes
10932 if (adHoc > 0) {
10933 throw new Error('Attempted to create new NAL wihout closing the old one');
10934 }
10935
10936 // reserve 4 bytes for nal unit size
10937 adHoc = this.length;
10938 this.length += 4;
10939 this.position = this.length;
10940 };
10941
10942 // (nal:ByteArray = null):void
10943 this.endNalUnit = function(nalContainer) {
10944 var
10945 nalStart, // :uint
10946 nalLength; // :uint
10947
10948 // Rewind to the marker and write the size
10949 if (this.length === adHoc + 4) {
10950 // we started a nal unit, but didnt write one, so roll back the 4 byte size value
10951 this.length -= 4;
10952 } else if (adHoc > 0) {
10953 nalStart = adHoc + 4;
10954 nalLength = this.length - nalStart;
10955
10956 this.position = adHoc;
10957 this.view.setUint32(this.position, nalLength);
10958 this.position = this.length;
10959
10960 if (nalContainer) {
10961 // Add the tag to the NAL unit
10962 nalContainer.push(this.bytes.subarray(nalStart, nalStart + nalLength));
10963 }
10964 }
10965
10966 adHoc = 0;
10967 };
10968
10969 /**
10970 * Write out a 64-bit floating point valued metadata property. This method is
10971 * called frequently during a typical parse and needs to be fast.
10972 */
10973 // (key:String, val:Number):void
10974 this.writeMetaDataDouble = function(key, val) {
10975 var i;
10976 prepareWrite(this, 2 + key.length + 9);
10977
10978 // write size of property name
10979 this.view.setUint16(this.position, key.length);
10980 this.position += 2;
10981
10982 // this next part looks terrible but it improves parser throughput by
10983 // 10kB/s in my testing
10984
10985 // write property name
10986 if (key === 'width') {
10987 this.bytes.set(widthBytes, this.position);
10988 this.position += 5;
10989 } else if (key === 'height') {
10990 this.bytes.set(heightBytes, this.position);
10991 this.position += 6;
10992 } else if (key === 'videocodecid') {
10993 this.bytes.set(videocodecidBytes, this.position);
10994 this.position += 12;
10995 } else {
10996 for (i = 0; i < key.length; i++) {
10997 this.bytes[this.position] = key.charCodeAt(i);
10998 this.position++;
10999 }
11000 }
11001
11002 // skip null byte
11003 this.position++;
11004
11005 // write property value
11006 this.view.setFloat64(this.position, val);
11007 this.position += 8;
11008
11009 // update flv tag length
11010 this.length = Math.max(this.length, this.position);
11011 ++adHoc;
11012 };
11013
11014 // (key:String, val:Boolean):void
11015 this.writeMetaDataBoolean = function(key, val) {
11016 var i;
11017 prepareWrite(this, 2);
11018 this.view.setUint16(this.position, key.length);
11019 this.position += 2;
11020 for (i = 0; i < key.length; i++) {
11021 // if key.charCodeAt(i) >= 255, handle error
11022 prepareWrite(this, 1);
11023 this.bytes[this.position] = key.charCodeAt(i);
11024 this.position++;
11025 }
11026 prepareWrite(this, 2);
11027 this.view.setUint8(this.position, 0x01);
11028 this.position++;
11029 this.view.setUint8(this.position, val ? 0x01 : 0x00);
11030 this.position++;
11031 this.length = Math.max(this.length, this.position);
11032 ++adHoc;
11033 };
11034
11035 // ():ByteArray
11036 this.finalize = function() {
11037 var
11038 dtsDelta, // :int
11039 len; // :int
11040
11041 switch (this.bytes[0]) {
11042 // Video Data
11043 case FlvTag.VIDEO_TAG:
11044 // We only support AVC, 1 = key frame (for AVC, a seekable
11045 // frame), 2 = inter frame (for AVC, a non-seekable frame)
11046 this.bytes[11] = ((this.keyFrame || extraData) ? 0x10 : 0x20) | 0x07;
11047 this.bytes[12] = extraData ? 0x00 : 0x01;
11048
11049 dtsDelta = this.pts - this.dts;
11050 this.bytes[13] = (dtsDelta & 0x00FF0000) >>> 16;
11051 this.bytes[14] = (dtsDelta & 0x0000FF00) >>> 8;
11052 this.bytes[15] = (dtsDelta & 0x000000FF) >>> 0;
11053 break;
11054
11055 case FlvTag.AUDIO_TAG:
11056 this.bytes[11] = 0xAF; // 44 kHz, 16-bit stereo
11057 this.bytes[12] = extraData ? 0x00 : 0x01;
11058 break;
11059
11060 case FlvTag.METADATA_TAG:
11061 this.position = 11;
11062 this.view.setUint8(this.position, 0x02); // String type
11063 this.position++;
11064 this.view.setUint16(this.position, 0x0A); // 10 Bytes
11065 this.position += 2;
11066 // set "onMetaData"
11067 this.bytes.set([0x6f, 0x6e, 0x4d, 0x65,
11068 0x74, 0x61, 0x44, 0x61,
11069 0x74, 0x61], this.position);
11070 this.position += 10;
11071 this.bytes[this.position] = 0x08; // Array type
11072 this.position++;
11073 this.view.setUint32(this.position, adHoc);
11074 this.position = this.length;
11075 this.bytes.set([0, 0, 9], this.position);
11076 this.position += 3; // End Data Tag
11077 this.length = this.position;
11078 break;
11079 }
11080
11081 len = this.length - 11;
11082
11083 // write the DataSize field
11084 this.bytes[ 1] = (len & 0x00FF0000) >>> 16;
11085 this.bytes[ 2] = (len & 0x0000FF00) >>> 8;
11086 this.bytes[ 3] = (len & 0x000000FF) >>> 0;
11087 // write the Timestamp
11088 this.bytes[ 4] = (this.dts & 0x00FF0000) >>> 16;
11089 this.bytes[ 5] = (this.dts & 0x0000FF00) >>> 8;
11090 this.bytes[ 6] = (this.dts & 0x000000FF) >>> 0;
11091 this.bytes[ 7] = (this.dts & 0xFF000000) >>> 24;
11092 // write the StreamID
11093 this.bytes[ 8] = 0;
11094 this.bytes[ 9] = 0;
11095 this.bytes[10] = 0;
11096
11097 // Sometimes we're at the end of the view and have one slot to write a
11098 // uint32, so, prepareWrite of count 4, since, view is uint8
11099 prepareWrite(this, 4);
11100 this.view.setUint32(this.length, this.length);
11101 this.length += 4;
11102 this.position += 4;
11103
11104 // trim down the byte buffer to what is actually being used
11105 this.bytes = this.bytes.subarray(0, this.length);
11106 this.frameTime = FlvTag.frameTime(this.bytes);
11107 // if bytes.bytelength isn't equal to this.length, handle error
11108 return this;
11109 };
11110};
11111
11112FlvTag.AUDIO_TAG = 0x08; // == 8, :uint
11113FlvTag.VIDEO_TAG = 0x09; // == 9, :uint
11114FlvTag.METADATA_TAG = 0x12; // == 18, :uint
11115
11116// (tag:ByteArray):Boolean {
11117FlvTag.isAudioFrame = function(tag) {
11118 return FlvTag.AUDIO_TAG === tag[0];
11119};
11120
11121// (tag:ByteArray):Boolean {
11122FlvTag.isVideoFrame = function(tag) {
11123 return FlvTag.VIDEO_TAG === tag[0];
11124};
11125
11126// (tag:ByteArray):Boolean {
11127FlvTag.isMetaData = function(tag) {
11128 return FlvTag.METADATA_TAG === tag[0];
11129};
11130
11131// (tag:ByteArray):Boolean {
11132FlvTag.isKeyFrame = function(tag) {
11133 if (FlvTag.isVideoFrame(tag)) {
11134 return tag[11] === 0x17;
11135 }
11136
11137 if (FlvTag.isAudioFrame(tag)) {
11138 return true;
11139 }
11140
11141 if (FlvTag.isMetaData(tag)) {
11142 return true;
11143 }
11144
11145 return false;
11146};
11147
11148// (tag:ByteArray):uint {
11149FlvTag.frameTime = function(tag) {
11150 var pts = tag[ 4] << 16; // :uint
11151 pts |= tag[ 5] << 8;
11152 pts |= tag[ 6] << 0;
11153 pts |= tag[ 7] << 24;
11154 return pts;
11155};
11156
11157module.exports = FlvTag;
11158
11159},{}],44:[function(require,module,exports){
11160module.exports = {
11161 tag: require('./flv-tag'),
11162 Transmuxer: require('./transmuxer'),
11163 getFlvHeader: require('./flv-header')
11164};
11165
11166},{"./flv-header":42,"./flv-tag":43,"./transmuxer":46}],45:[function(require,module,exports){
11167'use strict';
11168
11169var TagList = function() {
11170 var self = this;
11171
11172 this.list = [];
11173
11174 this.push = function(tag) {
11175 this.list.push({
11176 bytes: tag.bytes,
11177 dts: tag.dts,
11178 pts: tag.pts,
11179 keyFrame: tag.keyFrame,
11180 metaDataTag: tag.metaDataTag
11181 });
11182 };
11183
11184 Object.defineProperty(this, 'length', {
11185 get: function() {
11186 return self.list.length;
11187 }
11188 });
11189};
11190
11191module.exports = TagList;
11192
11193},{}],46:[function(require,module,exports){
11194'use strict';
11195
11196var Stream = require('../utils/stream.js');
11197var FlvTag = require('./flv-tag.js');
11198var m2ts = require('../m2ts/m2ts.js');
11199var AdtsStream = require('../codecs/adts.js');
11200var H264Stream = require('../codecs/h264').H264Stream;
11201var CoalesceStream = require('./coalesce-stream.js');
11202var TagList = require('./tag-list.js');
11203
11204var
11205 Transmuxer,
11206 VideoSegmentStream,
11207 AudioSegmentStream,
11208 collectTimelineInfo,
11209 metaDataTag,
11210 extraDataTag;
11211
11212/**
11213 * Store information about the start and end of the tracka and the
11214 * duration for each frame/sample we process in order to calculate
11215 * the baseMediaDecodeTime
11216 */
11217collectTimelineInfo = function(track, data) {
11218 if (typeof data.pts === 'number') {
11219 if (track.timelineStartInfo.pts === undefined) {
11220 track.timelineStartInfo.pts = data.pts;
11221 } else {
11222 track.timelineStartInfo.pts =
11223 Math.min(track.timelineStartInfo.pts, data.pts);
11224 }
11225 }
11226
11227 if (typeof data.dts === 'number') {
11228 if (track.timelineStartInfo.dts === undefined) {
11229 track.timelineStartInfo.dts = data.dts;
11230 } else {
11231 track.timelineStartInfo.dts =
11232 Math.min(track.timelineStartInfo.dts, data.dts);
11233 }
11234 }
11235};
11236
11237metaDataTag = function(track, pts) {
11238 var
11239 tag = new FlvTag(FlvTag.METADATA_TAG); // :FlvTag
11240
11241 tag.dts = pts;
11242 tag.pts = pts;
11243
11244 tag.writeMetaDataDouble('videocodecid', 7);
11245 tag.writeMetaDataDouble('width', track.width);
11246 tag.writeMetaDataDouble('height', track.height);
11247
11248 return tag;
11249};
11250
11251extraDataTag = function(track, pts) {
11252 var
11253 i,
11254 tag = new FlvTag(FlvTag.VIDEO_TAG, true);
11255
11256 tag.dts = pts;
11257 tag.pts = pts;
11258
11259 tag.writeByte(0x01);// version
11260 tag.writeByte(track.profileIdc);// profile
11261 tag.writeByte(track.profileCompatibility);// compatibility
11262 tag.writeByte(track.levelIdc);// level
11263 tag.writeByte(0xFC | 0x03); // reserved (6 bits), NULA length size - 1 (2 bits)
11264 tag.writeByte(0xE0 | 0x01); // reserved (3 bits), num of SPS (5 bits)
11265 tag.writeShort(track.sps[0].length); // data of SPS
11266 tag.writeBytes(track.sps[0]); // SPS
11267
11268 tag.writeByte(track.pps.length); // num of PPS (will there ever be more that 1 PPS?)
11269 for (i = 0; i < track.pps.length; ++i) {
11270 tag.writeShort(track.pps[i].length); // 2 bytes for length of PPS
11271 tag.writeBytes(track.pps[i]); // data of PPS
11272 }
11273
11274 return tag;
11275};
11276
11277/**
11278 * Constructs a single-track, media segment from AAC data
11279 * events. The output of this stream can be fed to flash.
11280 */
11281AudioSegmentStream = function(track) {
11282 var
11283 adtsFrames = [],
11284 oldExtraData;
11285
11286 AudioSegmentStream.prototype.init.call(this);
11287
11288 this.push = function(data) {
11289 collectTimelineInfo(track, data);
11290
11291 if (track && track.channelcount === undefined) {
11292 track.audioobjecttype = data.audioobjecttype;
11293 track.channelcount = data.channelcount;
11294 track.samplerate = data.samplerate;
11295 track.samplingfrequencyindex = data.samplingfrequencyindex;
11296 track.samplesize = data.samplesize;
11297 track.extraData = (track.audioobjecttype << 11) |
11298 (track.samplingfrequencyindex << 7) |
11299 (track.channelcount << 3);
11300 }
11301
11302 data.pts = Math.round(data.pts / 90);
11303 data.dts = Math.round(data.dts / 90);
11304
11305 // buffer audio data until end() is called
11306 adtsFrames.push(data);
11307 };
11308
11309 this.flush = function() {
11310 var currentFrame, adtsFrame, lastMetaPts, tags = new TagList();
11311 // return early if no audio data has been observed
11312 if (adtsFrames.length === 0) {
11313 this.trigger('done', 'AudioSegmentStream');
11314 return;
11315 }
11316
11317 lastMetaPts = -Infinity;
11318
11319 while (adtsFrames.length) {
11320 currentFrame = adtsFrames.shift();
11321
11322 // write out metadata tags every 1 second so that the decoder
11323 // is re-initialized quickly after seeking into a different
11324 // audio configuration
11325 if (track.extraData !== oldExtraData || currentFrame.pts - lastMetaPts >= 1000) {
11326 adtsFrame = new FlvTag(FlvTag.METADATA_TAG);
11327 adtsFrame.pts = currentFrame.pts;
11328 adtsFrame.dts = currentFrame.dts;
11329
11330 // AAC is always 10
11331 adtsFrame.writeMetaDataDouble('audiocodecid', 10);
11332 adtsFrame.writeMetaDataBoolean('stereo', track.channelcount === 2);
11333 adtsFrame.writeMetaDataDouble('audiosamplerate', track.samplerate);
11334 // Is AAC always 16 bit?
11335 adtsFrame.writeMetaDataDouble('audiosamplesize', 16);
11336
11337 tags.push(adtsFrame.finalize());
11338
11339 oldExtraData = track.extraData;
11340
11341 adtsFrame = new FlvTag(FlvTag.AUDIO_TAG, true);
11342 // For audio, DTS is always the same as PTS. We want to set the DTS
11343 // however so we can compare with video DTS to determine approximate
11344 // packet order
11345 adtsFrame.pts = currentFrame.pts;
11346 adtsFrame.dts = currentFrame.dts;
11347
11348 adtsFrame.view.setUint16(adtsFrame.position, track.extraData);
11349 adtsFrame.position += 2;
11350 adtsFrame.length = Math.max(adtsFrame.length, adtsFrame.position);
11351
11352 tags.push(adtsFrame.finalize());
11353
11354 lastMetaPts = currentFrame.pts;
11355 }
11356 adtsFrame = new FlvTag(FlvTag.AUDIO_TAG);
11357 adtsFrame.pts = currentFrame.pts;
11358 adtsFrame.dts = currentFrame.dts;
11359
11360 adtsFrame.writeBytes(currentFrame.data);
11361
11362 tags.push(adtsFrame.finalize());
11363 }
11364
11365 oldExtraData = null;
11366 this.trigger('data', {track: track, tags: tags.list});
11367
11368 this.trigger('done', 'AudioSegmentStream');
11369 };
11370};
11371AudioSegmentStream.prototype = new Stream();
11372
11373/**
11374 * Store FlvTags for the h264 stream
11375 * @param track {object} track metadata configuration
11376 */
11377VideoSegmentStream = function(track) {
11378 var
11379 nalUnits = [],
11380 config,
11381 h264Frame;
11382 VideoSegmentStream.prototype.init.call(this);
11383
11384 this.finishFrame = function(tags, frame) {
11385 if (!frame) {
11386 return;
11387 }
11388 // Check if keyframe and the length of tags.
11389 // This makes sure we write metadata on the first frame of a segment.
11390 if (config && track && track.newMetadata &&
11391 (frame.keyFrame || tags.length === 0)) {
11392 // Push extra data on every IDR frame in case we did a stream change + seek
11393 var metaTag = metaDataTag(config, frame.dts).finalize();
11394 var extraTag = extraDataTag(track, frame.dts).finalize();
11395
11396 metaTag.metaDataTag = extraTag.metaDataTag = true;
11397
11398 tags.push(metaTag);
11399 tags.push(extraTag);
11400 track.newMetadata = false;
11401 }
11402
11403 frame.endNalUnit();
11404 tags.push(frame.finalize());
11405 h264Frame = null;
11406 };
11407
11408 this.push = function(data) {
11409 collectTimelineInfo(track, data);
11410
11411 data.pts = Math.round(data.pts / 90);
11412 data.dts = Math.round(data.dts / 90);
11413
11414 // buffer video until flush() is called
11415 nalUnits.push(data);
11416 };
11417
11418 this.flush = function() {
11419 var
11420 currentNal,
11421 tags = new TagList();
11422
11423 // Throw away nalUnits at the start of the byte stream until we find
11424 // the first AUD
11425 while (nalUnits.length) {
11426 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
11427 break;
11428 }
11429 nalUnits.shift();
11430 }
11431
11432 // return early if no video data has been observed
11433 if (nalUnits.length === 0) {
11434 this.trigger('done', 'VideoSegmentStream');
11435 return;
11436 }
11437
11438 while (nalUnits.length) {
11439 currentNal = nalUnits.shift();
11440
11441 // record the track config
11442 if (currentNal.nalUnitType === 'seq_parameter_set_rbsp') {
11443 track.newMetadata = true;
11444 config = currentNal.config;
11445 track.width = config.width;
11446 track.height = config.height;
11447 track.sps = [currentNal.data];
11448 track.profileIdc = config.profileIdc;
11449 track.levelIdc = config.levelIdc;
11450 track.profileCompatibility = config.profileCompatibility;
11451 h264Frame.endNalUnit();
11452 } else if (currentNal.nalUnitType === 'pic_parameter_set_rbsp') {
11453 track.newMetadata = true;
11454 track.pps = [currentNal.data];
11455 h264Frame.endNalUnit();
11456 } else if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
11457 if (h264Frame) {
11458 this.finishFrame(tags, h264Frame);
11459 }
11460 h264Frame = new FlvTag(FlvTag.VIDEO_TAG);
11461 h264Frame.pts = currentNal.pts;
11462 h264Frame.dts = currentNal.dts;
11463 } else {
11464 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
11465 // the current sample is a key frame
11466 h264Frame.keyFrame = true;
11467 }
11468 h264Frame.endNalUnit();
11469 }
11470 h264Frame.startNalUnit();
11471 h264Frame.writeBytes(currentNal.data);
11472 }
11473 if (h264Frame) {
11474 this.finishFrame(tags, h264Frame);
11475 }
11476
11477 this.trigger('data', {track: track, tags: tags.list});
11478
11479 // Continue with the flush process now
11480 this.trigger('done', 'VideoSegmentStream');
11481 };
11482};
11483
11484VideoSegmentStream.prototype = new Stream();
11485
11486/**
11487 * An object that incrementally transmuxes MPEG2 Trasport Stream
11488 * chunks into an FLV.
11489 */
11490Transmuxer = function(options) {
11491 var
11492 self = this,
11493
11494 packetStream, parseStream, elementaryStream,
11495 videoTimestampRolloverStream, audioTimestampRolloverStream,
11496 timedMetadataTimestampRolloverStream,
11497 adtsStream, h264Stream,
11498 videoSegmentStream, audioSegmentStream, captionStream,
11499 coalesceStream;
11500
11501 Transmuxer.prototype.init.call(this);
11502
11503 options = options || {};
11504
11505 // expose the metadata stream
11506 this.metadataStream = new m2ts.MetadataStream();
11507
11508 options.metadataStream = this.metadataStream;
11509
11510 // set up the parsing pipeline
11511 packetStream = new m2ts.TransportPacketStream();
11512 parseStream = new m2ts.TransportParseStream();
11513 elementaryStream = new m2ts.ElementaryStream();
11514 videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
11515 audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
11516 timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
11517
11518 adtsStream = new AdtsStream();
11519 h264Stream = new H264Stream();
11520 coalesceStream = new CoalesceStream(options);
11521
11522 // disassemble MPEG2-TS packets into elementary streams
11523 packetStream
11524 .pipe(parseStream)
11525 .pipe(elementaryStream);
11526
11527 // !!THIS ORDER IS IMPORTANT!!
11528 // demux the streams
11529 elementaryStream
11530 .pipe(videoTimestampRolloverStream)
11531 .pipe(h264Stream);
11532 elementaryStream
11533 .pipe(audioTimestampRolloverStream)
11534 .pipe(adtsStream);
11535
11536 elementaryStream
11537 .pipe(timedMetadataTimestampRolloverStream)
11538 .pipe(this.metadataStream)
11539 .pipe(coalesceStream);
11540 // if CEA-708 parsing is available, hook up a caption stream
11541 captionStream = new m2ts.CaptionStream();
11542 h264Stream.pipe(captionStream)
11543 .pipe(coalesceStream);
11544
11545 // hook up the segment streams once track metadata is delivered
11546 elementaryStream.on('data', function(data) {
11547 var i, videoTrack, audioTrack;
11548
11549 if (data.type === 'metadata') {
11550 i = data.tracks.length;
11551
11552 // scan the tracks listed in the metadata
11553 while (i--) {
11554 if (data.tracks[i].type === 'video') {
11555 videoTrack = data.tracks[i];
11556 } else if (data.tracks[i].type === 'audio') {
11557 audioTrack = data.tracks[i];
11558 }
11559 }
11560
11561 // hook up the video segment stream to the first track with h264 data
11562 if (videoTrack && !videoSegmentStream) {
11563 coalesceStream.numberOfTracks++;
11564 videoSegmentStream = new VideoSegmentStream(videoTrack);
11565
11566 // Set up the final part of the video pipeline
11567 h264Stream
11568 .pipe(videoSegmentStream)
11569 .pipe(coalesceStream);
11570 }
11571
11572 if (audioTrack && !audioSegmentStream) {
11573 // hook up the audio segment stream to the first track with aac data
11574 coalesceStream.numberOfTracks++;
11575 audioSegmentStream = new AudioSegmentStream(audioTrack);
11576
11577 // Set up the final part of the audio pipeline
11578 adtsStream
11579 .pipe(audioSegmentStream)
11580 .pipe(coalesceStream);
11581 }
11582 }
11583 });
11584
11585 // feed incoming data to the front of the parsing pipeline
11586 this.push = function(data) {
11587 packetStream.push(data);
11588 };
11589
11590 // flush any buffered data
11591 this.flush = function() {
11592 // Start at the top of the pipeline and flush all pending work
11593 packetStream.flush();
11594 };
11595
11596 // Re-emit any data coming from the coalesce stream to the outside world
11597 coalesceStream.on('data', function(event) {
11598 self.trigger('data', event);
11599 });
11600
11601 // Let the consumer know we have finished flushing the entire pipeline
11602 coalesceStream.on('done', function() {
11603 self.trigger('done');
11604 });
11605};
11606Transmuxer.prototype = new Stream();
11607
11608// forward compatibility
11609module.exports = Transmuxer;
11610
11611},{"../codecs/adts.js":38,"../codecs/h264":39,"../m2ts/m2ts.js":48,"../utils/stream.js":60,"./coalesce-stream.js":41,"./flv-tag.js":43,"./tag-list.js":45}],47:[function(require,module,exports){
11612/**
11613 * mux.js
11614 *
11615 * Copyright (c) 2015 Brightcove
11616 * All rights reserved.
11617 *
11618 * Reads in-band caption information from a video elementary
11619 * stream. Captions must follow the CEA-708 standard for injection
11620 * into an MPEG-2 transport streams.
11621 * @see https://en.wikipedia.org/wiki/CEA-708
11622 */
11623
11624'use strict';
11625
11626// -----------------
11627// Link To Transport
11628// -----------------
11629
11630// Supplemental enhancement information (SEI) NAL units have a
11631// payload type field to indicate how they are to be
11632// interpreted. CEAS-708 caption content is always transmitted with
11633// payload type 0x04.
11634var USER_DATA_REGISTERED_ITU_T_T35 = 4,
11635 RBSP_TRAILING_BITS = 128,
11636 Stream = require('../utils/stream');
11637
11638/**
11639 * Parse a supplemental enhancement information (SEI) NAL unit.
11640 * Stops parsing once a message of type ITU T T35 has been found.
11641 *
11642 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
11643 * @return {object} the parsed SEI payload
11644 * @see Rec. ITU-T H.264, 7.3.2.3.1
11645 */
11646var parseSei = function(bytes) {
11647 var
11648 i = 0,
11649 result = {
11650 payloadType: -1,
11651 payloadSize: 0
11652 },
11653 payloadType = 0,
11654 payloadSize = 0;
11655
11656 // go through the sei_rbsp parsing each each individual sei_message
11657 while (i < bytes.byteLength) {
11658 // stop once we have hit the end of the sei_rbsp
11659 if (bytes[i] === RBSP_TRAILING_BITS) {
11660 break;
11661 }
11662
11663 // Parse payload type
11664 while (bytes[i] === 0xFF) {
11665 payloadType += 255;
11666 i++;
11667 }
11668 payloadType += bytes[i++];
11669
11670 // Parse payload size
11671 while (bytes[i] === 0xFF) {
11672 payloadSize += 255;
11673 i++;
11674 }
11675 payloadSize += bytes[i++];
11676
11677 // this sei_message is a 608/708 caption so save it and break
11678 // there can only ever be one caption message in a frame's sei
11679 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
11680 result.payloadType = payloadType;
11681 result.payloadSize = payloadSize;
11682 result.payload = bytes.subarray(i, i + payloadSize);
11683 break;
11684 }
11685
11686 // skip the payload and parse the next message
11687 i += payloadSize;
11688 payloadType = 0;
11689 payloadSize = 0;
11690 }
11691
11692 return result;
11693};
11694
11695// see ANSI/SCTE 128-1 (2013), section 8.1
11696var parseUserData = function(sei) {
11697 // itu_t_t35_contry_code must be 181 (United States) for
11698 // captions
11699 if (sei.payload[0] !== 181) {
11700 return null;
11701 }
11702
11703 // itu_t_t35_provider_code should be 49 (ATSC) for captions
11704 if (((sei.payload[1] << 8) | sei.payload[2]) !== 49) {
11705 return null;
11706 }
11707
11708 // the user_identifier should be "GA94" to indicate ATSC1 data
11709 if (String.fromCharCode(sei.payload[3],
11710 sei.payload[4],
11711 sei.payload[5],
11712 sei.payload[6]) !== 'GA94') {
11713 return null;
11714 }
11715
11716 // finally, user_data_type_code should be 0x03 for caption data
11717 if (sei.payload[7] !== 0x03) {
11718 return null;
11719 }
11720
11721 // return the user_data_type_structure and strip the trailing
11722 // marker bits
11723 return sei.payload.subarray(8, sei.payload.length - 1);
11724};
11725
11726// see CEA-708-D, section 4.4
11727var parseCaptionPackets = function(pts, userData) {
11728 var results = [], i, count, offset, data;
11729
11730 // if this is just filler, return immediately
11731 if (!(userData[0] & 0x40)) {
11732 return results;
11733 }
11734
11735 // parse out the cc_data_1 and cc_data_2 fields
11736 count = userData[0] & 0x1f;
11737 for (i = 0; i < count; i++) {
11738 offset = i * 3;
11739 data = {
11740 type: userData[offset + 2] & 0x03,
11741 pts: pts
11742 };
11743
11744 // capture cc data when cc_valid is 1
11745 if (userData[offset + 2] & 0x04) {
11746 data.ccData = (userData[offset + 3] << 8) | userData[offset + 4];
11747 results.push(data);
11748 }
11749 }
11750 return results;
11751};
11752
11753var CaptionStream = function() {
11754 CaptionStream.prototype.init.call(this);
11755
11756 this.captionPackets_ = [];
11757
11758 this.field1_ = new Cea608Stream(); // eslint-disable-line no-use-before-define
11759
11760 // forward data and done events from field1_ to this CaptionStream
11761 this.field1_.on('data', this.trigger.bind(this, 'data'));
11762 this.field1_.on('done', this.trigger.bind(this, 'done'));
11763};
11764CaptionStream.prototype = new Stream();
11765CaptionStream.prototype.push = function(event) {
11766 var sei, userData;
11767
11768 // only examine SEI NALs
11769 if (event.nalUnitType !== 'sei_rbsp') {
11770 return;
11771 }
11772
11773 // parse the sei
11774 sei = parseSei(event.escapedRBSP);
11775
11776 // ignore everything but user_data_registered_itu_t_t35
11777 if (sei.payloadType !== USER_DATA_REGISTERED_ITU_T_T35) {
11778 return;
11779 }
11780
11781 // parse out the user data payload
11782 userData = parseUserData(sei);
11783
11784 // ignore unrecognized userData
11785 if (!userData) {
11786 return;
11787 }
11788
11789 // parse out CC data packets and save them for later
11790 this.captionPackets_ = this.captionPackets_.concat(parseCaptionPackets(event.pts, userData));
11791};
11792
11793CaptionStream.prototype.flush = function() {
11794 // make sure we actually parsed captions before proceeding
11795 if (!this.captionPackets_.length) {
11796 this.field1_.flush();
11797 return;
11798 }
11799
11800 // In Chrome, the Array#sort function is not stable so add a
11801 // presortIndex that we can use to ensure we get a stable-sort
11802 this.captionPackets_.forEach(function(elem, idx) {
11803 elem.presortIndex = idx;
11804 });
11805
11806 // sort caption byte-pairs based on their PTS values
11807 this.captionPackets_.sort(function(a, b) {
11808 if (a.pts === b.pts) {
11809 return a.presortIndex - b.presortIndex;
11810 }
11811 return a.pts - b.pts;
11812 });
11813
11814 // Push each caption into Cea608Stream
11815 this.captionPackets_.forEach(this.field1_.push, this.field1_);
11816
11817 this.captionPackets_.length = 0;
11818 this.field1_.flush();
11819 return;
11820};
11821// ----------------------
11822// Session to Application
11823// ----------------------
11824
11825var BASIC_CHARACTER_TRANSLATION = {
11826 0x2a: 0xe1,
11827 0x5c: 0xe9,
11828 0x5e: 0xed,
11829 0x5f: 0xf3,
11830 0x60: 0xfa,
11831 0x7b: 0xe7,
11832 0x7c: 0xf7,
11833 0x7d: 0xd1,
11834 0x7e: 0xf1,
11835 0x7f: 0x2588
11836};
11837
11838var getCharFromCode = function(code) {
11839 if (code === null) {
11840 return '';
11841 }
11842 code = BASIC_CHARACTER_TRANSLATION[code] || code;
11843 return String.fromCharCode(code);
11844};
11845
11846// Constants for the byte codes recognized by Cea608Stream. This
11847// list is not exhaustive. For a more comprehensive listing and
11848// semantics see
11849// http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
11850var PADDING = 0x0000,
11851
11852 // Pop-on Mode
11853 RESUME_CAPTION_LOADING = 0x1420,
11854 END_OF_CAPTION = 0x142f,
11855
11856 // Roll-up Mode
11857 ROLL_UP_2_ROWS = 0x1425,
11858 ROLL_UP_3_ROWS = 0x1426,
11859 ROLL_UP_4_ROWS = 0x1427,
11860 CARRIAGE_RETURN = 0x142d,
11861 // Erasure
11862 BACKSPACE = 0x1421,
11863 ERASE_DISPLAYED_MEMORY = 0x142c,
11864 ERASE_NON_DISPLAYED_MEMORY = 0x142e;
11865
11866// the index of the last row in a CEA-608 display buffer
11867var BOTTOM_ROW = 14;
11868// CEA-608 captions are rendered onto a 34x15 matrix of character
11869// cells. The "bottom" row is the last element in the outer array.
11870var createDisplayBuffer = function() {
11871 var result = [], i = BOTTOM_ROW + 1;
11872 while (i--) {
11873 result.push('');
11874 }
11875 return result;
11876};
11877
11878var Cea608Stream = function() {
11879 Cea608Stream.prototype.init.call(this);
11880
11881 this.mode_ = 'popOn';
11882 // When in roll-up mode, the index of the last row that will
11883 // actually display captions. If a caption is shifted to a row
11884 // with a lower index than this, it is cleared from the display
11885 // buffer
11886 this.topRow_ = 0;
11887 this.startPts_ = 0;
11888 this.displayed_ = createDisplayBuffer();
11889 this.nonDisplayed_ = createDisplayBuffer();
11890 this.lastControlCode_ = null;
11891
11892 this.push = function(packet) {
11893 // Ignore other channels
11894 if (packet.type !== 0) {
11895 return;
11896 }
11897 var data, swap, char0, char1;
11898 // remove the parity bits
11899 data = packet.ccData & 0x7f7f;
11900
11901 // ignore duplicate control codes
11902 if (data === this.lastControlCode_) {
11903 this.lastControlCode_ = null;
11904 return;
11905 }
11906
11907 // Store control codes
11908 if ((data & 0xf000) === 0x1000) {
11909 this.lastControlCode_ = data;
11910 } else {
11911 this.lastControlCode_ = null;
11912 }
11913
11914 switch (data) {
11915 case PADDING:
11916 break;
11917 case RESUME_CAPTION_LOADING:
11918 this.mode_ = 'popOn';
11919 break;
11920 case END_OF_CAPTION:
11921 // if a caption was being displayed, it's gone now
11922 this.flushDisplayed(packet.pts);
11923
11924 // flip memory
11925 swap = this.displayed_;
11926 this.displayed_ = this.nonDisplayed_;
11927 this.nonDisplayed_ = swap;
11928
11929 // start measuring the time to display the caption
11930 this.startPts_ = packet.pts;
11931 break;
11932
11933 case ROLL_UP_2_ROWS:
11934 this.topRow_ = BOTTOM_ROW - 1;
11935 this.mode_ = 'rollUp';
11936 break;
11937 case ROLL_UP_3_ROWS:
11938 this.topRow_ = BOTTOM_ROW - 2;
11939 this.mode_ = 'rollUp';
11940 break;
11941 case ROLL_UP_4_ROWS:
11942 this.topRow_ = BOTTOM_ROW - 3;
11943 this.mode_ = 'rollUp';
11944 break;
11945 case CARRIAGE_RETURN:
11946 this.flushDisplayed(packet.pts);
11947 this.shiftRowsUp_();
11948 this.startPts_ = packet.pts;
11949 break;
11950
11951 case BACKSPACE:
11952 if (this.mode_ === 'popOn') {
11953 this.nonDisplayed_[BOTTOM_ROW] = this.nonDisplayed_[BOTTOM_ROW].slice(0, -1);
11954 } else {
11955 this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
11956 }
11957 break;
11958 case ERASE_DISPLAYED_MEMORY:
11959 this.flushDisplayed(packet.pts);
11960 this.displayed_ = createDisplayBuffer();
11961 break;
11962 case ERASE_NON_DISPLAYED_MEMORY:
11963 this.nonDisplayed_ = createDisplayBuffer();
11964 break;
11965 default:
11966 char0 = data >>> 8;
11967 char1 = data & 0xff;
11968
11969 // Look for a Channel 1 Preamble Address Code
11970 if (char0 >= 0x10 && char0 <= 0x17 &&
11971 char1 >= 0x40 && char1 <= 0x7F &&
11972 (char0 !== 0x10 || char1 < 0x60)) {
11973 // Follow Safari's lead and replace the PAC with a space
11974 char0 = 0x20;
11975 // we only want one space so make the second character null
11976 // which will get become '' in getCharFromCode
11977 char1 = null;
11978 }
11979
11980 // Look for special character sets
11981 if ((char0 === 0x11 || char0 === 0x19) &&
11982 (char1 >= 0x30 && char1 <= 0x3F)) {
11983 // Put in eigth note and space
11984 char0 = 0x266A;
11985 char1 = '';
11986 }
11987
11988 // ignore unsupported control codes
11989 if ((char0 & 0xf0) === 0x10) {
11990 return;
11991 }
11992
11993 // remove null chars
11994 if (char0 === 0x00) {
11995 char0 = null;
11996 }
11997 if (char1 === 0x00) {
11998 char1 = null;
11999 }
12000
12001 // character handling is dependent on the current mode
12002 this[this.mode_](packet.pts, char0, char1);
12003 break;
12004 }
12005 };
12006};
12007Cea608Stream.prototype = new Stream();
12008// Trigger a cue point that captures the current state of the
12009// display buffer
12010Cea608Stream.prototype.flushDisplayed = function(pts) {
12011 var content = this.displayed_
12012 // remove spaces from the start and end of the string
12013 .map(function(row) {
12014 return row.trim();
12015 })
12016 // remove empty rows
12017 .filter(function(row) {
12018 return row.length;
12019 })
12020 // combine all text rows to display in one cue
12021 .join('\n');
12022
12023 if (content.length) {
12024 this.trigger('data', {
12025 startPts: this.startPts_,
12026 endPts: pts,
12027 text: content
12028 });
12029 }
12030};
12031
12032// Mode Implementations
12033Cea608Stream.prototype.popOn = function(pts, char0, char1) {
12034 var baseRow = this.nonDisplayed_[BOTTOM_ROW];
12035
12036 // buffer characters
12037 baseRow += getCharFromCode(char0);
12038 baseRow += getCharFromCode(char1);
12039 this.nonDisplayed_[BOTTOM_ROW] = baseRow;
12040};
12041
12042Cea608Stream.prototype.rollUp = function(pts, char0, char1) {
12043 var baseRow = this.displayed_[BOTTOM_ROW];
12044 if (baseRow === '') {
12045 // we're starting to buffer new display input, so flush out the
12046 // current display
12047 this.flushDisplayed(pts);
12048
12049 this.startPts_ = pts;
12050 }
12051
12052 baseRow += getCharFromCode(char0);
12053 baseRow += getCharFromCode(char1);
12054
12055 this.displayed_[BOTTOM_ROW] = baseRow;
12056};
12057Cea608Stream.prototype.shiftRowsUp_ = function() {
12058 var i;
12059 // clear out inactive rows
12060 for (i = 0; i < this.topRow_; i++) {
12061 this.displayed_[i] = '';
12062 }
12063 // shift displayed rows up
12064 for (i = this.topRow_; i < BOTTOM_ROW; i++) {
12065 this.displayed_[i] = this.displayed_[i + 1];
12066 }
12067 // clear out the bottom row
12068 this.displayed_[BOTTOM_ROW] = '';
12069};
12070
12071// exports
12072module.exports = {
12073 CaptionStream: CaptionStream,
12074 Cea608Stream: Cea608Stream
12075};
12076
12077},{"../utils/stream":60}],48:[function(require,module,exports){
12078/**
12079 * mux.js
12080 *
12081 * Copyright (c) 2015 Brightcove
12082 * All rights reserved.
12083 *
12084 * A stream-based mp2t to mp4 converter. This utility can be used to
12085 * deliver mp4s to a SourceBuffer on platforms that support native
12086 * Media Source Extensions.
12087 */
12088'use strict';
12089var Stream = require('../utils/stream.js'),
12090 CaptionStream = require('./caption-stream'),
12091 StreamTypes = require('./stream-types'),
12092 TimestampRolloverStream = require('./timestamp-rollover-stream').TimestampRolloverStream;
12093
12094var m2tsStreamTypes = require('./stream-types.js');
12095
12096// object types
12097var TransportPacketStream, TransportParseStream, ElementaryStream;
12098
12099// constants
12100var
12101 MP2T_PACKET_LENGTH = 188, // bytes
12102 SYNC_BYTE = 0x47;
12103
12104/**
12105 * Splits an incoming stream of binary data into MPEG-2 Transport
12106 * Stream packets.
12107 */
12108TransportPacketStream = function() {
12109 var
12110 buffer = new Uint8Array(MP2T_PACKET_LENGTH),
12111 bytesInBuffer = 0;
12112
12113 TransportPacketStream.prototype.init.call(this);
12114
12115 // Deliver new bytes to the stream.
12116
12117 this.push = function(bytes) {
12118 var
12119 startIndex = 0,
12120 endIndex = MP2T_PACKET_LENGTH,
12121 everything;
12122
12123 // If there are bytes remaining from the last segment, prepend them to the
12124 // bytes that were pushed in
12125 if (bytesInBuffer) {
12126 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
12127 everything.set(buffer.subarray(0, bytesInBuffer));
12128 everything.set(bytes, bytesInBuffer);
12129 bytesInBuffer = 0;
12130 } else {
12131 everything = bytes;
12132 }
12133
12134 // While we have enough data for a packet
12135 while (endIndex < everything.byteLength) {
12136 // Look for a pair of start and end sync bytes in the data..
12137 if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
12138 // We found a packet so emit it and jump one whole packet forward in
12139 // the stream
12140 this.trigger('data', everything.subarray(startIndex, endIndex));
12141 startIndex += MP2T_PACKET_LENGTH;
12142 endIndex += MP2T_PACKET_LENGTH;
12143 continue;
12144 }
12145 // If we get here, we have somehow become de-synchronized and we need to step
12146 // forward one byte at a time until we find a pair of sync bytes that denote
12147 // a packet
12148 startIndex++;
12149 endIndex++;
12150 }
12151
12152 // If there was some data left over at the end of the segment that couldn't
12153 // possibly be a whole packet, keep it because it might be the start of a packet
12154 // that continues in the next segment
12155 if (startIndex < everything.byteLength) {
12156 buffer.set(everything.subarray(startIndex), 0);
12157 bytesInBuffer = everything.byteLength - startIndex;
12158 }
12159 };
12160
12161 this.flush = function() {
12162 // If the buffer contains a whole packet when we are being flushed, emit it
12163 // and empty the buffer. Otherwise hold onto the data because it may be
12164 // important for decoding the next segment
12165 if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
12166 this.trigger('data', buffer);
12167 bytesInBuffer = 0;
12168 }
12169 this.trigger('done');
12170 };
12171};
12172TransportPacketStream.prototype = new Stream();
12173
12174/**
12175 * Accepts an MP2T TransportPacketStream and emits data events with parsed
12176 * forms of the individual transport stream packets.
12177 */
12178TransportParseStream = function() {
12179 var parsePsi, parsePat, parsePmt, self;
12180 TransportParseStream.prototype.init.call(this);
12181 self = this;
12182
12183 this.packetsWaitingForPmt = [];
12184 this.programMapTable = undefined;
12185
12186 parsePsi = function(payload, psi) {
12187 var offset = 0;
12188
12189 // PSI packets may be split into multiple sections and those
12190 // sections may be split into multiple packets. If a PSI
12191 // section starts in this packet, the payload_unit_start_indicator
12192 // will be true and the first byte of the payload will indicate
12193 // the offset from the current position to the start of the
12194 // section.
12195 if (psi.payloadUnitStartIndicator) {
12196 offset += payload[offset] + 1;
12197 }
12198
12199 if (psi.type === 'pat') {
12200 parsePat(payload.subarray(offset), psi);
12201 } else {
12202 parsePmt(payload.subarray(offset), psi);
12203 }
12204 };
12205
12206 parsePat = function(payload, pat) {
12207 pat.section_number = payload[7]; // eslint-disable-line camelcase
12208 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
12209
12210 // skip the PSI header and parse the first PMT entry
12211 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
12212 pat.pmtPid = self.pmtPid;
12213 };
12214
12215 /**
12216 * Parse out the relevant fields of a Program Map Table (PMT).
12217 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
12218 * packet. The first byte in this array should be the table_id
12219 * field.
12220 * @param pmt {object} the object that should be decorated with
12221 * fields parsed from the PMT.
12222 */
12223 parsePmt = function(payload, pmt) {
12224 var sectionLength, tableEnd, programInfoLength, offset;
12225
12226 // PMTs can be sent ahead of the time when they should actually
12227 // take effect. We don't believe this should ever be the case
12228 // for HLS but we'll ignore "forward" PMT declarations if we see
12229 // them. Future PMT declarations have the current_next_indicator
12230 // set to zero.
12231 if (!(payload[5] & 0x01)) {
12232 return;
12233 }
12234
12235 // overwrite any existing program map table
12236 self.programMapTable = {};
12237
12238 // the mapping table ends at the end of the current section
12239 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
12240 tableEnd = 3 + sectionLength - 4;
12241
12242 // to determine where the table is, we have to figure out how
12243 // long the program info descriptors are
12244 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];
12245
12246 // advance the offset to the first entry in the mapping table
12247 offset = 12 + programInfoLength;
12248 while (offset < tableEnd) {
12249 // add an entry that maps the elementary_pid to the stream_type
12250 self.programMapTable[(payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]] = payload[offset];
12251
12252 // move to the next table entry
12253 // skip past the elementary stream descriptors, if present
12254 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
12255 }
12256
12257 // record the map on the packet as well
12258 pmt.programMapTable = self.programMapTable;
12259
12260 // if there are any packets waiting for a PMT to be found, process them now
12261 while (self.packetsWaitingForPmt.length) {
12262 self.processPes_.apply(self, self.packetsWaitingForPmt.shift());
12263 }
12264 };
12265
12266 /**
12267 * Deliver a new MP2T packet to the stream.
12268 */
12269 this.push = function(packet) {
12270 var
12271 result = {},
12272 offset = 4;
12273
12274 result.payloadUnitStartIndicator = !!(packet[1] & 0x40);
12275
12276 // pid is a 13-bit field starting at the last bit of packet[1]
12277 result.pid = packet[1] & 0x1f;
12278 result.pid <<= 8;
12279 result.pid |= packet[2];
12280
12281 // if an adaption field is present, its length is specified by the
12282 // fifth byte of the TS packet header. The adaptation field is
12283 // used to add stuffing to PES packets that don't fill a complete
12284 // TS packet, and to specify some forms of timing and control data
12285 // that we do not currently use.
12286 if (((packet[3] & 0x30) >>> 4) > 0x01) {
12287 offset += packet[offset] + 1;
12288 }
12289
12290 // parse the rest of the packet based on the type
12291 if (result.pid === 0) {
12292 result.type = 'pat';
12293 parsePsi(packet.subarray(offset), result);
12294 this.trigger('data', result);
12295 } else if (result.pid === this.pmtPid) {
12296 result.type = 'pmt';
12297 parsePsi(packet.subarray(offset), result);
12298 this.trigger('data', result);
12299 } else if (this.programMapTable === undefined) {
12300 // When we have not seen a PMT yet, defer further processing of
12301 // PES packets until one has been parsed
12302 this.packetsWaitingForPmt.push([packet, offset, result]);
12303 } else {
12304 this.processPes_(packet, offset, result);
12305 }
12306 };
12307
12308 this.processPes_ = function(packet, offset, result) {
12309 result.streamType = this.programMapTable[result.pid];
12310 result.type = 'pes';
12311 result.data = packet.subarray(offset);
12312
12313 this.trigger('data', result);
12314 };
12315
12316};
12317TransportParseStream.prototype = new Stream();
12318TransportParseStream.STREAM_TYPES = {
12319 h264: 0x1b,
12320 adts: 0x0f
12321};
12322
12323/**
12324 * Reconsistutes program elementary stream (PES) packets from parsed
12325 * transport stream packets. That is, if you pipe an
12326 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
12327 * events will be events which capture the bytes for individual PES
12328 * packets plus relevant metadata that has been extracted from the
12329 * container.
12330 */
12331ElementaryStream = function() {
12332 var
12333 self = this,
12334 // PES packet fragments
12335 video = {
12336 data: [],
12337 size: 0
12338 },
12339 audio = {
12340 data: [],
12341 size: 0
12342 },
12343 timedMetadata = {
12344 data: [],
12345 size: 0
12346 },
12347 parsePes = function(payload, pes) {
12348 var ptsDtsFlags;
12349
12350 // get the packet length, this will be 0 for video
12351 pes.packetLength = 6 + ((payload[4] << 8) | payload[5]);
12352
12353 // find out if this packets starts a new keyframe
12354 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
12355 // PES packets may be annotated with a PTS value, or a PTS value
12356 // and a DTS value. Determine what combination of values is
12357 // available to work with.
12358 ptsDtsFlags = payload[7];
12359
12360 // PTS and DTS are normally stored as a 33-bit number. Javascript
12361 // performs all bitwise operations on 32-bit integers but javascript
12362 // supports a much greater range (52-bits) of integer using standard
12363 // mathematical operations.
12364 // We construct a 31-bit value using bitwise operators over the 31
12365 // most significant bits and then multiply by 4 (equal to a left-shift
12366 // of 2) before we add the final 2 least significant bits of the
12367 // timestamp (equal to an OR.)
12368 if (ptsDtsFlags & 0xC0) {
12369 // the PTS and DTS are not written out directly. For information
12370 // on how they are encoded, see
12371 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
12372 pes.pts = (payload[9] & 0x0E) << 27 |
12373 (payload[10] & 0xFF) << 20 |
12374 (payload[11] & 0xFE) << 12 |
12375 (payload[12] & 0xFF) << 5 |
12376 (payload[13] & 0xFE) >>> 3;
12377 pes.pts *= 4; // Left shift by 2
12378 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
12379 pes.dts = pes.pts;
12380 if (ptsDtsFlags & 0x40) {
12381 pes.dts = (payload[14] & 0x0E) << 27 |
12382 (payload[15] & 0xFF) << 20 |
12383 (payload[16] & 0xFE) << 12 |
12384 (payload[17] & 0xFF) << 5 |
12385 (payload[18] & 0xFE) >>> 3;
12386 pes.dts *= 4; // Left shift by 2
12387 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
12388 }
12389 }
12390 // the data section starts immediately after the PES header.
12391 // pes_header_data_length specifies the number of header bytes
12392 // that follow the last byte of the field.
12393 pes.data = payload.subarray(9 + payload[8]);
12394 },
12395 flushStream = function(stream, type, forceFlush) {
12396 var
12397 packetData = new Uint8Array(stream.size),
12398 event = {
12399 type: type
12400 },
12401 i = 0,
12402 offset = 0,
12403 packetFlushable = false,
12404 fragment;
12405
12406 // do nothing if there is not enough buffered data for a complete
12407 // PES header
12408 if (!stream.data.length || stream.size < 9) {
12409 return;
12410 }
12411 event.trackId = stream.data[0].pid;
12412
12413 // reassemble the packet
12414 for (i = 0; i < stream.data.length; i++) {
12415 fragment = stream.data[i];
12416
12417 packetData.set(fragment.data, offset);
12418 offset += fragment.data.byteLength;
12419 }
12420
12421 // parse assembled packet's PES header
12422 parsePes(packetData, event);
12423
12424 // non-video PES packets MUST have a non-zero PES_packet_length
12425 // check that they match before we do a flush
12426 packetFlushable = type === 'video' || event.packetLength === stream.size;
12427
12428 // flush pending packets if the conditions are right
12429 if (forceFlush || packetFlushable) {
12430 stream.size = 0;
12431 stream.data.length = 0;
12432 }
12433
12434 // only emit packets that are complete. this is to avoid assembling
12435 // incomplete PES packets due to poor segmentation
12436 if (packetFlushable) {
12437 self.trigger('data', event);
12438 }
12439 };
12440
12441 ElementaryStream.prototype.init.call(this);
12442
12443 this.push = function(data) {
12444 ({
12445 pat: function() {
12446 // we have to wait for the PMT to arrive as well before we
12447 // have any meaningful metadata
12448 },
12449 pes: function() {
12450 var stream, streamType;
12451
12452 switch (data.streamType) {
12453 case StreamTypes.H264_STREAM_TYPE:
12454 case m2tsStreamTypes.H264_STREAM_TYPE:
12455 stream = video;
12456 streamType = 'video';
12457 break;
12458 case StreamTypes.ADTS_STREAM_TYPE:
12459 stream = audio;
12460 streamType = 'audio';
12461 break;
12462 case StreamTypes.METADATA_STREAM_TYPE:
12463 stream = timedMetadata;
12464 streamType = 'timed-metadata';
12465 break;
12466 default:
12467 // ignore unknown stream types
12468 return;
12469 }
12470
12471 // if a new packet is starting, we can flush the completed
12472 // packet
12473 if (data.payloadUnitStartIndicator) {
12474 flushStream(stream, streamType, true);
12475 }
12476
12477 // buffer this fragment until we are sure we've received the
12478 // complete payload
12479 stream.data.push(data);
12480 stream.size += data.data.byteLength;
12481 },
12482 pmt: function() {
12483 var
12484 event = {
12485 type: 'metadata',
12486 tracks: []
12487 },
12488 programMapTable = data.programMapTable,
12489 k,
12490 track;
12491
12492 // translate streams to tracks
12493 for (k in programMapTable) {
12494 if (programMapTable.hasOwnProperty(k)) {
12495 track = {
12496 timelineStartInfo: {
12497 baseMediaDecodeTime: 0
12498 }
12499 };
12500 track.id = +k;
12501 if (programMapTable[k] === m2tsStreamTypes.H264_STREAM_TYPE) {
12502 track.codec = 'avc';
12503 track.type = 'video';
12504 } else if (programMapTable[k] === m2tsStreamTypes.ADTS_STREAM_TYPE) {
12505 track.codec = 'adts';
12506 track.type = 'audio';
12507 }
12508 event.tracks.push(track);
12509 }
12510 }
12511 self.trigger('data', event);
12512 }
12513 })[data.type]();
12514 };
12515
12516 /**
12517 * Flush any remaining input. Video PES packets may be of variable
12518 * length. Normally, the start of a new video packet can trigger the
12519 * finalization of the previous packet. That is not possible if no
12520 * more video is forthcoming, however. In that case, some other
12521 * mechanism (like the end of the file) has to be employed. When it is
12522 * clear that no additional data is forthcoming, calling this method
12523 * will flush the buffered packets.
12524 */
12525 this.flush = function() {
12526 // !!THIS ORDER IS IMPORTANT!!
12527 // video first then audio
12528 flushStream(video, 'video');
12529 flushStream(audio, 'audio');
12530 flushStream(timedMetadata, 'timed-metadata');
12531 this.trigger('done');
12532 };
12533};
12534ElementaryStream.prototype = new Stream();
12535
12536var m2ts = {
12537 PAT_PID: 0x0000,
12538 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
12539 TransportPacketStream: TransportPacketStream,
12540 TransportParseStream: TransportParseStream,
12541 ElementaryStream: ElementaryStream,
12542 TimestampRolloverStream: TimestampRolloverStream,
12543 CaptionStream: CaptionStream.CaptionStream,
12544 Cea608Stream: CaptionStream.Cea608Stream,
12545 MetadataStream: require('./metadata-stream')
12546};
12547
12548for (var type in StreamTypes) {
12549 if (StreamTypes.hasOwnProperty(type)) {
12550 m2ts[type] = StreamTypes[type];
12551 }
12552}
12553
12554module.exports = m2ts;
12555
12556},{"../utils/stream.js":60,"./caption-stream":47,"./metadata-stream":49,"./stream-types":51,"./stream-types.js":51,"./timestamp-rollover-stream":52}],49:[function(require,module,exports){
12557/**
12558 * Accepts program elementary stream (PES) data events and parses out
12559 * ID3 metadata from them, if present.
12560 * @see http://id3.org/id3v2.3.0
12561 */
12562'use strict';
12563var
12564 Stream = require('../utils/stream'),
12565 StreamTypes = require('./stream-types'),
12566 // return a percent-encoded representation of the specified byte range
12567 // @see http://en.wikipedia.org/wiki/Percent-encoding
12568 percentEncode = function(bytes, start, end) {
12569 var i, result = '';
12570 for (i = start; i < end; i++) {
12571 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
12572 }
12573 return result;
12574 },
12575 // return the string representation of the specified byte range,
12576 // interpreted as UTf-8.
12577 parseUtf8 = function(bytes, start, end) {
12578 return decodeURIComponent(percentEncode(bytes, start, end));
12579 },
12580 // return the string representation of the specified byte range,
12581 // interpreted as ISO-8859-1.
12582 parseIso88591 = function(bytes, start, end) {
12583 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
12584 },
12585 parseSyncSafeInteger = function(data) {
12586 return (data[0] << 21) |
12587 (data[1] << 14) |
12588 (data[2] << 7) |
12589 (data[3]);
12590 },
12591 tagParsers = {
12592 TXXX: function(tag) {
12593 var i;
12594 if (tag.data[0] !== 3) {
12595 // ignore frames with unrecognized character encodings
12596 return;
12597 }
12598
12599 for (i = 1; i < tag.data.length; i++) {
12600 if (tag.data[i] === 0) {
12601 // parse the text fields
12602 tag.description = parseUtf8(tag.data, 1, i);
12603 // do not include the null terminator in the tag value
12604 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
12605 break;
12606 }
12607 }
12608 tag.data = tag.value;
12609 },
12610 WXXX: function(tag) {
12611 var i;
12612 if (tag.data[0] !== 3) {
12613 // ignore frames with unrecognized character encodings
12614 return;
12615 }
12616
12617 for (i = 1; i < tag.data.length; i++) {
12618 if (tag.data[i] === 0) {
12619 // parse the description and URL fields
12620 tag.description = parseUtf8(tag.data, 1, i);
12621 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
12622 break;
12623 }
12624 }
12625 },
12626 PRIV: function(tag) {
12627 var i;
12628
12629 for (i = 0; i < tag.data.length; i++) {
12630 if (tag.data[i] === 0) {
12631 // parse the description and URL fields
12632 tag.owner = parseIso88591(tag.data, 0, i);
12633 break;
12634 }
12635 }
12636 tag.privateData = tag.data.subarray(i + 1);
12637 tag.data = tag.privateData;
12638 }
12639 },
12640 MetadataStream;
12641
12642MetadataStream = function(options) {
12643 var
12644 settings = {
12645 debug: !!(options && options.debug),
12646
12647 // the bytes of the program-level descriptor field in MP2T
12648 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
12649 // program element descriptors"
12650 descriptor: options && options.descriptor
12651 },
12652 // the total size in bytes of the ID3 tag being parsed
12653 tagSize = 0,
12654 // tag data that is not complete enough to be parsed
12655 buffer = [],
12656 // the total number of bytes currently in the buffer
12657 bufferSize = 0,
12658 i;
12659
12660 MetadataStream.prototype.init.call(this);
12661
12662 // calculate the text track in-band metadata track dispatch type
12663 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
12664 this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
12665 if (settings.descriptor) {
12666 for (i = 0; i < settings.descriptor.length; i++) {
12667 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
12668 }
12669 }
12670
12671 this.push = function(chunk) {
12672 var tag, frameStart, frameSize, frame, i, frameHeader;
12673 if (chunk.type !== 'timed-metadata') {
12674 return;
12675 }
12676
12677 // if data_alignment_indicator is set in the PES header,
12678 // we must have the start of a new ID3 tag. Assume anything
12679 // remaining in the buffer was malformed and throw it out
12680 if (chunk.dataAlignmentIndicator) {
12681 bufferSize = 0;
12682 buffer.length = 0;
12683 }
12684
12685 // ignore events that don't look like ID3 data
12686 if (buffer.length === 0 &&
12687 (chunk.data.length < 10 ||
12688 chunk.data[0] !== 'I'.charCodeAt(0) ||
12689 chunk.data[1] !== 'D'.charCodeAt(0) ||
12690 chunk.data[2] !== '3'.charCodeAt(0))) {
12691 if (settings.debug) {
12692 // eslint-disable-next-line no-console
12693 console.log('Skipping unrecognized metadata packet');
12694 }
12695 return;
12696 }
12697
12698 // add this chunk to the data we've collected so far
12699
12700 buffer.push(chunk);
12701 bufferSize += chunk.data.byteLength;
12702
12703 // grab the size of the entire frame from the ID3 header
12704 if (buffer.length === 1) {
12705 // the frame size is transmitted as a 28-bit integer in the
12706 // last four bytes of the ID3 header.
12707 // The most significant bit of each byte is dropped and the
12708 // results concatenated to recover the actual value.
12709 tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));
12710
12711 // ID3 reports the tag size excluding the header but it's more
12712 // convenient for our comparisons to include it
12713 tagSize += 10;
12714 }
12715
12716 // if the entire frame has not arrived, wait for more data
12717 if (bufferSize < tagSize) {
12718 return;
12719 }
12720
12721 // collect the entire frame so it can be parsed
12722 tag = {
12723 data: new Uint8Array(tagSize),
12724 frames: [],
12725 pts: buffer[0].pts,
12726 dts: buffer[0].dts
12727 };
12728 for (i = 0; i < tagSize;) {
12729 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
12730 i += buffer[0].data.byteLength;
12731 bufferSize -= buffer[0].data.byteLength;
12732 buffer.shift();
12733 }
12734
12735 // find the start of the first frame and the end of the tag
12736 frameStart = 10;
12737 if (tag.data[5] & 0x40) {
12738 // advance the frame start past the extended header
12739 frameStart += 4; // header size field
12740 frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));
12741
12742 // clip any padding off the end
12743 tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
12744 }
12745
12746 // parse one or more ID3 frames
12747 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
12748 do {
12749 // determine the number of bytes in this frame
12750 frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
12751 if (frameSize < 1) {
12752 // eslint-disable-next-line no-console
12753 return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
12754 }
12755 frameHeader = String.fromCharCode(tag.data[frameStart],
12756 tag.data[frameStart + 1],
12757 tag.data[frameStart + 2],
12758 tag.data[frameStart + 3]);
12759
12760
12761 frame = {
12762 id: frameHeader,
12763 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
12764 };
12765 frame.key = frame.id;
12766 if (tagParsers[frame.id]) {
12767 tagParsers[frame.id](frame);
12768
12769 // handle the special PRIV frame used to indicate the start
12770 // time for raw AAC data
12771 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
12772 var
12773 d = frame.data,
12774 size = ((d[3] & 0x01) << 30) |
12775 (d[4] << 22) |
12776 (d[5] << 14) |
12777 (d[6] << 6) |
12778 (d[7] >>> 2);
12779
12780 size *= 4;
12781 size += d[7] & 0x03;
12782 frame.timeStamp = size;
12783 // in raw AAC, all subsequent data will be timestamped based
12784 // on the value of this frame
12785 // we couldn't have known the appropriate pts and dts before
12786 // parsing this ID3 tag so set those values now
12787 if (tag.pts === undefined && tag.dts === undefined) {
12788 tag.pts = frame.timeStamp;
12789 tag.dts = frame.timeStamp;
12790 }
12791 this.trigger('timestamp', frame);
12792 }
12793 }
12794 tag.frames.push(frame);
12795
12796 frameStart += 10; // advance past the frame header
12797 frameStart += frameSize; // advance past the frame body
12798 } while (frameStart < tagSize);
12799 this.trigger('data', tag);
12800 };
12801};
12802MetadataStream.prototype = new Stream();
12803
12804module.exports = MetadataStream;
12805
12806},{"../utils/stream":60,"./stream-types":51}],50:[function(require,module,exports){
12807/**
12808 * mux.js
12809 *
12810 * Copyright (c) 2016 Brightcove
12811 * All rights reserved.
12812 *
12813 * Utilities to detect basic properties and metadata about TS Segments.
12814 */
12815'use strict';
12816
12817var StreamTypes = require('./stream-types.js');
12818
12819var parsePid = function(packet) {
12820 var pid = packet[1] & 0x1f;
12821 pid <<= 8;
12822 pid |= packet[2];
12823 return pid;
12824};
12825
12826var parsePayloadUnitStartIndicator = function(packet) {
12827 return !!(packet[1] & 0x40);
12828};
12829
12830var parseAdaptionField = function(packet) {
12831 var offset = 0;
12832 // if an adaption field is present, its length is specified by the
12833 // fifth byte of the TS packet header. The adaptation field is
12834 // used to add stuffing to PES packets that don't fill a complete
12835 // TS packet, and to specify some forms of timing and control data
12836 // that we do not currently use.
12837 if (((packet[3] & 0x30) >>> 4) > 0x01) {
12838 offset += packet[4] + 1;
12839 }
12840 return offset;
12841};
12842
12843var parseType = function(packet, pmtPid) {
12844 var pid = parsePid(packet);
12845 if (pid === 0) {
12846 return 'pat';
12847 } else if (pid === pmtPid) {
12848 return 'pmt';
12849 } else if (pmtPid) {
12850 return 'pes';
12851 }
12852 return null;
12853};
12854
12855var parsePat = function(packet) {
12856 var pusi = parsePayloadUnitStartIndicator(packet);
12857 var offset = 4 + parseAdaptionField(packet);
12858
12859 if (pusi) {
12860 offset += packet[offset] + 1;
12861 }
12862
12863 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
12864};
12865
12866var parsePmt = function(packet) {
12867 var programMapTable = {};
12868 var pusi = parsePayloadUnitStartIndicator(packet);
12869 var payloadOffset = 4 + parseAdaptionField(packet);
12870
12871 if (pusi) {
12872 payloadOffset += packet[payloadOffset] + 1;
12873 }
12874
12875 // PMTs can be sent ahead of the time when they should actually
12876 // take effect. We don't believe this should ever be the case
12877 // for HLS but we'll ignore "forward" PMT declarations if we see
12878 // them. Future PMT declarations have the current_next_indicator
12879 // set to zero.
12880 if (!(packet[payloadOffset + 5] & 0x01)) {
12881 return;
12882 }
12883
12884 var sectionLength, tableEnd, programInfoLength;
12885 // the mapping table ends at the end of the current section
12886 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
12887 tableEnd = 3 + sectionLength - 4;
12888
12889 // to determine where the table is, we have to figure out how
12890 // long the program info descriptors are
12891 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
12892
12893 // advance the offset to the first entry in the mapping table
12894 var offset = 12 + programInfoLength;
12895 while (offset < tableEnd) {
12896 var i = payloadOffset + offset;
12897 // add an entry that maps the elementary_pid to the stream_type
12898 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i];
12899
12900 // move to the next table entry
12901 // skip past the elementary stream descriptors, if present
12902 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
12903 }
12904 return programMapTable;
12905};
12906
12907var parsePesType = function(packet, programMapTable) {
12908 var pid = parsePid(packet);
12909 var type = programMapTable[pid];
12910 switch (type) {
12911 case StreamTypes.H264_STREAM_TYPE:
12912 return 'video';
12913 case StreamTypes.ADTS_STREAM_TYPE:
12914 return 'audio';
12915 case StreamTypes.METADATA_STREAM_TYPE:
12916 return 'timed-metadata';
12917 default:
12918 return null;
12919 }
12920};
12921
12922var parsePesTime = function(packet) {
12923 var pusi = parsePayloadUnitStartIndicator(packet);
12924 if (!pusi) {
12925 return null;
12926 }
12927
12928 var offset = 4 + parseAdaptionField(packet);
12929
12930 if (offset >= packet.byteLength) {
12931 // From the H 222.0 MPEG-TS spec
12932 // "For transport stream packets carrying PES packets, stuffing is needed when there
12933 // is insufficient PES packet data to completely fill the transport stream packet
12934 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
12935 // the sum of the lengths of the data elements in it, so that the payload bytes
12936 // remaining after the adaptation field exactly accommodates the available PES packet
12937 // data."
12938 //
12939 // If the offset is >= the length of the packet, then the packet contains no data
12940 // and instead is just adaption field stuffing bytes
12941 return null;
12942 }
12943
12944 var pes = null;
12945 var ptsDtsFlags;
12946
12947 // PES packets may be annotated with a PTS value, or a PTS value
12948 // and a DTS value. Determine what combination of values is
12949 // available to work with.
12950 ptsDtsFlags = packet[offset + 7];
12951
12952 // PTS and DTS are normally stored as a 33-bit number. Javascript
12953 // performs all bitwise operations on 32-bit integers but javascript
12954 // supports a much greater range (52-bits) of integer using standard
12955 // mathematical operations.
12956 // We construct a 31-bit value using bitwise operators over the 31
12957 // most significant bits and then multiply by 4 (equal to a left-shift
12958 // of 2) before we add the final 2 least significant bits of the
12959 // timestamp (equal to an OR.)
12960 if (ptsDtsFlags & 0xC0) {
12961 pes = {};
12962 // the PTS and DTS are not written out directly. For information
12963 // on how they are encoded, see
12964 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
12965 pes.pts = (packet[offset + 9] & 0x0E) << 27 |
12966 (packet[offset + 10] & 0xFF) << 20 |
12967 (packet[offset + 11] & 0xFE) << 12 |
12968 (packet[offset + 12] & 0xFF) << 5 |
12969 (packet[offset + 13] & 0xFE) >>> 3;
12970 pes.pts *= 4; // Left shift by 2
12971 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
12972 pes.dts = pes.pts;
12973 if (ptsDtsFlags & 0x40) {
12974 pes.dts = (packet[offset + 14] & 0x0E) << 27 |
12975 (packet[offset + 15] & 0xFF) << 20 |
12976 (packet[offset + 16] & 0xFE) << 12 |
12977 (packet[offset + 17] & 0xFF) << 5 |
12978 (packet[offset + 18] & 0xFE) >>> 3;
12979 pes.dts *= 4; // Left shift by 2
12980 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
12981 }
12982 }
12983 return pes;
12984};
12985
12986var parseNalUnitType = function(type) {
12987 switch (type) {
12988 case 0x05:
12989 return 'slice_layer_without_partitioning_rbsp_idr';
12990 case 0x06:
12991 return 'sei_rbsp';
12992 case 0x07:
12993 return 'seq_parameter_set_rbsp';
12994 case 0x08:
12995 return 'pic_parameter_set_rbsp';
12996 case 0x09:
12997 return 'access_unit_delimiter_rbsp';
12998 default:
12999 return null;
13000 }
13001};
13002
13003var videoPacketContainsKeyFrame = function(packet) {
13004 var offset = 4 + parseAdaptionField(packet);
13005 var frameBuffer = packet.subarray(offset);
13006 var frameI = 0;
13007 var frameSyncPoint = 0;
13008 var foundKeyFrame = false;
13009 var nalType;
13010
13011 // advance the sync point to a NAL start, if necessary
13012 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
13013 if (frameBuffer[frameSyncPoint + 2] === 1) {
13014 // the sync point is properly aligned
13015 frameI = frameSyncPoint + 5;
13016 break;
13017 }
13018 }
13019
13020 while (frameI < frameBuffer.byteLength) {
13021 // look at the current byte to determine if we've hit the end of
13022 // a NAL unit boundary
13023 switch (frameBuffer[frameI]) {
13024 case 0:
13025 // skip past non-sync sequences
13026 if (frameBuffer[frameI - 1] !== 0) {
13027 frameI += 2;
13028 break;
13029 } else if (frameBuffer[frameI - 2] !== 0) {
13030 frameI++;
13031 break;
13032 }
13033
13034 if (frameSyncPoint + 3 !== frameI - 2) {
13035 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
13036 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
13037 foundKeyFrame = true;
13038 }
13039 }
13040
13041 // drop trailing zeroes
13042 do {
13043 frameI++;
13044 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
13045 frameSyncPoint = frameI - 2;
13046 frameI += 3;
13047 break;
13048 case 1:
13049 // skip past non-sync sequences
13050 if (frameBuffer[frameI - 1] !== 0 ||
13051 frameBuffer[frameI - 2] !== 0) {
13052 frameI += 3;
13053 break;
13054 }
13055
13056 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
13057 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
13058 foundKeyFrame = true;
13059 }
13060 frameSyncPoint = frameI - 2;
13061 frameI += 3;
13062 break;
13063 default:
13064 // the current byte isn't a one or zero, so it cannot be part
13065 // of a sync sequence
13066 frameI += 3;
13067 break;
13068 }
13069 }
13070 frameBuffer = frameBuffer.subarray(frameSyncPoint);
13071 frameI -= frameSyncPoint;
13072 frameSyncPoint = 0;
13073 // parse the final nal
13074 if (frameBuffer && frameBuffer.byteLength > 3) {
13075 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
13076 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
13077 foundKeyFrame = true;
13078 }
13079 }
13080
13081 return foundKeyFrame;
13082};
13083
13084
13085module.exports = {
13086 parseType: parseType,
13087 parsePat: parsePat,
13088 parsePmt: parsePmt,
13089 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
13090 parsePesType: parsePesType,
13091 parsePesTime: parsePesTime,
13092 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
13093};
13094
13095},{"./stream-types.js":51}],51:[function(require,module,exports){
13096'use strict';
13097
13098module.exports = {
13099 H264_STREAM_TYPE: 0x1B,
13100 ADTS_STREAM_TYPE: 0x0F,
13101 METADATA_STREAM_TYPE: 0x15
13102};
13103
13104},{}],52:[function(require,module,exports){
13105/**
13106 * mux.js
13107 *
13108 * Copyright (c) 2016 Brightcove
13109 * All rights reserved.
13110 *
13111 * Accepts program elementary stream (PES) data events and corrects
13112 * decode and presentation time stamps to account for a rollover
13113 * of the 33 bit value.
13114 */
13115
13116'use strict';
13117
13118var Stream = require('../utils/stream');
13119
13120var MAX_TS = 8589934592;
13121
13122var RO_THRESH = 4294967296;
13123
13124var handleRollover = function(value, reference) {
13125 var direction = 1;
13126
13127 if (value > reference) {
13128 // If the current timestamp value is greater than our reference timestamp and we detect a
13129 // timestamp rollover, this means the roll over is happening in the opposite direction.
13130 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
13131 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
13132 // rollover point. In loading this segment, the timestamp values will be very large,
13133 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
13134 // the time stamp to be `value - 2^33`.
13135 direction = -1;
13136 }
13137
13138 // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
13139 // cause an incorrect adjustment.
13140 while (Math.abs(reference - value) > RO_THRESH) {
13141 value += (direction * MAX_TS);
13142 }
13143
13144 return value;
13145};
13146
13147var TimestampRolloverStream = function(type) {
13148 var lastDTS, referenceDTS;
13149
13150 TimestampRolloverStream.prototype.init.call(this);
13151
13152 this.type_ = type;
13153
13154 this.push = function(data) {
13155 if (data.type !== this.type_) {
13156 return;
13157 }
13158
13159 if (referenceDTS === undefined) {
13160 referenceDTS = data.dts;
13161 }
13162
13163 data.dts = handleRollover(data.dts, referenceDTS);
13164 data.pts = handleRollover(data.pts, referenceDTS);
13165
13166 lastDTS = data.dts;
13167
13168 this.trigger('data', data);
13169 };
13170
13171 this.flush = function() {
13172 referenceDTS = lastDTS;
13173 this.trigger('done');
13174 };
13175
13176 this.discontinuity = function() {
13177 referenceDTS = void 0;
13178 lastDTS = void 0;
13179 };
13180
13181};
13182
13183TimestampRolloverStream.prototype = new Stream();
13184
13185module.exports = {
13186 TimestampRolloverStream: TimestampRolloverStream,
13187 handleRollover: handleRollover
13188};
13189
13190},{"../utils/stream":60}],53:[function(require,module,exports){
13191module.exports = {
13192 generator: require('./mp4-generator'),
13193 Transmuxer: require('./transmuxer').Transmuxer,
13194 AudioSegmentStream: require('./transmuxer').AudioSegmentStream,
13195 VideoSegmentStream: require('./transmuxer').VideoSegmentStream
13196};
13197
13198},{"./mp4-generator":54,"./transmuxer":56}],54:[function(require,module,exports){
13199/**
13200 * mux.js
13201 *
13202 * Copyright (c) 2015 Brightcove
13203 * All rights reserved.
13204 *
13205 * Functions that generate fragmented MP4s suitable for use with Media
13206 * Source Extensions.
13207 */
13208'use strict';
13209
13210var UINT32_MAX = Math.pow(2, 32) - 1;
13211
13212var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd,
13213 trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex,
13214 trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR,
13215 AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS;
13216
13217// pre-calculate constants
13218(function() {
13219 var i;
13220 types = {
13221 avc1: [], // codingname
13222 avcC: [],
13223 btrt: [],
13224 dinf: [],
13225 dref: [],
13226 esds: [],
13227 ftyp: [],
13228 hdlr: [],
13229 mdat: [],
13230 mdhd: [],
13231 mdia: [],
13232 mfhd: [],
13233 minf: [],
13234 moof: [],
13235 moov: [],
13236 mp4a: [], // codingname
13237 mvex: [],
13238 mvhd: [],
13239 sdtp: [],
13240 smhd: [],
13241 stbl: [],
13242 stco: [],
13243 stsc: [],
13244 stsd: [],
13245 stsz: [],
13246 stts: [],
13247 styp: [],
13248 tfdt: [],
13249 tfhd: [],
13250 traf: [],
13251 trak: [],
13252 trun: [],
13253 trex: [],
13254 tkhd: [],
13255 vmhd: []
13256 };
13257
13258 // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
13259 // don't throw an error
13260 if (typeof Uint8Array === 'undefined') {
13261 return;
13262 }
13263
13264 for (i in types) {
13265 if (types.hasOwnProperty(i)) {
13266 types[i] = [
13267 i.charCodeAt(0),
13268 i.charCodeAt(1),
13269 i.charCodeAt(2),
13270 i.charCodeAt(3)
13271 ];
13272 }
13273 }
13274
13275 MAJOR_BRAND = new Uint8Array([
13276 'i'.charCodeAt(0),
13277 's'.charCodeAt(0),
13278 'o'.charCodeAt(0),
13279 'm'.charCodeAt(0)
13280 ]);
13281 AVC1_BRAND = new Uint8Array([
13282 'a'.charCodeAt(0),
13283 'v'.charCodeAt(0),
13284 'c'.charCodeAt(0),
13285 '1'.charCodeAt(0)
13286 ]);
13287 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
13288 VIDEO_HDLR = new Uint8Array([
13289 0x00, // version 0
13290 0x00, 0x00, 0x00, // flags
13291 0x00, 0x00, 0x00, 0x00, // pre_defined
13292 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
13293 0x00, 0x00, 0x00, 0x00, // reserved
13294 0x00, 0x00, 0x00, 0x00, // reserved
13295 0x00, 0x00, 0x00, 0x00, // reserved
13296 0x56, 0x69, 0x64, 0x65,
13297 0x6f, 0x48, 0x61, 0x6e,
13298 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
13299 ]);
13300 AUDIO_HDLR = new Uint8Array([
13301 0x00, // version 0
13302 0x00, 0x00, 0x00, // flags
13303 0x00, 0x00, 0x00, 0x00, // pre_defined
13304 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
13305 0x00, 0x00, 0x00, 0x00, // reserved
13306 0x00, 0x00, 0x00, 0x00, // reserved
13307 0x00, 0x00, 0x00, 0x00, // reserved
13308 0x53, 0x6f, 0x75, 0x6e,
13309 0x64, 0x48, 0x61, 0x6e,
13310 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
13311 ]);
13312 HDLR_TYPES = {
13313 video: VIDEO_HDLR,
13314 audio: AUDIO_HDLR
13315 };
13316 DREF = new Uint8Array([
13317 0x00, // version 0
13318 0x00, 0x00, 0x00, // flags
13319 0x00, 0x00, 0x00, 0x01, // entry_count
13320 0x00, 0x00, 0x00, 0x0c, // entry_size
13321 0x75, 0x72, 0x6c, 0x20, // 'url' type
13322 0x00, // version 0
13323 0x00, 0x00, 0x01 // entry_flags
13324 ]);
13325 SMHD = new Uint8Array([
13326 0x00, // version
13327 0x00, 0x00, 0x00, // flags
13328 0x00, 0x00, // balance, 0 means centered
13329 0x00, 0x00 // reserved
13330 ]);
13331 STCO = new Uint8Array([
13332 0x00, // version
13333 0x00, 0x00, 0x00, // flags
13334 0x00, 0x00, 0x00, 0x00 // entry_count
13335 ]);
13336 STSC = STCO;
13337 STSZ = new Uint8Array([
13338 0x00, // version
13339 0x00, 0x00, 0x00, // flags
13340 0x00, 0x00, 0x00, 0x00, // sample_size
13341 0x00, 0x00, 0x00, 0x00 // sample_count
13342 ]);
13343 STTS = STCO;
13344 VMHD = new Uint8Array([
13345 0x00, // version
13346 0x00, 0x00, 0x01, // flags
13347 0x00, 0x00, // graphicsmode
13348 0x00, 0x00,
13349 0x00, 0x00,
13350 0x00, 0x00 // opcolor
13351 ]);
13352}());
13353
13354box = function(type) {
13355 var
13356 payload = [],
13357 size = 0,
13358 i,
13359 result,
13360 view;
13361
13362 for (i = 1; i < arguments.length; i++) {
13363 payload.push(arguments[i]);
13364 }
13365
13366 i = payload.length;
13367
13368 // calculate the total size we need to allocate
13369 while (i--) {
13370 size += payload[i].byteLength;
13371 }
13372 result = new Uint8Array(size + 8);
13373 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
13374 view.setUint32(0, result.byteLength);
13375 result.set(type, 4);
13376
13377 // copy the payload into the result
13378 for (i = 0, size = 8; i < payload.length; i++) {
13379 result.set(payload[i], size);
13380 size += payload[i].byteLength;
13381 }
13382 return result;
13383};
13384
13385dinf = function() {
13386 return box(types.dinf, box(types.dref, DREF));
13387};
13388
13389esds = function(track) {
13390 return box(types.esds, new Uint8Array([
13391 0x00, // version
13392 0x00, 0x00, 0x00, // flags
13393
13394 // ES_Descriptor
13395 0x03, // tag, ES_DescrTag
13396 0x19, // length
13397 0x00, 0x00, // ES_ID
13398 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
13399
13400 // DecoderConfigDescriptor
13401 0x04, // tag, DecoderConfigDescrTag
13402 0x11, // length
13403 0x40, // object type
13404 0x15, // streamType
13405 0x00, 0x06, 0x00, // bufferSizeDB
13406 0x00, 0x00, 0xda, 0xc0, // maxBitrate
13407 0x00, 0x00, 0xda, 0xc0, // avgBitrate
13408
13409 // DecoderSpecificInfo
13410 0x05, // tag, DecoderSpecificInfoTag
13411 0x02, // length
13412 // ISO/IEC 14496-3, AudioSpecificConfig
13413 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
13414 (track.audioobjecttype << 3) | (track.samplingfrequencyindex >>> 1),
13415 (track.samplingfrequencyindex << 7) | (track.channelcount << 3),
13416 0x06, 0x01, 0x02 // GASpecificConfig
13417 ]));
13418};
13419
13420ftyp = function() {
13421 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
13422};
13423
13424hdlr = function(type) {
13425 return box(types.hdlr, HDLR_TYPES[type]);
13426};
13427mdat = function(data) {
13428 return box(types.mdat, data);
13429};
13430mdhd = function(track) {
13431 var result = new Uint8Array([
13432 0x00, // version 0
13433 0x00, 0x00, 0x00, // flags
13434 0x00, 0x00, 0x00, 0x02, // creation_time
13435 0x00, 0x00, 0x00, 0x03, // modification_time
13436 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
13437
13438 (track.duration >>> 24) & 0xFF,
13439 (track.duration >>> 16) & 0xFF,
13440 (track.duration >>> 8) & 0xFF,
13441 track.duration & 0xFF, // duration
13442 0x55, 0xc4, // 'und' language (undetermined)
13443 0x00, 0x00
13444 ]);
13445
13446 // Use the sample rate from the track metadata, when it is
13447 // defined. The sample rate can be parsed out of an ADTS header, for
13448 // instance.
13449 if (track.samplerate) {
13450 result[12] = (track.samplerate >>> 24) & 0xFF;
13451 result[13] = (track.samplerate >>> 16) & 0xFF;
13452 result[14] = (track.samplerate >>> 8) & 0xFF;
13453 result[15] = (track.samplerate) & 0xFF;
13454 }
13455
13456 return box(types.mdhd, result);
13457};
13458mdia = function(track) {
13459 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
13460};
13461mfhd = function(sequenceNumber) {
13462 return box(types.mfhd, new Uint8Array([
13463 0x00,
13464 0x00, 0x00, 0x00, // flags
13465 (sequenceNumber & 0xFF000000) >> 24,
13466 (sequenceNumber & 0xFF0000) >> 16,
13467 (sequenceNumber & 0xFF00) >> 8,
13468 sequenceNumber & 0xFF // sequence_number
13469 ]));
13470};
13471minf = function(track) {
13472 return box(types.minf,
13473 track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD),
13474 dinf(),
13475 stbl(track));
13476};
13477moof = function(sequenceNumber, tracks) {
13478 var
13479 trackFragments = [],
13480 i = tracks.length;
13481 // build traf boxes for each track fragment
13482 while (i--) {
13483 trackFragments[i] = traf(tracks[i]);
13484 }
13485 return box.apply(null, [
13486 types.moof,
13487 mfhd(sequenceNumber)
13488 ].concat(trackFragments));
13489};
13490/**
13491 * Returns a movie box.
13492 * @param tracks {array} the tracks associated with this movie
13493 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
13494 */
13495moov = function(tracks) {
13496 var
13497 i = tracks.length,
13498 boxes = [];
13499
13500 while (i--) {
13501 boxes[i] = trak(tracks[i]);
13502 }
13503
13504 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
13505};
13506mvex = function(tracks) {
13507 var
13508 i = tracks.length,
13509 boxes = [];
13510
13511 while (i--) {
13512 boxes[i] = trex(tracks[i]);
13513 }
13514 return box.apply(null, [types.mvex].concat(boxes));
13515};
13516mvhd = function(duration) {
13517 var
13518 bytes = new Uint8Array([
13519 0x00, // version 0
13520 0x00, 0x00, 0x00, // flags
13521 0x00, 0x00, 0x00, 0x01, // creation_time
13522 0x00, 0x00, 0x00, 0x02, // modification_time
13523 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
13524 (duration & 0xFF000000) >> 24,
13525 (duration & 0xFF0000) >> 16,
13526 (duration & 0xFF00) >> 8,
13527 duration & 0xFF, // duration
13528 0x00, 0x01, 0x00, 0x00, // 1.0 rate
13529 0x01, 0x00, // 1.0 volume
13530 0x00, 0x00, // reserved
13531 0x00, 0x00, 0x00, 0x00, // reserved
13532 0x00, 0x00, 0x00, 0x00, // reserved
13533 0x00, 0x01, 0x00, 0x00,
13534 0x00, 0x00, 0x00, 0x00,
13535 0x00, 0x00, 0x00, 0x00,
13536 0x00, 0x00, 0x00, 0x00,
13537 0x00, 0x01, 0x00, 0x00,
13538 0x00, 0x00, 0x00, 0x00,
13539 0x00, 0x00, 0x00, 0x00,
13540 0x00, 0x00, 0x00, 0x00,
13541 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
13542 0x00, 0x00, 0x00, 0x00,
13543 0x00, 0x00, 0x00, 0x00,
13544 0x00, 0x00, 0x00, 0x00,
13545 0x00, 0x00, 0x00, 0x00,
13546 0x00, 0x00, 0x00, 0x00,
13547 0x00, 0x00, 0x00, 0x00, // pre_defined
13548 0xff, 0xff, 0xff, 0xff // next_track_ID
13549 ]);
13550 return box(types.mvhd, bytes);
13551};
13552
13553sdtp = function(track) {
13554 var
13555 samples = track.samples || [],
13556 bytes = new Uint8Array(4 + samples.length),
13557 flags,
13558 i;
13559
13560 // leave the full box header (4 bytes) all zero
13561
13562 // write the sample table
13563 for (i = 0; i < samples.length; i++) {
13564 flags = samples[i].flags;
13565
13566 bytes[i + 4] = (flags.dependsOn << 4) |
13567 (flags.isDependedOn << 2) |
13568 (flags.hasRedundancy);
13569 }
13570
13571 return box(types.sdtp,
13572 bytes);
13573};
13574
13575stbl = function(track) {
13576 return box(types.stbl,
13577 stsd(track),
13578 box(types.stts, STTS),
13579 box(types.stsc, STSC),
13580 box(types.stsz, STSZ),
13581 box(types.stco, STCO));
13582};
13583
13584(function() {
13585 var videoSample, audioSample;
13586
13587 stsd = function(track) {
13588
13589 return box(types.stsd, new Uint8Array([
13590 0x00, // version 0
13591 0x00, 0x00, 0x00, // flags
13592 0x00, 0x00, 0x00, 0x01
13593 ]), track.type === 'video' ? videoSample(track) : audioSample(track));
13594 };
13595
13596 videoSample = function(track) {
13597 var
13598 sps = track.sps || [],
13599 pps = track.pps || [],
13600 sequenceParameterSets = [],
13601 pictureParameterSets = [],
13602 i;
13603
13604 // assemble the SPSs
13605 for (i = 0; i < sps.length; i++) {
13606 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
13607 sequenceParameterSets.push((sps[i].byteLength & 0xFF)); // sequenceParameterSetLength
13608 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
13609 }
13610
13611 // assemble the PPSs
13612 for (i = 0; i < pps.length; i++) {
13613 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
13614 pictureParameterSets.push((pps[i].byteLength & 0xFF));
13615 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
13616 }
13617
13618 return box(types.avc1, new Uint8Array([
13619 0x00, 0x00, 0x00,
13620 0x00, 0x00, 0x00, // reserved
13621 0x00, 0x01, // data_reference_index
13622 0x00, 0x00, // pre_defined
13623 0x00, 0x00, // reserved
13624 0x00, 0x00, 0x00, 0x00,
13625 0x00, 0x00, 0x00, 0x00,
13626 0x00, 0x00, 0x00, 0x00, // pre_defined
13627 (track.width & 0xff00) >> 8,
13628 track.width & 0xff, // width
13629 (track.height & 0xff00) >> 8,
13630 track.height & 0xff, // height
13631 0x00, 0x48, 0x00, 0x00, // horizresolution
13632 0x00, 0x48, 0x00, 0x00, // vertresolution
13633 0x00, 0x00, 0x00, 0x00, // reserved
13634 0x00, 0x01, // frame_count
13635 0x13,
13636 0x76, 0x69, 0x64, 0x65,
13637 0x6f, 0x6a, 0x73, 0x2d,
13638 0x63, 0x6f, 0x6e, 0x74,
13639 0x72, 0x69, 0x62, 0x2d,
13640 0x68, 0x6c, 0x73, 0x00,
13641 0x00, 0x00, 0x00, 0x00,
13642 0x00, 0x00, 0x00, 0x00,
13643 0x00, 0x00, 0x00, // compressorname
13644 0x00, 0x18, // depth = 24
13645 0x11, 0x11 // pre_defined = -1
13646 ]), box(types.avcC, new Uint8Array([
13647 0x01, // configurationVersion
13648 track.profileIdc, // AVCProfileIndication
13649 track.profileCompatibility, // profile_compatibility
13650 track.levelIdc, // AVCLevelIndication
13651 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
13652 ].concat([
13653 sps.length // numOfSequenceParameterSets
13654 ]).concat(sequenceParameterSets).concat([
13655 pps.length // numOfPictureParameterSets
13656 ]).concat(pictureParameterSets))), // "PPS"
13657 box(types.btrt, new Uint8Array([
13658 0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
13659 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
13660 0x00, 0x2d, 0xc6, 0xc0
13661 ])) // avgBitrate
13662 );
13663 };
13664
13665 audioSample = function(track) {
13666 return box(types.mp4a, new Uint8Array([
13667
13668 // SampleEntry, ISO/IEC 14496-12
13669 0x00, 0x00, 0x00,
13670 0x00, 0x00, 0x00, // reserved
13671 0x00, 0x01, // data_reference_index
13672
13673 // AudioSampleEntry, ISO/IEC 14496-12
13674 0x00, 0x00, 0x00, 0x00, // reserved
13675 0x00, 0x00, 0x00, 0x00, // reserved
13676 (track.channelcount & 0xff00) >> 8,
13677 (track.channelcount & 0xff), // channelcount
13678
13679 (track.samplesize & 0xff00) >> 8,
13680 (track.samplesize & 0xff), // samplesize
13681 0x00, 0x00, // pre_defined
13682 0x00, 0x00, // reserved
13683
13684 (track.samplerate & 0xff00) >> 8,
13685 (track.samplerate & 0xff),
13686 0x00, 0x00 // samplerate, 16.16
13687
13688 // MP4AudioSampleEntry, ISO/IEC 14496-14
13689 ]), esds(track));
13690 };
13691}());
13692
13693tkhd = function(track) {
13694 var result = new Uint8Array([
13695 0x00, // version 0
13696 0x00, 0x00, 0x07, // flags
13697 0x00, 0x00, 0x00, 0x00, // creation_time
13698 0x00, 0x00, 0x00, 0x00, // modification_time
13699 (track.id & 0xFF000000) >> 24,
13700 (track.id & 0xFF0000) >> 16,
13701 (track.id & 0xFF00) >> 8,
13702 track.id & 0xFF, // track_ID
13703 0x00, 0x00, 0x00, 0x00, // reserved
13704 (track.duration & 0xFF000000) >> 24,
13705 (track.duration & 0xFF0000) >> 16,
13706 (track.duration & 0xFF00) >> 8,
13707 track.duration & 0xFF, // duration
13708 0x00, 0x00, 0x00, 0x00,
13709 0x00, 0x00, 0x00, 0x00, // reserved
13710 0x00, 0x00, // layer
13711 0x00, 0x00, // alternate_group
13712 0x01, 0x00, // non-audio track volume
13713 0x00, 0x00, // reserved
13714 0x00, 0x01, 0x00, 0x00,
13715 0x00, 0x00, 0x00, 0x00,
13716 0x00, 0x00, 0x00, 0x00,
13717 0x00, 0x00, 0x00, 0x00,
13718 0x00, 0x01, 0x00, 0x00,
13719 0x00, 0x00, 0x00, 0x00,
13720 0x00, 0x00, 0x00, 0x00,
13721 0x00, 0x00, 0x00, 0x00,
13722 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
13723 (track.width & 0xFF00) >> 8,
13724 track.width & 0xFF,
13725 0x00, 0x00, // width
13726 (track.height & 0xFF00) >> 8,
13727 track.height & 0xFF,
13728 0x00, 0x00 // height
13729 ]);
13730
13731 return box(types.tkhd, result);
13732};
13733
13734/**
13735 * Generate a track fragment (traf) box. A traf box collects metadata
13736 * about tracks in a movie fragment (moof) box.
13737 */
13738traf = function(track) {
13739 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun,
13740 sampleDependencyTable, dataOffset,
13741 upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
13742
13743 trackFragmentHeader = box(types.tfhd, new Uint8Array([
13744 0x00, // version 0
13745 0x00, 0x00, 0x3a, // flags
13746 (track.id & 0xFF000000) >> 24,
13747 (track.id & 0xFF0000) >> 16,
13748 (track.id & 0xFF00) >> 8,
13749 (track.id & 0xFF), // track_ID
13750 0x00, 0x00, 0x00, 0x01, // sample_description_index
13751 0x00, 0x00, 0x00, 0x00, // default_sample_duration
13752 0x00, 0x00, 0x00, 0x00, // default_sample_size
13753 0x00, 0x00, 0x00, 0x00 // default_sample_flags
13754 ]));
13755
13756 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
13757 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
13758
13759 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([
13760 0x01, // version 1
13761 0x00, 0x00, 0x00, // flags
13762 // baseMediaDecodeTime
13763 (upperWordBaseMediaDecodeTime >>> 24) & 0xFF,
13764 (upperWordBaseMediaDecodeTime >>> 16) & 0xFF,
13765 (upperWordBaseMediaDecodeTime >>> 8) & 0xFF,
13766 upperWordBaseMediaDecodeTime & 0xFF,
13767 (lowerWordBaseMediaDecodeTime >>> 24) & 0xFF,
13768 (lowerWordBaseMediaDecodeTime >>> 16) & 0xFF,
13769 (lowerWordBaseMediaDecodeTime >>> 8) & 0xFF,
13770 lowerWordBaseMediaDecodeTime & 0xFF
13771 ]));
13772
13773 // the data offset specifies the number of bytes from the start of
13774 // the containing moof to the first payload byte of the associated
13775 // mdat
13776 dataOffset = (32 + // tfhd
13777 20 + // tfdt
13778 8 + // traf header
13779 16 + // mfhd
13780 8 + // moof header
13781 8); // mdat header
13782
13783 // audio tracks require less metadata
13784 if (track.type === 'audio') {
13785 trackFragmentRun = trun(track, dataOffset);
13786 return box(types.traf,
13787 trackFragmentHeader,
13788 trackFragmentDecodeTime,
13789 trackFragmentRun);
13790 }
13791
13792 // video tracks should contain an independent and disposable samples
13793 // box (sdtp)
13794 // generate one and adjust offsets to match
13795 sampleDependencyTable = sdtp(track);
13796 trackFragmentRun = trun(track,
13797 sampleDependencyTable.length + dataOffset);
13798 return box(types.traf,
13799 trackFragmentHeader,
13800 trackFragmentDecodeTime,
13801 trackFragmentRun,
13802 sampleDependencyTable);
13803};
13804
13805/**
13806 * Generate a track box.
13807 * @param track {object} a track definition
13808 * @return {Uint8Array} the track box
13809 */
13810trak = function(track) {
13811 track.duration = track.duration || 0xffffffff;
13812 return box(types.trak,
13813 tkhd(track),
13814 mdia(track));
13815};
13816
13817trex = function(track) {
13818 var result = new Uint8Array([
13819 0x00, // version 0
13820 0x00, 0x00, 0x00, // flags
13821 (track.id & 0xFF000000) >> 24,
13822 (track.id & 0xFF0000) >> 16,
13823 (track.id & 0xFF00) >> 8,
13824 (track.id & 0xFF), // track_ID
13825 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
13826 0x00, 0x00, 0x00, 0x00, // default_sample_duration
13827 0x00, 0x00, 0x00, 0x00, // default_sample_size
13828 0x00, 0x01, 0x00, 0x01 // default_sample_flags
13829 ]);
13830 // the last two bytes of default_sample_flags is the sample
13831 // degradation priority, a hint about the importance of this sample
13832 // relative to others. Lower the degradation priority for all sample
13833 // types other than video.
13834 if (track.type !== 'video') {
13835 result[result.length - 1] = 0x00;
13836 }
13837
13838 return box(types.trex, result);
13839};
13840
13841(function() {
13842 var audioTrun, videoTrun, trunHeader;
13843
13844 // This method assumes all samples are uniform. That is, if a
13845 // duration is present for the first sample, it will be present for
13846 // all subsequent samples.
13847 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
13848 trunHeader = function(samples, offset) {
13849 var durationPresent = 0, sizePresent = 0,
13850 flagsPresent = 0, compositionTimeOffset = 0;
13851
13852 // trun flag constants
13853 if (samples.length) {
13854 if (samples[0].duration !== undefined) {
13855 durationPresent = 0x1;
13856 }
13857 if (samples[0].size !== undefined) {
13858 sizePresent = 0x2;
13859 }
13860 if (samples[0].flags !== undefined) {
13861 flagsPresent = 0x4;
13862 }
13863 if (samples[0].compositionTimeOffset !== undefined) {
13864 compositionTimeOffset = 0x8;
13865 }
13866 }
13867
13868 return [
13869 0x00, // version 0
13870 0x00,
13871 durationPresent | sizePresent | flagsPresent | compositionTimeOffset,
13872 0x01, // flags
13873 (samples.length & 0xFF000000) >>> 24,
13874 (samples.length & 0xFF0000) >>> 16,
13875 (samples.length & 0xFF00) >>> 8,
13876 samples.length & 0xFF, // sample_count
13877 (offset & 0xFF000000) >>> 24,
13878 (offset & 0xFF0000) >>> 16,
13879 (offset & 0xFF00) >>> 8,
13880 offset & 0xFF // data_offset
13881 ];
13882 };
13883
13884 videoTrun = function(track, offset) {
13885 var bytes, samples, sample, i;
13886
13887 samples = track.samples || [];
13888 offset += 8 + 12 + (16 * samples.length);
13889
13890 bytes = trunHeader(samples, offset);
13891
13892 for (i = 0; i < samples.length; i++) {
13893 sample = samples[i];
13894 bytes = bytes.concat([
13895 (sample.duration & 0xFF000000) >>> 24,
13896 (sample.duration & 0xFF0000) >>> 16,
13897 (sample.duration & 0xFF00) >>> 8,
13898 sample.duration & 0xFF, // sample_duration
13899 (sample.size & 0xFF000000) >>> 24,
13900 (sample.size & 0xFF0000) >>> 16,
13901 (sample.size & 0xFF00) >>> 8,
13902 sample.size & 0xFF, // sample_size
13903 (sample.flags.isLeading << 2) | sample.flags.dependsOn,
13904 (sample.flags.isDependedOn << 6) |
13905 (sample.flags.hasRedundancy << 4) |
13906 (sample.flags.paddingValue << 1) |
13907 sample.flags.isNonSyncSample,
13908 sample.flags.degradationPriority & 0xF0 << 8,
13909 sample.flags.degradationPriority & 0x0F, // sample_flags
13910 (sample.compositionTimeOffset & 0xFF000000) >>> 24,
13911 (sample.compositionTimeOffset & 0xFF0000) >>> 16,
13912 (sample.compositionTimeOffset & 0xFF00) >>> 8,
13913 sample.compositionTimeOffset & 0xFF // sample_composition_time_offset
13914 ]);
13915 }
13916 return box(types.trun, new Uint8Array(bytes));
13917 };
13918
13919 audioTrun = function(track, offset) {
13920 var bytes, samples, sample, i;
13921
13922 samples = track.samples || [];
13923 offset += 8 + 12 + (8 * samples.length);
13924
13925 bytes = trunHeader(samples, offset);
13926
13927 for (i = 0; i < samples.length; i++) {
13928 sample = samples[i];
13929 bytes = bytes.concat([
13930 (sample.duration & 0xFF000000) >>> 24,
13931 (sample.duration & 0xFF0000) >>> 16,
13932 (sample.duration & 0xFF00) >>> 8,
13933 sample.duration & 0xFF, // sample_duration
13934 (sample.size & 0xFF000000) >>> 24,
13935 (sample.size & 0xFF0000) >>> 16,
13936 (sample.size & 0xFF00) >>> 8,
13937 sample.size & 0xFF]); // sample_size
13938 }
13939
13940 return box(types.trun, new Uint8Array(bytes));
13941 };
13942
13943 trun = function(track, offset) {
13944 if (track.type === 'audio') {
13945 return audioTrun(track, offset);
13946 }
13947
13948 return videoTrun(track, offset);
13949 };
13950}());
13951
13952module.exports = {
13953 ftyp: ftyp,
13954 mdat: mdat,
13955 moof: moof,
13956 moov: moov,
13957 initSegment: function(tracks) {
13958 var
13959 fileType = ftyp(),
13960 movie = moov(tracks),
13961 result;
13962
13963 result = new Uint8Array(fileType.byteLength + movie.byteLength);
13964 result.set(fileType);
13965 result.set(movie, fileType.byteLength);
13966 return result;
13967 }
13968};
13969
13970},{}],55:[function(require,module,exports){
13971/**
13972 * mux.js
13973 *
13974 * Copyright (c) 2015 Brightcove
13975 * All rights reserved.
13976 *
13977 * Utilities to detect basic properties and metadata about MP4s.
13978 */
13979'use strict';
13980
13981var findBox, parseType, timescale, startTime;
13982
13983// Find the data for a box specified by its path
13984findBox = function(data, path) {
13985 var results = [],
13986 i, size, type, end, subresults;
13987
13988 if (!path.length) {
13989 // short-circuit the search for empty paths
13990 return null;
13991 }
13992
13993 for (i = 0; i < data.byteLength;) {
13994 size = data[i] << 24;
13995 size |= data[i + 1] << 16;
13996 size |= data[i + 2] << 8;
13997 size |= data[i + 3];
13998
13999 type = parseType(data.subarray(i + 4, i + 8));
14000
14001 end = size > 1 ? i + size : data.byteLength;
14002
14003 if (type === path[0]) {
14004 if (path.length === 1) {
14005 // this is the end of the path and we've found the box we were
14006 // looking for
14007 results.push(data.subarray(i + 8, end));
14008 } else {
14009 // recursively search for the next box along the path
14010 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
14011 if (subresults.length) {
14012 results = results.concat(subresults);
14013 }
14014 }
14015 }
14016 i = end;
14017 }
14018
14019 // we've finished searching all of data
14020 return results;
14021};
14022
14023/**
14024 * Returns the string representation of an ASCII encoded four byte buffer.
14025 * @param buffer {Uint8Array} a four-byte buffer to translate
14026 * @return {string} the corresponding string
14027 */
14028parseType = function(buffer) {
14029 var result = '';
14030 result += String.fromCharCode(buffer[0]);
14031 result += String.fromCharCode(buffer[1]);
14032 result += String.fromCharCode(buffer[2]);
14033 result += String.fromCharCode(buffer[3]);
14034 return result;
14035};
14036
14037/**
14038 * Parses an MP4 initialization segment and extracts the timescale
14039 * values for any declared tracks. Timescale values indicate the
14040 * number of clock ticks per second to assume for time-based values
14041 * elsewhere in the MP4.
14042 *
14043 * To determine the start time of an MP4, you need two pieces of
14044 * information: the timescale unit and the earliest base media decode
14045 * time. Multiple timescales can be specified within an MP4 but the
14046 * base media decode time is always expressed in the timescale from
14047 * the media header box for the track:
14048 * ```
14049 * moov > trak > mdia > mdhd.timescale
14050 * ```
14051 * @param init {Uint8Array} the bytes of the init segment
14052 * @return {object} a hash of track ids to timescale values or null if
14053 * the init segment is malformed.
14054 */
14055timescale = function(init) {
14056 var
14057 result = {},
14058 traks = findBox(init, ['moov', 'trak']);
14059
14060 // mdhd timescale
14061 return traks.reduce(function(result, trak) {
14062 var tkhd, version, index, id, mdhd;
14063
14064 tkhd = findBox(trak, ['tkhd'])[0];
14065 if (!tkhd) {
14066 return null;
14067 }
14068 version = tkhd[0];
14069 index = version === 0 ? 12 : 20;
14070 id = tkhd[index] << 24 |
14071 tkhd[index + 1] << 16 |
14072 tkhd[index + 2] << 8 |
14073 tkhd[index + 3];
14074
14075 mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
14076 if (!mdhd) {
14077 return null;
14078 }
14079 version = mdhd[0];
14080 index = version === 0 ? 12 : 20;
14081 result[id] = mdhd[index] << 24 |
14082 mdhd[index + 1] << 16 |
14083 mdhd[index + 2] << 8 |
14084 mdhd[index + 3];
14085 return result;
14086 }, result);
14087};
14088
14089/**
14090 * Determine the base media decode start time, in seconds, for an MP4
14091 * fragment. If multiple fragments are specified, the earliest time is
14092 * returned.
14093 *
14094 * The base media decode time can be parsed from track fragment
14095 * metadata:
14096 * ```
14097 * moof > traf > tfdt.baseMediaDecodeTime
14098 * ```
14099 * It requires the timescale value from the mdhd to interpret.
14100 *
14101 * @param timescale {object} a hash of track ids to timescale values.
14102 * @return {number} the earliest base media decode start time for the
14103 * fragment, in seconds
14104 */
14105startTime = function(timescale, fragment) {
14106 var trafs, baseTimes, result;
14107
14108 // we need info from two childrend of each track fragment box
14109 trafs = findBox(fragment, ['moof', 'traf']);
14110
14111 // determine the start times for each track
14112 baseTimes = [].concat.apply([], trafs.map(function(traf) {
14113 return findBox(traf, ['tfhd']).map(function(tfhd) {
14114 var id, scale, baseTime;
14115
14116 // get the track id from the tfhd
14117 id = tfhd[4] << 24 |
14118 tfhd[5] << 16 |
14119 tfhd[6] << 8 |
14120 tfhd[7];
14121 // assume a 90kHz clock if no timescale was specified
14122 scale = timescale[id] || 90e3;
14123
14124 // get the base media decode time from the tfdt
14125 baseTime = findBox(traf, ['tfdt']).map(function(tfdt) {
14126 var version, result;
14127
14128 version = tfdt[0];
14129 result = tfdt[4] << 24 |
14130 tfdt[5] << 16 |
14131 tfdt[6] << 8 |
14132 tfdt[7];
14133 if (version === 1) {
14134 result *= Math.pow(2, 32);
14135 result += tfdt[8] << 24 |
14136 tfdt[9] << 16 |
14137 tfdt[10] << 8 |
14138 tfdt[11];
14139 }
14140 return result;
14141 })[0];
14142 baseTime = baseTime || Infinity;
14143
14144 // convert base time to seconds
14145 return baseTime / scale;
14146 });
14147 }));
14148
14149 // return the minimum
14150 result = Math.min.apply(null, baseTimes);
14151 return isFinite(result) ? result : 0;
14152};
14153
14154module.exports = {
14155 parseType: parseType,
14156 timescale: timescale,
14157 startTime: startTime
14158};
14159
14160},{}],56:[function(require,module,exports){
14161/**
14162 * mux.js
14163 *
14164 * Copyright (c) 2015 Brightcove
14165 * All rights reserved.
14166 *
14167 * A stream-based mp2t to mp4 converter. This utility can be used to
14168 * deliver mp4s to a SourceBuffer on platforms that support native
14169 * Media Source Extensions.
14170 */
14171'use strict';
14172
14173var Stream = require('../utils/stream.js');
14174var mp4 = require('./mp4-generator.js');
14175var m2ts = require('../m2ts/m2ts.js');
14176var AdtsStream = require('../codecs/adts.js');
14177var H264Stream = require('../codecs/h264').H264Stream;
14178var AacStream = require('../aac');
14179var coneOfSilence = require('../data/silence');
14180var clock = require('../utils/clock');
14181
14182// constants
14183var AUDIO_PROPERTIES = [
14184 'audioobjecttype',
14185 'channelcount',
14186 'samplerate',
14187 'samplingfrequencyindex',
14188 'samplesize'
14189];
14190
14191var VIDEO_PROPERTIES = [
14192 'width',
14193 'height',
14194 'profileIdc',
14195 'levelIdc',
14196 'profileCompatibility'
14197];
14198
14199var ONE_SECOND_IN_TS = 90000; // 90kHz clock
14200
14201// object types
14202var VideoSegmentStream, AudioSegmentStream, Transmuxer, CoalesceStream;
14203
14204// Helper functions
14205var
14206 createDefaultSample,
14207 isLikelyAacData,
14208 collectDtsInfo,
14209 clearDtsInfo,
14210 calculateTrackBaseMediaDecodeTime,
14211 arrayEquals,
14212 sumFrameByteLengths;
14213
14214/**
14215 * Default sample object
14216 * see ISO/IEC 14496-12:2012, section 8.6.4.3
14217 */
14218createDefaultSample = function() {
14219 return {
14220 size: 0,
14221 flags: {
14222 isLeading: 0,
14223 dependsOn: 1,
14224 isDependedOn: 0,
14225 hasRedundancy: 0,
14226 degradationPriority: 0
14227 }
14228 };
14229};
14230
14231isLikelyAacData = function(data) {
14232 if ((data[0] === 'I'.charCodeAt(0)) &&
14233 (data[1] === 'D'.charCodeAt(0)) &&
14234 (data[2] === '3'.charCodeAt(0))) {
14235 return true;
14236 }
14237 return false;
14238};
14239
14240/**
14241 * Compare two arrays (even typed) for same-ness
14242 */
14243arrayEquals = function(a, b) {
14244 var
14245 i;
14246
14247 if (a.length !== b.length) {
14248 return false;
14249 }
14250
14251 // compare the value of each element in the array
14252 for (i = 0; i < a.length; i++) {
14253 if (a[i] !== b[i]) {
14254 return false;
14255 }
14256 }
14257
14258 return true;
14259};
14260
14261/**
14262 * Sum the `byteLength` properties of the data in each AAC frame
14263 */
14264sumFrameByteLengths = function(array) {
14265 var
14266 i,
14267 currentObj,
14268 sum = 0;
14269
14270 // sum the byteLength's all each nal unit in the frame
14271 for (i = 0; i < array.length; i++) {
14272 currentObj = array[i];
14273 sum += currentObj.data.byteLength;
14274 }
14275
14276 return sum;
14277};
14278
14279/**
14280 * Constructs a single-track, ISO BMFF media segment from AAC data
14281 * events. The output of this stream can be fed to a SourceBuffer
14282 * configured with a suitable initialization segment.
14283 */
14284AudioSegmentStream = function(track) {
14285 var
14286 adtsFrames = [],
14287 sequenceNumber = 0,
14288 earliestAllowedDts = 0,
14289 audioAppendStartTs = 0,
14290 videoBaseMediaDecodeTime = Infinity;
14291
14292 AudioSegmentStream.prototype.init.call(this);
14293
14294 this.push = function(data) {
14295 collectDtsInfo(track, data);
14296
14297 if (track) {
14298 AUDIO_PROPERTIES.forEach(function(prop) {
14299 track[prop] = data[prop];
14300 });
14301 }
14302
14303 // buffer audio data until end() is called
14304 adtsFrames.push(data);
14305 };
14306
14307 this.setEarliestDts = function(earliestDts) {
14308 earliestAllowedDts = earliestDts - track.timelineStartInfo.baseMediaDecodeTime;
14309 };
14310
14311 this.setVideoBaseMediaDecodeTime = function(baseMediaDecodeTime) {
14312 videoBaseMediaDecodeTime = baseMediaDecodeTime;
14313 };
14314
14315 this.setAudioAppendStart = function(timestamp) {
14316 audioAppendStartTs = timestamp;
14317 };
14318
14319 this.flush = function() {
14320 var
14321 frames,
14322 moof,
14323 mdat,
14324 boxes;
14325
14326 // return early if no audio data has been observed
14327 if (adtsFrames.length === 0) {
14328 this.trigger('done', 'AudioSegmentStream');
14329 return;
14330 }
14331
14332 frames = this.trimAdtsFramesByEarliestDts_(adtsFrames);
14333 track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
14334
14335 this.prefixWithSilence_(track, frames);
14336
14337 // we have to build the index from byte locations to
14338 // samples (that is, adts frames) in the audio data
14339 track.samples = this.generateSampleTable_(frames);
14340
14341 // concatenate the audio data to constuct the mdat
14342 mdat = mp4.mdat(this.concatenateFrameData_(frames));
14343
14344 adtsFrames = [];
14345
14346 moof = mp4.moof(sequenceNumber, [track]);
14347 boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
14348
14349 // bump the sequence number for next time
14350 sequenceNumber++;
14351
14352 boxes.set(moof);
14353 boxes.set(mdat, moof.byteLength);
14354
14355 clearDtsInfo(track);
14356
14357 this.trigger('data', {track: track, boxes: boxes});
14358 this.trigger('done', 'AudioSegmentStream');
14359 };
14360
14361 // Possibly pad (prefix) the audio track with silence if appending this track
14362 // would lead to the introduction of a gap in the audio buffer
14363 this.prefixWithSilence_ = function(track, frames) {
14364 var
14365 baseMediaDecodeTimeTs,
14366 frameDuration = 0,
14367 audioGapDuration = 0,
14368 audioFillFrameCount = 0,
14369 audioFillDuration = 0,
14370 silentFrame,
14371 i;
14372
14373 if (!frames.length) {
14374 return;
14375 }
14376
14377 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate);
14378 // determine frame clock duration based on sample rate, round up to avoid overfills
14379 frameDuration = Math.ceil(ONE_SECOND_IN_TS / (track.samplerate / 1024));
14380
14381 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
14382 // insert the shortest possible amount (audio gap or audio to video gap)
14383 audioGapDuration =
14384 baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime);
14385 // number of full frames in the audio gap
14386 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
14387 audioFillDuration = audioFillFrameCount * frameDuration;
14388 }
14389
14390 // don't attempt to fill gaps smaller than a single frame or larger
14391 // than a half second
14392 if (audioFillFrameCount < 1 || audioFillDuration > ONE_SECOND_IN_TS / 2) {
14393 return;
14394 }
14395
14396 silentFrame = coneOfSilence[track.samplerate];
14397
14398 if (!silentFrame) {
14399 // we don't have a silent frame pregenerated for the sample rate, so use a frame
14400 // from the content instead
14401 silentFrame = frames[0].data;
14402 }
14403
14404 for (i = 0; i < audioFillFrameCount; i++) {
14405 frames.splice(i, 0, {
14406 data: silentFrame
14407 });
14408 }
14409
14410 track.baseMediaDecodeTime -=
14411 Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
14412 };
14413
14414 // If the audio segment extends before the earliest allowed dts
14415 // value, remove AAC frames until starts at or after the earliest
14416 // allowed DTS so that we don't end up with a negative baseMedia-
14417 // DecodeTime for the audio track
14418 this.trimAdtsFramesByEarliestDts_ = function(adtsFrames) {
14419 if (track.minSegmentDts >= earliestAllowedDts) {
14420 return adtsFrames;
14421 }
14422
14423 // We will need to recalculate the earliest segment Dts
14424 track.minSegmentDts = Infinity;
14425
14426 return adtsFrames.filter(function(currentFrame) {
14427 // If this is an allowed frame, keep it and record it's Dts
14428 if (currentFrame.dts >= earliestAllowedDts) {
14429 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
14430 track.minSegmentPts = track.minSegmentDts;
14431 return true;
14432 }
14433 // Otherwise, discard it
14434 return false;
14435 });
14436 };
14437
14438 // generate the track's raw mdat data from an array of frames
14439 this.generateSampleTable_ = function(frames) {
14440 var
14441 i,
14442 currentFrame,
14443 samples = [];
14444
14445 for (i = 0; i < frames.length; i++) {
14446 currentFrame = frames[i];
14447 samples.push({
14448 size: currentFrame.data.byteLength,
14449 duration: 1024 // For AAC audio, all samples contain 1024 samples
14450 });
14451 }
14452 return samples;
14453 };
14454
14455 // generate the track's sample table from an array of frames
14456 this.concatenateFrameData_ = function(frames) {
14457 var
14458 i,
14459 currentFrame,
14460 dataOffset = 0,
14461 data = new Uint8Array(sumFrameByteLengths(frames));
14462
14463 for (i = 0; i < frames.length; i++) {
14464 currentFrame = frames[i];
14465
14466 data.set(currentFrame.data, dataOffset);
14467 dataOffset += currentFrame.data.byteLength;
14468 }
14469 return data;
14470 };
14471};
14472
14473AudioSegmentStream.prototype = new Stream();
14474
14475/**
14476 * Constructs a single-track, ISO BMFF media segment from H264 data
14477 * events. The output of this stream can be fed to a SourceBuffer
14478 * configured with a suitable initialization segment.
14479 * @param track {object} track metadata configuration
14480 */
14481VideoSegmentStream = function(track) {
14482 var
14483 sequenceNumber = 0,
14484 nalUnits = [],
14485 config,
14486 pps;
14487
14488 VideoSegmentStream.prototype.init.call(this);
14489
14490 delete track.minPTS;
14491
14492 this.gopCache_ = [];
14493
14494 this.push = function(nalUnit) {
14495 collectDtsInfo(track, nalUnit);
14496
14497 // record the track config
14498 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
14499 config = nalUnit.config;
14500 track.sps = [nalUnit.data];
14501
14502 VIDEO_PROPERTIES.forEach(function(prop) {
14503 track[prop] = config[prop];
14504 }, this);
14505 }
14506
14507 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' &&
14508 !pps) {
14509 pps = nalUnit.data;
14510 track.pps = [nalUnit.data];
14511 }
14512
14513 // buffer video until flush() is called
14514 nalUnits.push(nalUnit);
14515 };
14516
14517 this.flush = function() {
14518 var
14519 frames,
14520 gopForFusion,
14521 gops,
14522 moof,
14523 mdat,
14524 boxes;
14525
14526 // Throw away nalUnits at the start of the byte stream until
14527 // we find the first AUD
14528 while (nalUnits.length) {
14529 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
14530 break;
14531 }
14532 nalUnits.shift();
14533 }
14534
14535 // Return early if no video data has been observed
14536 if (nalUnits.length === 0) {
14537 this.resetStream_();
14538 this.trigger('done', 'VideoSegmentStream');
14539 return;
14540 }
14541
14542 // Organize the raw nal-units into arrays that represent
14543 // higher-level constructs such as frames and gops
14544 // (group-of-pictures)
14545 frames = this.groupNalsIntoFrames_(nalUnits);
14546 gops = this.groupFramesIntoGops_(frames);
14547
14548 // If the first frame of this fragment is not a keyframe we have
14549 // a problem since MSE (on Chrome) requires a leading keyframe.
14550 //
14551 // We have two approaches to repairing this situation:
14552 // 1) GOP-FUSION:
14553 // This is where we keep track of the GOPS (group-of-pictures)
14554 // from previous fragments and attempt to find one that we can
14555 // prepend to the current fragment in order to create a valid
14556 // fragment.
14557 // 2) KEYFRAME-PULLING:
14558 // Here we search for the first keyframe in the fragment and
14559 // throw away all the frames between the start of the fragment
14560 // and that keyframe. We then extend the duration and pull the
14561 // PTS of the keyframe forward so that it covers the time range
14562 // of the frames that were disposed of.
14563 //
14564 // #1 is far prefereable over #2 which can cause "stuttering" but
14565 // requires more things to be just right.
14566 if (!gops[0][0].keyFrame) {
14567 // Search for a gop for fusion from our gopCache
14568 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
14569
14570 if (gopForFusion) {
14571 gops.unshift(gopForFusion);
14572 // Adjust Gops' metadata to account for the inclusion of the
14573 // new gop at the beginning
14574 gops.byteLength += gopForFusion.byteLength;
14575 gops.nalCount += gopForFusion.nalCount;
14576 gops.pts = gopForFusion.pts;
14577 gops.dts = gopForFusion.dts;
14578 gops.duration += gopForFusion.duration;
14579 } else {
14580 // If we didn't find a candidate gop fall back to keyrame-pulling
14581 gops = this.extendFirstKeyFrame_(gops);
14582 }
14583 }
14584 collectDtsInfo(track, gops);
14585
14586 // First, we have to build the index from byte locations to
14587 // samples (that is, frames) in the video data
14588 track.samples = this.generateSampleTable_(gops);
14589
14590 // Concatenate the video data and construct the mdat
14591 mdat = mp4.mdat(this.concatenateNalData_(gops));
14592
14593 // save all the nals in the last GOP into the gop cache
14594 this.gopCache_.unshift({
14595 gop: gops.pop(),
14596 pps: track.pps,
14597 sps: track.sps
14598 });
14599
14600 // Keep a maximum of 6 GOPs in the cache
14601 this.gopCache_.length = Math.min(6, this.gopCache_.length);
14602
14603 // Clear nalUnits
14604 nalUnits = [];
14605
14606 track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
14607
14608 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
14609 this.trigger('timelineStartInfo', track.timelineStartInfo);
14610
14611 moof = mp4.moof(sequenceNumber, [track]);
14612
14613 // it would be great to allocate this array up front instead of
14614 // throwing away hundreds of media segment fragments
14615 boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
14616
14617 // Bump the sequence number for next time
14618 sequenceNumber++;
14619
14620 boxes.set(moof);
14621 boxes.set(mdat, moof.byteLength);
14622
14623 this.trigger('data', {track: track, boxes: boxes});
14624
14625 this.resetStream_();
14626
14627 // Continue with the flush process now
14628 this.trigger('done', 'VideoSegmentStream');
14629 };
14630
14631 this.resetStream_ = function() {
14632 clearDtsInfo(track);
14633
14634 // reset config and pps because they may differ across segments
14635 // for instance, when we are rendition switching
14636 config = undefined;
14637 pps = undefined;
14638 };
14639
14640 // Search for a candidate Gop for gop-fusion from the gop cache and
14641 // return it or return null if no good candidate was found
14642 this.getGopForFusion_ = function(nalUnit) {
14643 var
14644 halfSecond = 45000, // Half-a-second in a 90khz clock
14645 allowableOverlap = 10000, // About 3 frames @ 30fps
14646 nearestDistance = Infinity,
14647 dtsDistance,
14648 nearestGopObj,
14649 currentGop,
14650 currentGopObj,
14651 i;
14652
14653 // Search for the GOP nearest to the beginning of this nal unit
14654 for (i = 0; i < this.gopCache_.length; i++) {
14655 currentGopObj = this.gopCache_[i];
14656 currentGop = currentGopObj.gop;
14657
14658 // Reject Gops with different SPS or PPS
14659 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) ||
14660 !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
14661 continue;
14662 }
14663
14664 // Reject Gops that would require a negative baseMediaDecodeTime
14665 if (currentGop.dts < track.timelineStartInfo.dts) {
14666 continue;
14667 }
14668
14669 // The distance between the end of the gop and the start of the nalUnit
14670 dtsDistance = (nalUnit.dts - currentGop.dts) - currentGop.duration;
14671
14672 // Only consider GOPS that start before the nal unit and end within
14673 // a half-second of the nal unit
14674 if (dtsDistance >= -allowableOverlap &&
14675 dtsDistance <= halfSecond) {
14676
14677 // Always use the closest GOP we found if there is more than
14678 // one candidate
14679 if (!nearestGopObj ||
14680 nearestDistance > dtsDistance) {
14681 nearestGopObj = currentGopObj;
14682 nearestDistance = dtsDistance;
14683 }
14684 }
14685 }
14686
14687 if (nearestGopObj) {
14688 return nearestGopObj.gop;
14689 }
14690 return null;
14691 };
14692
14693 this.extendFirstKeyFrame_ = function(gops) {
14694 var currentGop;
14695
14696 if (!gops[0][0].keyFrame && gops.length > 1) {
14697 // Remove the first GOP
14698 currentGop = gops.shift();
14699
14700 gops.byteLength -= currentGop.byteLength;
14701 gops.nalCount -= currentGop.nalCount;
14702
14703 // Extend the first frame of what is now the
14704 // first gop to cover the time period of the
14705 // frames we just removed
14706 gops[0][0].dts = currentGop.dts;
14707 gops[0][0].pts = currentGop.pts;
14708 gops[0][0].duration += currentGop.duration;
14709 }
14710
14711 return gops;
14712 };
14713
14714 // Convert an array of nal units into an array of frames with each frame being
14715 // composed of the nal units that make up that frame
14716 // Also keep track of cummulative data about the frame from the nal units such
14717 // as the frame duration, starting pts, etc.
14718 this.groupNalsIntoFrames_ = function(nalUnits) {
14719 var
14720 i,
14721 currentNal,
14722 currentFrame = [],
14723 frames = [];
14724
14725 currentFrame.byteLength = 0;
14726
14727 for (i = 0; i < nalUnits.length; i++) {
14728 currentNal = nalUnits[i];
14729
14730 // Split on 'aud'-type nal units
14731 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
14732 // Since the very first nal unit is expected to be an AUD
14733 // only push to the frames array when currentFrame is not empty
14734 if (currentFrame.length) {
14735 currentFrame.duration = currentNal.dts - currentFrame.dts;
14736 frames.push(currentFrame);
14737 }
14738 currentFrame = [currentNal];
14739 currentFrame.byteLength = currentNal.data.byteLength;
14740 currentFrame.pts = currentNal.pts;
14741 currentFrame.dts = currentNal.dts;
14742 } else {
14743 // Specifically flag key frames for ease of use later
14744 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
14745 currentFrame.keyFrame = true;
14746 }
14747 currentFrame.duration = currentNal.dts - currentFrame.dts;
14748 currentFrame.byteLength += currentNal.data.byteLength;
14749 currentFrame.push(currentNal);
14750 }
14751 }
14752
14753 // For the last frame, use the duration of the previous frame if we
14754 // have nothing better to go on
14755 if (frames.length &&
14756 (!currentFrame.duration ||
14757 currentFrame.duration <= 0)) {
14758 currentFrame.duration = frames[frames.length - 1].duration;
14759 }
14760
14761 // Push the final frame
14762 frames.push(currentFrame);
14763 return frames;
14764 };
14765
14766 // Convert an array of frames into an array of Gop with each Gop being composed
14767 // of the frames that make up that Gop
14768 // Also keep track of cummulative data about the Gop from the frames such as the
14769 // Gop duration, starting pts, etc.
14770 this.groupFramesIntoGops_ = function(frames) {
14771 var
14772 i,
14773 currentFrame,
14774 currentGop = [],
14775 gops = [];
14776
14777 // We must pre-set some of the values on the Gop since we
14778 // keep running totals of these values
14779 currentGop.byteLength = 0;
14780 currentGop.nalCount = 0;
14781 currentGop.duration = 0;
14782 currentGop.pts = frames[0].pts;
14783 currentGop.dts = frames[0].dts;
14784
14785 // store some metadata about all the Gops
14786 gops.byteLength = 0;
14787 gops.nalCount = 0;
14788 gops.duration = 0;
14789 gops.pts = frames[0].pts;
14790 gops.dts = frames[0].dts;
14791
14792 for (i = 0; i < frames.length; i++) {
14793 currentFrame = frames[i];
14794
14795 if (currentFrame.keyFrame) {
14796 // Since the very first frame is expected to be an keyframe
14797 // only push to the gops array when currentGop is not empty
14798 if (currentGop.length) {
14799 gops.push(currentGop);
14800 gops.byteLength += currentGop.byteLength;
14801 gops.nalCount += currentGop.nalCount;
14802 gops.duration += currentGop.duration;
14803 }
14804
14805 currentGop = [currentFrame];
14806 currentGop.nalCount = currentFrame.length;
14807 currentGop.byteLength = currentFrame.byteLength;
14808 currentGop.pts = currentFrame.pts;
14809 currentGop.dts = currentFrame.dts;
14810 currentGop.duration = currentFrame.duration;
14811 } else {
14812 currentGop.duration += currentFrame.duration;
14813 currentGop.nalCount += currentFrame.length;
14814 currentGop.byteLength += currentFrame.byteLength;
14815 currentGop.push(currentFrame);
14816 }
14817 }
14818
14819 if (gops.length && currentGop.duration <= 0) {
14820 currentGop.duration = gops[gops.length - 1].duration;
14821 }
14822 gops.byteLength += currentGop.byteLength;
14823 gops.nalCount += currentGop.nalCount;
14824 gops.duration += currentGop.duration;
14825
14826 // push the final Gop
14827 gops.push(currentGop);
14828 return gops;
14829 };
14830
14831 // generate the track's sample table from an array of gops
14832 this.generateSampleTable_ = function(gops, baseDataOffset) {
14833 var
14834 h, i,
14835 sample,
14836 currentGop,
14837 currentFrame,
14838 dataOffset = baseDataOffset || 0,
14839 samples = [];
14840
14841 for (h = 0; h < gops.length; h++) {
14842 currentGop = gops[h];
14843
14844 for (i = 0; i < currentGop.length; i++) {
14845 currentFrame = currentGop[i];
14846
14847 sample = createDefaultSample();
14848
14849 sample.dataOffset = dataOffset;
14850 sample.compositionTimeOffset = currentFrame.pts - currentFrame.dts;
14851 sample.duration = currentFrame.duration;
14852 sample.size = 4 * currentFrame.length; // Space for nal unit size
14853 sample.size += currentFrame.byteLength;
14854
14855 if (currentFrame.keyFrame) {
14856 sample.flags.dependsOn = 2;
14857 }
14858
14859 dataOffset += sample.size;
14860
14861 samples.push(sample);
14862 }
14863 }
14864 return samples;
14865 };
14866
14867 // generate the track's raw mdat data from an array of gops
14868 this.concatenateNalData_ = function(gops) {
14869 var
14870 h, i, j,
14871 currentGop,
14872 currentFrame,
14873 currentNal,
14874 dataOffset = 0,
14875 nalsByteLength = gops.byteLength,
14876 numberOfNals = gops.nalCount,
14877 totalByteLength = nalsByteLength + 4 * numberOfNals,
14878 data = new Uint8Array(totalByteLength),
14879 view = new DataView(data.buffer);
14880
14881 // For each Gop..
14882 for (h = 0; h < gops.length; h++) {
14883 currentGop = gops[h];
14884
14885 // For each Frame..
14886 for (i = 0; i < currentGop.length; i++) {
14887 currentFrame = currentGop[i];
14888
14889 // For each NAL..
14890 for (j = 0; j < currentFrame.length; j++) {
14891 currentNal = currentFrame[j];
14892
14893 view.setUint32(dataOffset, currentNal.data.byteLength);
14894 dataOffset += 4;
14895 data.set(currentNal.data, dataOffset);
14896 dataOffset += currentNal.data.byteLength;
14897 }
14898 }
14899 }
14900 return data;
14901 };
14902};
14903
14904VideoSegmentStream.prototype = new Stream();
14905
14906/**
14907 * Store information about the start and end of the track and the
14908 * duration for each frame/sample we process in order to calculate
14909 * the baseMediaDecodeTime
14910 */
14911collectDtsInfo = function(track, data) {
14912 if (typeof data.pts === 'number') {
14913 if (track.timelineStartInfo.pts === undefined) {
14914 track.timelineStartInfo.pts = data.pts;
14915 }
14916
14917 if (track.minSegmentPts === undefined) {
14918 track.minSegmentPts = data.pts;
14919 } else {
14920 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
14921 }
14922
14923 if (track.maxSegmentPts === undefined) {
14924 track.maxSegmentPts = data.pts;
14925 } else {
14926 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
14927 }
14928 }
14929
14930 if (typeof data.dts === 'number') {
14931 if (track.timelineStartInfo.dts === undefined) {
14932 track.timelineStartInfo.dts = data.dts;
14933 }
14934
14935 if (track.minSegmentDts === undefined) {
14936 track.minSegmentDts = data.dts;
14937 } else {
14938 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
14939 }
14940
14941 if (track.maxSegmentDts === undefined) {
14942 track.maxSegmentDts = data.dts;
14943 } else {
14944 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
14945 }
14946 }
14947};
14948
14949/**
14950 * Clear values used to calculate the baseMediaDecodeTime between
14951 * tracks
14952 */
14953clearDtsInfo = function(track) {
14954 delete track.minSegmentDts;
14955 delete track.maxSegmentDts;
14956 delete track.minSegmentPts;
14957 delete track.maxSegmentPts;
14958};
14959
14960/**
14961 * Calculate the track's baseMediaDecodeTime based on the earliest
14962 * DTS the transmuxer has ever seen and the minimum DTS for the
14963 * current track
14964 */
14965calculateTrackBaseMediaDecodeTime = function(track) {
14966 var
14967 baseMediaDecodeTime,
14968 scale,
14969 // Calculate the distance, in time, that this segment starts from the start
14970 // of the timeline (earliest time seen since the transmuxer initialized)
14971 timeSinceStartOfTimeline = track.minSegmentDts - track.timelineStartInfo.dts;
14972
14973 // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
14974 // we want the start of the first segment to be placed
14975 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime;
14976
14977 // Add to that the distance this segment is from the very first
14978 baseMediaDecodeTime += timeSinceStartOfTimeline;
14979
14980 // baseMediaDecodeTime must not become negative
14981 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
14982
14983 if (track.type === 'audio') {
14984 // Audio has a different clock equal to the sampling_rate so we need to
14985 // scale the PTS values into the clock rate of the track
14986 scale = track.samplerate / ONE_SECOND_IN_TS;
14987 baseMediaDecodeTime *= scale;
14988 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
14989 }
14990
14991 return baseMediaDecodeTime;
14992};
14993
14994/**
14995 * A Stream that can combine multiple streams (ie. audio & video)
14996 * into a single output segment for MSE. Also supports audio-only
14997 * and video-only streams.
14998 */
14999CoalesceStream = function(options, metadataStream) {
15000 // Number of Tracks per output segment
15001 // If greater than 1, we combine multiple
15002 // tracks into a single segment
15003 this.numberOfTracks = 0;
15004 this.metadataStream = metadataStream;
15005
15006 if (typeof options.remux !== 'undefined') {
15007 this.remuxTracks = !!options.remux;
15008 } else {
15009 this.remuxTracks = true;
15010 }
15011
15012 this.pendingTracks = [];
15013 this.videoTrack = null;
15014 this.pendingBoxes = [];
15015 this.pendingCaptions = [];
15016 this.pendingMetadata = [];
15017 this.pendingBytes = 0;
15018 this.emittedTracks = 0;
15019
15020 CoalesceStream.prototype.init.call(this);
15021
15022 // Take output from multiple
15023 this.push = function(output) {
15024 // buffer incoming captions until the associated video segment
15025 // finishes
15026 if (output.text) {
15027 return this.pendingCaptions.push(output);
15028 }
15029 // buffer incoming id3 tags until the final flush
15030 if (output.frames) {
15031 return this.pendingMetadata.push(output);
15032 }
15033
15034 // Add this track to the list of pending tracks and store
15035 // important information required for the construction of
15036 // the final segment
15037 this.pendingTracks.push(output.track);
15038 this.pendingBoxes.push(output.boxes);
15039 this.pendingBytes += output.boxes.byteLength;
15040
15041 if (output.track.type === 'video') {
15042 this.videoTrack = output.track;
15043 }
15044 if (output.track.type === 'audio') {
15045 this.audioTrack = output.track;
15046 }
15047 };
15048};
15049
15050CoalesceStream.prototype = new Stream();
15051CoalesceStream.prototype.flush = function(flushSource) {
15052 var
15053 offset = 0,
15054 event = {
15055 captions: [],
15056 metadata: [],
15057 info: {}
15058 },
15059 caption,
15060 id3,
15061 initSegment,
15062 timelineStartPts = 0,
15063 i;
15064
15065 if (this.pendingTracks.length < this.numberOfTracks) {
15066 if (flushSource !== 'VideoSegmentStream' &&
15067 flushSource !== 'AudioSegmentStream') {
15068 // Return because we haven't received a flush from a data-generating
15069 // portion of the segment (meaning that we have only recieved meta-data
15070 // or captions.)
15071 return;
15072 } else if (this.remuxTracks) {
15073 // Return until we have enough tracks from the pipeline to remux (if we
15074 // are remuxing audio and video into a single MP4)
15075 return;
15076 } else if (this.pendingTracks.length === 0) {
15077 // In the case where we receive a flush without any data having been
15078 // received we consider it an emitted track for the purposes of coalescing
15079 // `done` events.
15080 // We do this for the case where there is an audio and video track in the
15081 // segment but no audio data. (seen in several playlists with alternate
15082 // audio tracks and no audio present in the main TS segments.)
15083 this.emittedTracks++;
15084
15085 if (this.emittedTracks >= this.numberOfTracks) {
15086 this.trigger('done');
15087 this.emittedTracks = 0;
15088 }
15089 return;
15090 }
15091 }
15092
15093 if (this.videoTrack) {
15094 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
15095 VIDEO_PROPERTIES.forEach(function(prop) {
15096 event.info[prop] = this.videoTrack[prop];
15097 }, this);
15098 } else if (this.audioTrack) {
15099 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
15100 AUDIO_PROPERTIES.forEach(function(prop) {
15101 event.info[prop] = this.audioTrack[prop];
15102 }, this);
15103 }
15104
15105 if (this.pendingTracks.length === 1) {
15106 event.type = this.pendingTracks[0].type;
15107 } else {
15108 event.type = 'combined';
15109 }
15110
15111 this.emittedTracks += this.pendingTracks.length;
15112
15113 initSegment = mp4.initSegment(this.pendingTracks);
15114
15115 // Create a new typed array to hold the init segment
15116 event.initSegment = new Uint8Array(initSegment.byteLength);
15117
15118 // Create an init segment containing a moov
15119 // and track definitions
15120 event.initSegment.set(initSegment);
15121
15122 // Create a new typed array to hold the moof+mdats
15123 event.data = new Uint8Array(this.pendingBytes);
15124
15125 // Append each moof+mdat (one per track) together
15126 for (i = 0; i < this.pendingBoxes.length; i++) {
15127 event.data.set(this.pendingBoxes[i], offset);
15128 offset += this.pendingBoxes[i].byteLength;
15129 }
15130
15131 // Translate caption PTS times into second offsets into the
15132 // video timeline for the segment
15133 for (i = 0; i < this.pendingCaptions.length; i++) {
15134 caption = this.pendingCaptions[i];
15135 caption.startTime = (caption.startPts - timelineStartPts);
15136 caption.startTime /= 90e3;
15137 caption.endTime = (caption.endPts - timelineStartPts);
15138 caption.endTime /= 90e3;
15139 event.captions.push(caption);
15140 }
15141
15142 // Translate ID3 frame PTS times into second offsets into the
15143 // video timeline for the segment
15144 for (i = 0; i < this.pendingMetadata.length; i++) {
15145 id3 = this.pendingMetadata[i];
15146 id3.cueTime = (id3.pts - timelineStartPts);
15147 id3.cueTime /= 90e3;
15148 event.metadata.push(id3);
15149 }
15150 // We add this to every single emitted segment even though we only need
15151 // it for the first
15152 event.metadata.dispatchType = this.metadataStream.dispatchType;
15153
15154 // Reset stream state
15155 this.pendingTracks.length = 0;
15156 this.videoTrack = null;
15157 this.pendingBoxes.length = 0;
15158 this.pendingCaptions.length = 0;
15159 this.pendingBytes = 0;
15160 this.pendingMetadata.length = 0;
15161
15162 // Emit the built segment
15163 this.trigger('data', event);
15164
15165 // Only emit `done` if all tracks have been flushed and emitted
15166 if (this.emittedTracks >= this.numberOfTracks) {
15167 this.trigger('done');
15168 this.emittedTracks = 0;
15169 }
15170};
15171/**
15172 * A Stream that expects MP2T binary data as input and produces
15173 * corresponding media segments, suitable for use with Media Source
15174 * Extension (MSE) implementations that support the ISO BMFF byte
15175 * stream format, like Chrome.
15176 */
15177Transmuxer = function(options) {
15178 var
15179 self = this,
15180 hasFlushed = true,
15181 videoTrack,
15182 audioTrack;
15183
15184 Transmuxer.prototype.init.call(this);
15185
15186 options = options || {};
15187 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
15188 this.transmuxPipeline_ = {};
15189
15190 this.setupAacPipeline = function() {
15191 var pipeline = {};
15192 this.transmuxPipeline_ = pipeline;
15193
15194 pipeline.type = 'aac';
15195 pipeline.metadataStream = new m2ts.MetadataStream();
15196
15197 // set up the parsing pipeline
15198 pipeline.aacStream = new AacStream();
15199 pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
15200 pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
15201 pipeline.adtsStream = new AdtsStream();
15202 pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
15203 pipeline.headOfPipeline = pipeline.aacStream;
15204
15205 pipeline.aacStream
15206 .pipe(pipeline.audioTimestampRolloverStream)
15207 .pipe(pipeline.adtsStream);
15208 pipeline.aacStream
15209 .pipe(pipeline.timedMetadataTimestampRolloverStream)
15210 .pipe(pipeline.metadataStream)
15211 .pipe(pipeline.coalesceStream);
15212
15213 pipeline.metadataStream.on('timestamp', function(frame) {
15214 pipeline.aacStream.setTimestamp(frame.timeStamp);
15215 });
15216
15217 pipeline.aacStream.on('data', function(data) {
15218 if (data.type === 'timed-metadata' && !pipeline.audioSegmentStream) {
15219 audioTrack = audioTrack || {
15220 timelineStartInfo: {
15221 baseMediaDecodeTime: self.baseMediaDecodeTime
15222 },
15223 codec: 'adts',
15224 type: 'audio'
15225 };
15226 // hook up the audio segment stream to the first track with aac data
15227 pipeline.coalesceStream.numberOfTracks++;
15228 pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
15229 // Set up the final part of the audio pipeline
15230 pipeline.adtsStream
15231 .pipe(pipeline.audioSegmentStream)
15232 .pipe(pipeline.coalesceStream);
15233 }
15234 });
15235
15236 // Re-emit any data coming from the coalesce stream to the outside world
15237 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
15238 // Let the consumer know we have finished flushing the entire pipeline
15239 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
15240 };
15241
15242 this.setupTsPipeline = function() {
15243 var pipeline = {};
15244 this.transmuxPipeline_ = pipeline;
15245
15246 pipeline.type = 'ts';
15247 pipeline.metadataStream = new m2ts.MetadataStream();
15248
15249 // set up the parsing pipeline
15250 pipeline.packetStream = new m2ts.TransportPacketStream();
15251 pipeline.parseStream = new m2ts.TransportParseStream();
15252 pipeline.elementaryStream = new m2ts.ElementaryStream();
15253 pipeline.videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
15254 pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
15255 pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
15256 pipeline.adtsStream = new AdtsStream();
15257 pipeline.h264Stream = new H264Stream();
15258 pipeline.captionStream = new m2ts.CaptionStream();
15259 pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
15260 pipeline.headOfPipeline = pipeline.packetStream;
15261
15262 // disassemble MPEG2-TS packets into elementary streams
15263 pipeline.packetStream
15264 .pipe(pipeline.parseStream)
15265 .pipe(pipeline.elementaryStream);
15266
15267 // !!THIS ORDER IS IMPORTANT!!
15268 // demux the streams
15269 pipeline.elementaryStream
15270 .pipe(pipeline.videoTimestampRolloverStream)
15271 .pipe(pipeline.h264Stream);
15272 pipeline.elementaryStream
15273 .pipe(pipeline.audioTimestampRolloverStream)
15274 .pipe(pipeline.adtsStream);
15275
15276 pipeline.elementaryStream
15277 .pipe(pipeline.timedMetadataTimestampRolloverStream)
15278 .pipe(pipeline.metadataStream)
15279 .pipe(pipeline.coalesceStream);
15280
15281 // Hook up CEA-608/708 caption stream
15282 pipeline.h264Stream.pipe(pipeline.captionStream)
15283 .pipe(pipeline.coalesceStream);
15284
15285 pipeline.elementaryStream.on('data', function(data) {
15286 var i;
15287
15288 if (data.type === 'metadata') {
15289 i = data.tracks.length;
15290
15291 // scan the tracks listed in the metadata
15292 while (i--) {
15293 if (!videoTrack && data.tracks[i].type === 'video') {
15294 videoTrack = data.tracks[i];
15295 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
15296 } else if (!audioTrack && data.tracks[i].type === 'audio') {
15297 audioTrack = data.tracks[i];
15298 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
15299 }
15300 }
15301
15302 // hook up the video segment stream to the first track with h264 data
15303 if (videoTrack && !pipeline.videoSegmentStream) {
15304 pipeline.coalesceStream.numberOfTracks++;
15305 pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack);
15306
15307 pipeline.videoSegmentStream.on('timelineStartInfo', function(timelineStartInfo) {
15308 // When video emits timelineStartInfo data after a flush, we forward that
15309 // info to the AudioSegmentStream, if it exists, because video timeline
15310 // data takes precedence.
15311 if (audioTrack) {
15312 audioTrack.timelineStartInfo = timelineStartInfo;
15313 // On the first segment we trim AAC frames that exist before the
15314 // very earliest DTS we have seen in video because Chrome will
15315 // interpret any video track with a baseMediaDecodeTime that is
15316 // non-zero as a gap.
15317 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts);
15318 }
15319 });
15320
15321 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function(baseMediaDecodeTime) {
15322 if (audioTrack) {
15323 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
15324 }
15325 });
15326
15327 // Set up the final part of the video pipeline
15328 pipeline.h264Stream
15329 .pipe(pipeline.videoSegmentStream)
15330 .pipe(pipeline.coalesceStream);
15331 }
15332
15333 if (audioTrack && !pipeline.audioSegmentStream) {
15334 // hook up the audio segment stream to the first track with aac data
15335 pipeline.coalesceStream.numberOfTracks++;
15336 pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
15337
15338 // Set up the final part of the audio pipeline
15339 pipeline.adtsStream
15340 .pipe(pipeline.audioSegmentStream)
15341 .pipe(pipeline.coalesceStream);
15342 }
15343 }
15344 });
15345
15346 // Re-emit any data coming from the coalesce stream to the outside world
15347 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
15348 // Let the consumer know we have finished flushing the entire pipeline
15349 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
15350 };
15351
15352 // hook up the segment streams once track metadata is delivered
15353 this.setBaseMediaDecodeTime = function(baseMediaDecodeTime) {
15354 var pipeline = this.transmuxPipeline_;
15355
15356 this.baseMediaDecodeTime = baseMediaDecodeTime;
15357 if (audioTrack) {
15358 audioTrack.timelineStartInfo.dts = undefined;
15359 audioTrack.timelineStartInfo.pts = undefined;
15360 clearDtsInfo(audioTrack);
15361 audioTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
15362 if (pipeline.audioTimestampRolloverStream) {
15363 pipeline.audioTimestampRolloverStream.discontinuity();
15364 }
15365 }
15366 if (videoTrack) {
15367 if (pipeline.videoSegmentStream) {
15368 pipeline.videoSegmentStream.gopCache_ = [];
15369 pipeline.videoTimestampRolloverStream.discontinuity();
15370 }
15371 videoTrack.timelineStartInfo.dts = undefined;
15372 videoTrack.timelineStartInfo.pts = undefined;
15373 clearDtsInfo(videoTrack);
15374 videoTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
15375 }
15376
15377 if (pipeline.timedMetadataTimestampRolloverStream) {
15378 pipeline.timedMetadataTimestampRolloverStream.discontinuity();
15379 }
15380 };
15381
15382 this.setAudioAppendStart = function(timestamp) {
15383 if (audioTrack) {
15384 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
15385 }
15386 };
15387
15388 // feed incoming data to the front of the parsing pipeline
15389 this.push = function(data) {
15390 if (hasFlushed) {
15391 var isAac = isLikelyAacData(data);
15392
15393 if (isAac && this.transmuxPipeline_.type !== 'aac') {
15394 this.setupAacPipeline();
15395 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
15396 this.setupTsPipeline();
15397 }
15398 hasFlushed = false;
15399 }
15400 this.transmuxPipeline_.headOfPipeline.push(data);
15401 };
15402
15403 // flush any buffered data
15404 this.flush = function() {
15405 hasFlushed = true;
15406 // Start at the top of the pipeline and flush all pending work
15407 this.transmuxPipeline_.headOfPipeline.flush();
15408 };
15409};
15410Transmuxer.prototype = new Stream();
15411
15412module.exports = {
15413 Transmuxer: Transmuxer,
15414 VideoSegmentStream: VideoSegmentStream,
15415 AudioSegmentStream: AudioSegmentStream,
15416 AUDIO_PROPERTIES: AUDIO_PROPERTIES,
15417 VIDEO_PROPERTIES: VIDEO_PROPERTIES
15418};
15419
15420},{"../aac":36,"../codecs/adts.js":38,"../codecs/h264":39,"../data/silence":40,"../m2ts/m2ts.js":48,"../utils/clock":58,"../utils/stream.js":60,"./mp4-generator.js":54}],57:[function(require,module,exports){
15421/**
15422 * mux.js
15423 *
15424 * Copyright (c) 2016 Brightcove
15425 * All rights reserved.
15426 *
15427 * Parse mpeg2 transport stream packets to extract basic timing information
15428 */
15429'use strict';
15430
15431var StreamTypes = require('../m2ts/stream-types.js');
15432var handleRollover = require('../m2ts/timestamp-rollover-stream.js').handleRollover;
15433var probe = {};
15434probe.ts = require('../m2ts/probe.js');
15435probe.aac = require('../aac/probe.js');
15436
15437
15438var
15439 PES_TIMESCALE = 90000,
15440 MP2T_PACKET_LENGTH = 188, // bytes
15441 SYNC_BYTE = 0x47;
15442
15443var isLikelyAacData = function(data) {
15444 if ((data[0] === 'I'.charCodeAt(0)) &&
15445 (data[1] === 'D'.charCodeAt(0)) &&
15446 (data[2] === '3'.charCodeAt(0))) {
15447 return true;
15448 }
15449 return false;
15450};
15451
15452/**
15453 * walks through segment data looking for pat and pmt packets to parse out
15454 * program map table information
15455 */
15456var parsePsi_ = function(bytes, pmt) {
15457 var
15458 startIndex = 0,
15459 endIndex = MP2T_PACKET_LENGTH,
15460 packet, type;
15461
15462 while (endIndex < bytes.byteLength) {
15463 // Look for a pair of start and end sync bytes in the data..
15464 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
15465 // We found a packet
15466 packet = bytes.subarray(startIndex, endIndex);
15467 type = probe.ts.parseType(packet, pmt.pid);
15468
15469 switch (type) {
15470 case 'pat':
15471 if (!pmt.pid) {
15472 pmt.pid = probe.ts.parsePat(packet);
15473 }
15474 break;
15475 case 'pmt':
15476 if (!pmt.table) {
15477 pmt.table = probe.ts.parsePmt(packet);
15478 }
15479 break;
15480 default:
15481 break;
15482 }
15483
15484 // Found the pat and pmt, we can stop walking the segment
15485 if (pmt.pid && pmt.table) {
15486 return;
15487 }
15488
15489 startIndex += MP2T_PACKET_LENGTH;
15490 endIndex += MP2T_PACKET_LENGTH;
15491 continue;
15492 }
15493
15494 // If we get here, we have somehow become de-synchronized and we need to step
15495 // forward one byte at a time until we find a pair of sync bytes that denote
15496 // a packet
15497 startIndex++;
15498 endIndex++;
15499 }
15500};
15501
15502/**
15503 * walks through the segment data from the start and end to get timing information
15504 * for the first and last audio pes packets
15505 */
15506var parseAudioPes_ = function(bytes, pmt, result) {
15507 var
15508 startIndex = 0,
15509 endIndex = MP2T_PACKET_LENGTH,
15510 packet, type, pesType, pusi, parsed;
15511
15512 var endLoop = false;
15513
15514 // Start walking from start of segment to get first audio packet
15515 while (endIndex < bytes.byteLength) {
15516 // Look for a pair of start and end sync bytes in the data..
15517 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
15518 // We found a packet
15519 packet = bytes.subarray(startIndex, endIndex);
15520 type = probe.ts.parseType(packet, pmt.pid);
15521
15522 switch (type) {
15523 case 'pes':
15524 pesType = probe.ts.parsePesType(packet, pmt.table);
15525 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
15526 if (pesType === 'audio' && pusi) {
15527 parsed = probe.ts.parsePesTime(packet);
15528 if (parsed) {
15529 parsed.type = 'audio';
15530 result.audio.push(parsed);
15531 endLoop = true;
15532 }
15533 }
15534 break;
15535 default:
15536 break;
15537 }
15538
15539 if (endLoop) {
15540 break;
15541 }
15542
15543 startIndex += MP2T_PACKET_LENGTH;
15544 endIndex += MP2T_PACKET_LENGTH;
15545 continue;
15546 }
15547
15548 // If we get here, we have somehow become de-synchronized and we need to step
15549 // forward one byte at a time until we find a pair of sync bytes that denote
15550 // a packet
15551 startIndex++;
15552 endIndex++;
15553 }
15554
15555 // Start walking from end of segment to get last audio packet
15556 endIndex = bytes.byteLength;
15557 startIndex = endIndex - MP2T_PACKET_LENGTH;
15558 endLoop = false;
15559 while (startIndex >= 0) {
15560 // Look for a pair of start and end sync bytes in the data..
15561 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
15562 // We found a packet
15563 packet = bytes.subarray(startIndex, endIndex);
15564 type = probe.ts.parseType(packet, pmt.pid);
15565
15566 switch (type) {
15567 case 'pes':
15568 pesType = probe.ts.parsePesType(packet, pmt.table);
15569 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
15570 if (pesType === 'audio' && pusi) {
15571 parsed = probe.ts.parsePesTime(packet);
15572 if (parsed) {
15573 parsed.type = 'audio';
15574 result.audio.push(parsed);
15575 endLoop = true;
15576 }
15577 }
15578 break;
15579 default:
15580 break;
15581 }
15582
15583 if (endLoop) {
15584 break;
15585 }
15586
15587 startIndex -= MP2T_PACKET_LENGTH;
15588 endIndex -= MP2T_PACKET_LENGTH;
15589 continue;
15590 }
15591
15592 // If we get here, we have somehow become de-synchronized and we need to step
15593 // forward one byte at a time until we find a pair of sync bytes that denote
15594 // a packet
15595 startIndex--;
15596 endIndex--;
15597 }
15598};
15599
15600/**
15601 * walks through the segment data from the start and end to get timing information
15602 * for the first and last video pes packets as well as timing information for the first
15603 * key frame.
15604 */
15605var parseVideoPes_ = function(bytes, pmt, result) {
15606 var
15607 startIndex = 0,
15608 endIndex = MP2T_PACKET_LENGTH,
15609 packet, type, pesType, pusi, parsed, frame, i, pes;
15610
15611 var endLoop = false;
15612
15613 var currentFrame = {
15614 data: [],
15615 size: 0
15616 };
15617
15618 // Start walking from start of segment to get first video packet
15619 while (endIndex < bytes.byteLength) {
15620 // Look for a pair of start and end sync bytes in the data..
15621 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
15622 // We found a packet
15623 packet = bytes.subarray(startIndex, endIndex);
15624 type = probe.ts.parseType(packet, pmt.pid);
15625
15626 switch (type) {
15627 case 'pes':
15628 pesType = probe.ts.parsePesType(packet, pmt.table);
15629 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
15630 if (pesType === 'video') {
15631 if (pusi && !endLoop) {
15632 parsed = probe.ts.parsePesTime(packet);
15633 if (parsed) {
15634 parsed.type = 'video';
15635 result.video.push(parsed);
15636 endLoop = true;
15637 }
15638 }
15639 if (!result.firstKeyFrame) {
15640 if (pusi) {
15641 if (currentFrame.size !== 0) {
15642 frame = new Uint8Array(currentFrame.size);
15643 i = 0;
15644 while (currentFrame.data.length) {
15645 pes = currentFrame.data.shift();
15646 frame.set(pes, i);
15647 i += pes.byteLength;
15648 }
15649 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
15650 result.firstKeyFrame = probe.ts.parsePesTime(frame);
15651 result.firstKeyFrame.type = 'video';
15652 }
15653 currentFrame.size = 0;
15654 }
15655 }
15656 currentFrame.data.push(packet);
15657 currentFrame.size += packet.byteLength;
15658 }
15659 }
15660 break;
15661 default:
15662 break;
15663 }
15664
15665 if (endLoop && result.firstKeyFrame) {
15666 break;
15667 }
15668
15669 startIndex += MP2T_PACKET_LENGTH;
15670 endIndex += MP2T_PACKET_LENGTH;
15671 continue;
15672 }
15673
15674 // If we get here, we have somehow become de-synchronized and we need to step
15675 // forward one byte at a time until we find a pair of sync bytes that denote
15676 // a packet
15677 startIndex++;
15678 endIndex++;
15679 }
15680
15681 // Start walking from end of segment to get last video packet
15682 endIndex = bytes.byteLength;
15683 startIndex = endIndex - MP2T_PACKET_LENGTH;
15684 endLoop = false;
15685 while (startIndex >= 0) {
15686 // Look for a pair of start and end sync bytes in the data..
15687 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
15688 // We found a packet
15689 packet = bytes.subarray(startIndex, endIndex);
15690 type = probe.ts.parseType(packet, pmt.pid);
15691
15692 switch (type) {
15693 case 'pes':
15694 pesType = probe.ts.parsePesType(packet, pmt.table);
15695 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
15696 if (pesType === 'video' && pusi) {
15697 parsed = probe.ts.parsePesTime(packet);
15698 if (parsed) {
15699 parsed.type = 'video';
15700 result.video.push(parsed);
15701 endLoop = true;
15702 }
15703 }
15704 break;
15705 default:
15706 break;
15707 }
15708
15709 if (endLoop) {
15710 break;
15711 }
15712
15713 startIndex -= MP2T_PACKET_LENGTH;
15714 endIndex -= MP2T_PACKET_LENGTH;
15715 continue;
15716 }
15717
15718 // If we get here, we have somehow become de-synchronized and we need to step
15719 // forward one byte at a time until we find a pair of sync bytes that denote
15720 // a packet
15721 startIndex--;
15722 endIndex--;
15723 }
15724};
15725
15726/**
15727 * Adjusts the timestamp information for the segment to account for
15728 * rollover and convert to seconds based on pes packet timescale (90khz clock)
15729 */
15730var adjustTimestamp_ = function(segmentInfo, baseTimestamp) {
15731 if (segmentInfo.audio && segmentInfo.audio.length) {
15732 var audioBaseTimestamp = baseTimestamp;
15733 if (typeof audioBaseTimestamp === 'undefined') {
15734 audioBaseTimestamp = segmentInfo.audio[0].dts;
15735 }
15736 segmentInfo.audio.forEach(function(info) {
15737 info.dts = handleRollover(info.dts, audioBaseTimestamp);
15738 info.pts = handleRollover(info.pts, audioBaseTimestamp);
15739 // time in seconds
15740 info.dtsTime = info.dts / PES_TIMESCALE;
15741 info.ptsTime = info.pts / PES_TIMESCALE;
15742 });
15743 }
15744
15745 if (segmentInfo.video && segmentInfo.video.length) {
15746 var videoBaseTimestamp = baseTimestamp;
15747 if (typeof videoBaseTimestamp === 'undefined') {
15748 videoBaseTimestamp = segmentInfo.video[0].dts;
15749 }
15750 segmentInfo.video.forEach(function(info) {
15751 info.dts = handleRollover(info.dts, videoBaseTimestamp);
15752 info.pts = handleRollover(info.pts, videoBaseTimestamp);
15753 // time in seconds
15754 info.dtsTime = info.dts / PES_TIMESCALE;
15755 info.ptsTime = info.pts / PES_TIMESCALE;
15756 });
15757 if (segmentInfo.firstKeyFrame) {
15758 var frame = segmentInfo.firstKeyFrame;
15759 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
15760 frame.pts = handleRollover(frame.pts, videoBaseTimestamp);
15761 // time in seconds
15762 frame.dtsTime = frame.dts / PES_TIMESCALE;
15763 frame.ptsTime = frame.dts / PES_TIMESCALE;
15764 }
15765 }
15766};
15767
15768/**
15769 * inspects the aac data stream for start and end time information
15770 */
15771var inspectAac_ = function(bytes) {
15772 var
15773 endLoop = false,
15774 audioCount = 0,
15775 sampleRate = null,
15776 timestamp = null,
15777 frameSize = 0,
15778 byteIndex = 0,
15779 packet;
15780
15781 while (bytes.length - byteIndex >= 3) {
15782 var type = probe.aac.parseType(bytes, byteIndex);
15783 switch (type) {
15784 case 'timed-metadata':
15785 // Exit early because we don't have enough to parse
15786 // the ID3 tag header
15787 if (bytes.length - byteIndex < 10) {
15788 endLoop = true;
15789 break;
15790 }
15791
15792 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex);
15793
15794 // Exit early if we don't have enough in the buffer
15795 // to emit a full packet
15796 if (frameSize > bytes.length) {
15797 endLoop = true;
15798 break;
15799 }
15800 if (timestamp === null) {
15801 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
15802 timestamp = probe.aac.parseAacTimestamp(packet);
15803 }
15804 byteIndex += frameSize;
15805 break;
15806 case 'audio':
15807 // Exit early because we don't have enough to parse
15808 // the ADTS frame header
15809 if (bytes.length - byteIndex < 7) {
15810 endLoop = true;
15811 break;
15812 }
15813
15814 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex);
15815
15816 // Exit early if we don't have enough in the buffer
15817 // to emit a full packet
15818 if (frameSize > bytes.length) {
15819 endLoop = true;
15820 break;
15821 }
15822 if (sampleRate === null) {
15823 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
15824 sampleRate = probe.aac.parseSampleRate(packet);
15825 }
15826 audioCount++;
15827 byteIndex += frameSize;
15828 break;
15829 default:
15830 byteIndex++;
15831 break;
15832 }
15833 if (endLoop) {
15834 return null;
15835 }
15836 }
15837 if (sampleRate === null || timestamp === null) {
15838 return null;
15839 }
15840
15841 var audioTimescale = PES_TIMESCALE / sampleRate;
15842
15843 var result = {
15844 audio: [
15845 {
15846 type: 'audio',
15847 dts: timestamp,
15848 pts: timestamp
15849 },
15850 {
15851 type: 'audio',
15852 dts: timestamp + (audioCount * 1024 * audioTimescale),
15853 pts: timestamp + (audioCount * 1024 * audioTimescale)
15854 }
15855 ]
15856 };
15857
15858 return result;
15859};
15860
15861/**
15862 * inspects the transport stream segment data for start and end time information
15863 * of the audio and video tracks (when present) as well as the first key frame's
15864 * start time.
15865 */
15866var inspectTs_ = function(bytes) {
15867 var pmt = {
15868 pid: null,
15869 table: null
15870 };
15871
15872 var result = {};
15873
15874 parsePsi_(bytes, pmt);
15875
15876 for (var pid in pmt.table) {
15877 if (pmt.table.hasOwnProperty(pid)) {
15878 var type = pmt.table[pid];
15879 switch (type) {
15880 case StreamTypes.H264_STREAM_TYPE:
15881 result.video = [];
15882 parseVideoPes_(bytes, pmt, result);
15883 if (result.video.length === 0) {
15884 delete result.video;
15885 }
15886 break;
15887 case StreamTypes.ADTS_STREAM_TYPE:
15888 result.audio = [];
15889 parseAudioPes_(bytes, pmt, result);
15890 if (result.audio.length === 0) {
15891 delete result.audio;
15892 }
15893 break;
15894 default:
15895 break;
15896 }
15897 }
15898 }
15899 return result;
15900};
15901
15902/**
15903 * Inspects segment byte data and returns an object with start and end timing information
15904 *
15905 * @param {Uint8Array} bytes The segment byte data
15906 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
15907 * timestamps for rollover. This value must be in 90khz clock.
15908 * @return {Object} Object containing start and end frame timing info of segment.
15909 */
15910var inspect = function(bytes, baseTimestamp) {
15911 var isAacData = isLikelyAacData(bytes);
15912
15913 var result;
15914
15915 if (isAacData) {
15916 result = inspectAac_(bytes);
15917 } else {
15918 result = inspectTs_(bytes);
15919 }
15920
15921 if (!result || (!result.audio && !result.video)) {
15922 return null;
15923 }
15924
15925 adjustTimestamp_(result, baseTimestamp);
15926
15927 return result;
15928};
15929
15930module.exports = {
15931 inspect: inspect
15932};
15933
15934},{"../aac/probe.js":37,"../m2ts/probe.js":50,"../m2ts/stream-types.js":51,"../m2ts/timestamp-rollover-stream.js":52}],58:[function(require,module,exports){
15935var
15936 ONE_SECOND_IN_TS = 90000, // 90kHz clock
15937 secondsToVideoTs,
15938 secondsToAudioTs,
15939 videoTsToSeconds,
15940 audioTsToSeconds,
15941 audioTsToVideoTs,
15942 videoTsToAudioTs;
15943
15944secondsToVideoTs = function(seconds) {
15945 return seconds * ONE_SECOND_IN_TS;
15946};
15947
15948secondsToAudioTs = function(seconds, sampleRate) {
15949 return seconds * sampleRate;
15950};
15951
15952videoTsToSeconds = function(timestamp) {
15953 return timestamp / ONE_SECOND_IN_TS;
15954};
15955
15956audioTsToSeconds = function(timestamp, sampleRate) {
15957 return timestamp / sampleRate;
15958};
15959
15960audioTsToVideoTs = function(timestamp, sampleRate) {
15961 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
15962};
15963
15964videoTsToAudioTs = function(timestamp, sampleRate) {
15965 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
15966};
15967
15968module.exports = {
15969 secondsToVideoTs: secondsToVideoTs,
15970 secondsToAudioTs: secondsToAudioTs,
15971 videoTsToSeconds: videoTsToSeconds,
15972 audioTsToSeconds: audioTsToSeconds,
15973 audioTsToVideoTs: audioTsToVideoTs,
15974 videoTsToAudioTs: videoTsToAudioTs
15975};
15976
15977},{}],59:[function(require,module,exports){
15978'use strict';
15979
15980var ExpGolomb;
15981
15982/**
15983 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
15984 * scheme used by h264.
15985 */
15986ExpGolomb = function(workingData) {
15987 var
15988 // the number of bytes left to examine in workingData
15989 workingBytesAvailable = workingData.byteLength,
15990
15991 // the current word being examined
15992 workingWord = 0, // :uint
15993
15994 // the number of bits left to examine in the current word
15995 workingBitsAvailable = 0; // :uint;
15996
15997 // ():uint
15998 this.length = function() {
15999 return (8 * workingBytesAvailable);
16000 };
16001
16002 // ():uint
16003 this.bitsAvailable = function() {
16004 return (8 * workingBytesAvailable) + workingBitsAvailable;
16005 };
16006
16007 // ():void
16008 this.loadWord = function() {
16009 var
16010 position = workingData.byteLength - workingBytesAvailable,
16011 workingBytes = new Uint8Array(4),
16012 availableBytes = Math.min(4, workingBytesAvailable);
16013
16014 if (availableBytes === 0) {
16015 throw new Error('no bytes available');
16016 }
16017
16018 workingBytes.set(workingData.subarray(position,
16019 position + availableBytes));
16020 workingWord = new DataView(workingBytes.buffer).getUint32(0);
16021
16022 // track the amount of workingData that has been processed
16023 workingBitsAvailable = availableBytes * 8;
16024 workingBytesAvailable -= availableBytes;
16025 };
16026
16027 // (count:int):void
16028 this.skipBits = function(count) {
16029 var skipBytes; // :int
16030 if (workingBitsAvailable > count) {
16031 workingWord <<= count;
16032 workingBitsAvailable -= count;
16033 } else {
16034 count -= workingBitsAvailable;
16035 skipBytes = Math.floor(count / 8);
16036
16037 count -= (skipBytes * 8);
16038 workingBytesAvailable -= skipBytes;
16039
16040 this.loadWord();
16041
16042 workingWord <<= count;
16043 workingBitsAvailable -= count;
16044 }
16045 };
16046
16047 // (size:int):uint
16048 this.readBits = function(size) {
16049 var
16050 bits = Math.min(workingBitsAvailable, size), // :uint
16051 valu = workingWord >>> (32 - bits); // :uint
16052 // if size > 31, handle error
16053 workingBitsAvailable -= bits;
16054 if (workingBitsAvailable > 0) {
16055 workingWord <<= bits;
16056 } else if (workingBytesAvailable > 0) {
16057 this.loadWord();
16058 }
16059
16060 bits = size - bits;
16061 if (bits > 0) {
16062 return valu << bits | this.readBits(bits);
16063 }
16064 return valu;
16065 };
16066
16067 // ():uint
16068 this.skipLeadingZeros = function() {
16069 var leadingZeroCount; // :uint
16070 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
16071 if ((workingWord & (0x80000000 >>> leadingZeroCount)) !== 0) {
16072 // the first bit of working word is 1
16073 workingWord <<= leadingZeroCount;
16074 workingBitsAvailable -= leadingZeroCount;
16075 return leadingZeroCount;
16076 }
16077 }
16078
16079 // we exhausted workingWord and still have not found a 1
16080 this.loadWord();
16081 return leadingZeroCount + this.skipLeadingZeros();
16082 };
16083
16084 // ():void
16085 this.skipUnsignedExpGolomb = function() {
16086 this.skipBits(1 + this.skipLeadingZeros());
16087 };
16088
16089 // ():void
16090 this.skipExpGolomb = function() {
16091 this.skipBits(1 + this.skipLeadingZeros());
16092 };
16093
16094 // ():uint
16095 this.readUnsignedExpGolomb = function() {
16096 var clz = this.skipLeadingZeros(); // :uint
16097 return this.readBits(clz + 1) - 1;
16098 };
16099
16100 // ():int
16101 this.readExpGolomb = function() {
16102 var valu = this.readUnsignedExpGolomb(); // :int
16103 if (0x01 & valu) {
16104 // the number is odd if the low order bit is set
16105 return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
16106 }
16107 return -1 * (valu >>> 1); // divide by two then make it negative
16108 };
16109
16110 // Some convenience functions
16111 // :Boolean
16112 this.readBoolean = function() {
16113 return this.readBits(1) === 1;
16114 };
16115
16116 // ():int
16117 this.readUnsignedByte = function() {
16118 return this.readBits(8);
16119 };
16120
16121 this.loadWord();
16122};
16123
16124module.exports = ExpGolomb;
16125
16126},{}],60:[function(require,module,exports){
16127/**
16128 * mux.js
16129 *
16130 * Copyright (c) 2014 Brightcove
16131 * All rights reserved.
16132 *
16133 * A lightweight readable stream implemention that handles event dispatching.
16134 * Objects that inherit from streams should call init in their constructors.
16135 */
16136'use strict';
16137
16138var Stream = function() {
16139 this.init = function() {
16140 var listeners = {};
16141 /**
16142 * Add a listener for a specified event type.
16143 * @param type {string} the event name
16144 * @param listener {function} the callback to be invoked when an event of
16145 * the specified type occurs
16146 */
16147 this.on = function(type, listener) {
16148 if (!listeners[type]) {
16149 listeners[type] = [];
16150 }
16151 listeners[type] = listeners[type].concat(listener);
16152 };
16153 /**
16154 * Remove a listener for a specified event type.
16155 * @param type {string} the event name
16156 * @param listener {function} a function previously registered for this
16157 * type of event through `on`
16158 */
16159 this.off = function(type, listener) {
16160 var index;
16161 if (!listeners[type]) {
16162 return false;
16163 }
16164 index = listeners[type].indexOf(listener);
16165 listeners[type] = listeners[type].slice();
16166 listeners[type].splice(index, 1);
16167 return index > -1;
16168 };
16169 /**
16170 * Trigger an event of the specified type on this stream. Any additional
16171 * arguments to this function are passed as parameters to event listeners.
16172 * @param type {string} the event name
16173 */
16174 this.trigger = function(type) {
16175 var callbacks, i, length, args;
16176 callbacks = listeners[type];
16177 if (!callbacks) {
16178 return;
16179 }
16180 // Slicing the arguments on every invocation of this method
16181 // can add a significant amount of overhead. Avoid the
16182 // intermediate object creation for the common case of a
16183 // single callback argument
16184 if (arguments.length === 2) {
16185 length = callbacks.length;
16186 for (i = 0; i < length; ++i) {
16187 callbacks[i].call(this, arguments[1]);
16188 }
16189 } else {
16190 args = [];
16191 i = arguments.length;
16192 for (i = 1; i < arguments.length; ++i) {
16193 args.push(arguments[i]);
16194 }
16195 length = callbacks.length;
16196 for (i = 0; i < length; ++i) {
16197 callbacks[i].apply(this, args);
16198 }
16199 }
16200 };
16201 /**
16202 * Destroys the stream and cleans up.
16203 */
16204 this.dispose = function() {
16205 listeners = {};
16206 };
16207 };
16208};
16209
16210/**
16211 * Forwards all `data` events on this stream to the destination stream. The
16212 * destination stream should provide a method `push` to receive the data
16213 * events as they arrive.
16214 * @param destination {stream} the stream that will receive all `data` events
16215 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
16216 * when the current stream emits a 'done' event
16217 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
16218 */
16219Stream.prototype.pipe = function(destination) {
16220 this.on('data', function(data) {
16221 destination.push(data);
16222 });
16223
16224 this.on('done', function(flushSource) {
16225 destination.flush(flushSource);
16226 });
16227
16228 return destination;
16229};
16230
16231// Default stream functions that are expected to be overridden to perform
16232// actual work. These are provided by the prototype as a sort of no-op
16233// implementation so that we don't have to check for their existence in the
16234// `pipe` function above.
16235Stream.prototype.push = function(data) {
16236 this.trigger('data', data);
16237};
16238
16239Stream.prototype.flush = function(flushSource) {
16240 this.trigger('done', flushSource);
16241};
16242
16243module.exports = Stream;
16244
16245},{}],61:[function(require,module,exports){
16246/* jshint ignore:start */
16247(function(root) {
16248/* jshint ignore:end */
16249 var URLToolkit = {
16250 // build an absolute URL from a relative one using the provided baseURL
16251 // if relativeURL is an absolute URL it will be returned as is.
16252 buildAbsoluteURL: function(baseURL, relativeURL) {
16253 // remove any remaining space and CRLF
16254 relativeURL = relativeURL.trim();
16255 if (/^[a-z]+:/i.test(relativeURL)) {
16256 // complete url, not relative
16257 return relativeURL;
16258 }
16259
16260 var relativeURLQuery = null;
16261 var relativeURLHash = null;
16262
16263 var relativeURLHashSplit = /^([^#]*)(.*)$/.exec(relativeURL);
16264 if (relativeURLHashSplit) {
16265 relativeURLHash = relativeURLHashSplit[2];
16266 relativeURL = relativeURLHashSplit[1];
16267 }
16268 var relativeURLQuerySplit = /^([^\?]*)(.*)$/.exec(relativeURL);
16269 if (relativeURLQuerySplit) {
16270 relativeURLQuery = relativeURLQuerySplit[2];
16271 relativeURL = relativeURLQuerySplit[1];
16272 }
16273
16274 var baseURLHashSplit = /^([^#]*)(.*)$/.exec(baseURL);
16275 if (baseURLHashSplit) {
16276 baseURL = baseURLHashSplit[1];
16277 }
16278 var baseURLQuerySplit = /^([^\?]*)(.*)$/.exec(baseURL);
16279 if (baseURLQuerySplit) {
16280 baseURL = baseURLQuerySplit[1];
16281 }
16282
16283 var baseURLDomainSplit = /^(([a-z]+:)?\/\/[^:\/]+(:[0-9]+)?)?(\/?.*)$/i.exec(baseURL);
16284 if (!baseURLDomainSplit) {
16285 throw new Error('Error trying to parse base URL.');
16286 }
16287
16288 // e.g. 'http:', 'https:', ''
16289 var baseURLProtocol = baseURLDomainSplit[2] || '';
16290 // e.g. 'http://example.com', '//example.com', ''
16291 var baseURLProtocolDomain = baseURLDomainSplit[1] || '';
16292 // e.g. '/a/b/c/playlist.m3u8', 'a/b/c/playlist.m3u8'
16293 var baseURLPath = baseURLDomainSplit[4];
16294 if (baseURLPath.indexOf('/') !== 0 && baseURLProtocolDomain !== '') {
16295 // this handles a base url of http://example.com (missing last slash)
16296 baseURLPath = '/'+baseURLPath;
16297 }
16298
16299 var builtURL = null;
16300 if (/^\/\//.test(relativeURL)) {
16301 // relative url starts wth '//' so copy protocol (which may be '' if baseUrl didn't provide one)
16302 builtURL = baseURLProtocol+'//'+URLToolkit.buildAbsolutePath('', relativeURL.substring(2));
16303 }
16304 else if (/^\//.test(relativeURL)) {
16305 // relative url starts with '/' so start from root of domain
16306 builtURL = baseURLProtocolDomain+'/'+URLToolkit.buildAbsolutePath('', relativeURL.substring(1));
16307 }
16308 else {
16309 builtURL = URLToolkit.buildAbsolutePath(baseURLProtocolDomain+baseURLPath, relativeURL);
16310 }
16311
16312 // put the query and hash parts back
16313 if (relativeURLQuery) {
16314 builtURL += relativeURLQuery;
16315 }
16316 if (relativeURLHash) {
16317 builtURL += relativeURLHash;
16318 }
16319 return builtURL;
16320 },
16321
16322 // build an absolute path using the provided basePath
16323 // adapted from https://developer.mozilla.org/en-US/docs/Web/API/document/cookie#Using_relative_URLs_in_the_path_parameter
16324 // this does not handle the case where relativePath is "/" or "//". These cases should be handled outside this.
16325 buildAbsolutePath: function(basePath, relativePath) {
16326 var sRelPath = relativePath;
16327 var nUpLn, sDir = '', sPath = basePath.replace(/[^\/]*$/, sRelPath.replace(/(\/|^)(?:\.?\/+)+/g, '$1'));
16328 for (var nEnd, nStart = 0; nEnd = sPath.indexOf('/../', nStart), nEnd > -1; nStart = nEnd + nUpLn) {
16329 nUpLn = /^\/(?:\.\.\/)*/.exec(sPath.slice(nEnd))[0].length;
16330 sDir = (sDir + sPath.substring(nStart, nEnd)).replace(new RegExp('(?:\\\/+[^\\\/]*){0,' + ((nUpLn - 1) / 3) + '}$'), '/');
16331 }
16332 return sDir + sPath.substr(nStart);
16333 }
16334 };
16335
16336/* jshint ignore:start */
16337 if(typeof exports === 'object' && typeof module === 'object')
16338 module.exports = URLToolkit;
16339 else if(typeof define === 'function' && define.amd)
16340 define([], function() { return URLToolkit; });
16341 else if(typeof exports === 'object')
16342 exports["URLToolkit"] = URLToolkit;
16343 else
16344 root["URLToolkit"] = URLToolkit;
16345})(this);
16346/* jshint ignore:end */
16347
16348},{}],62:[function(require,module,exports){
16349(function (global){
16350/**
16351 * @file add-text-track-data.js
16352 */
16353'use strict';
16354
16355Object.defineProperty(exports, '__esModule', {
16356 value: true
16357});
16358
16359function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
16360
16361var _globalWindow = require('global/window');
16362
16363var _globalWindow2 = _interopRequireDefault(_globalWindow);
16364
16365var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
16366
16367var _videoJs2 = _interopRequireDefault(_videoJs);
16368
16369/**
16370 * Define properties on a cue for backwards compatability,
16371 * but warn the user that the way that they are using it
16372 * is depricated and will be removed at a later date.
16373 *
16374 * @param {Cue} cue the cue to add the properties on
16375 * @private
16376 */
16377var deprecateOldCue = function deprecateOldCue(cue) {
16378 Object.defineProperties(cue.frame, {
16379 id: {
16380 get: function get() {
16381 _videoJs2['default'].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
16382 return cue.value.key;
16383 }
16384 },
16385 value: {
16386 get: function get() {
16387 _videoJs2['default'].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
16388 return cue.value.data;
16389 }
16390 },
16391 privateData: {
16392 get: function get() {
16393 _videoJs2['default'].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
16394 return cue.value.data;
16395 }
16396 }
16397 });
16398};
16399
16400var durationOfVideo = function durationOfVideo(duration) {
16401 var dur = undefined;
16402
16403 if (isNaN(duration) || Math.abs(duration) === Infinity) {
16404 dur = Number.MAX_VALUE;
16405 } else {
16406 dur = duration;
16407 }
16408 return dur;
16409};
16410/**
16411 * Add text track data to a source handler given the captions and
16412 * metadata from the buffer.
16413 *
16414 * @param {Object} sourceHandler the flash or virtual source buffer
16415 * @param {Array} captionArray an array of caption data
16416 * @param {Array} metadataArray an array of meta data
16417 * @private
16418 */
16419var addTextTrackData = function addTextTrackData(sourceHandler, captionArray, metadataArray) {
16420 var Cue = _globalWindow2['default'].WebKitDataCue || _globalWindow2['default'].VTTCue;
16421
16422 if (captionArray) {
16423 captionArray.forEach(function (caption) {
16424 this.inbandTextTrack_.addCue(new Cue(caption.startTime + this.timestampOffset, caption.endTime + this.timestampOffset, caption.text));
16425 }, sourceHandler);
16426 }
16427
16428 if (metadataArray) {
16429 (function () {
16430 var videoDuration = durationOfVideo(sourceHandler.mediaSource_.duration);
16431
16432 metadataArray.forEach(function (metadata) {
16433 var time = metadata.cueTime + this.timestampOffset;
16434
16435 metadata.frames.forEach(function (frame) {
16436 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
16437
16438 cue.frame = frame;
16439 cue.value = frame;
16440 deprecateOldCue(cue);
16441
16442 this.metadataTrack_.addCue(cue);
16443 }, this);
16444 }, sourceHandler);
16445
16446 // Updating the metadeta cues so that
16447 // the endTime of each cue is the startTime of the next cue
16448 // the endTime of last cue is the duration of the video
16449 if (sourceHandler.metadataTrack_ && sourceHandler.metadataTrack_.cues && sourceHandler.metadataTrack_.cues.length) {
16450 (function () {
16451 var cues = sourceHandler.metadataTrack_.cues;
16452 var cuesArray = [];
16453
16454 // Create a copy of the TextTrackCueList...
16455 // ...disregarding cues with a falsey value
16456 for (var i = 0; i < cues.length; i++) {
16457 if (cues[i]) {
16458 cuesArray.push(cues[i]);
16459 }
16460 }
16461
16462 // Group cues by their startTime value
16463 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
16464 var timeSlot = obj[cue.startTime] || [];
16465
16466 timeSlot.push(cue);
16467 obj[cue.startTime] = timeSlot;
16468
16469 return obj;
16470 }, {});
16471
16472 // Sort startTimes by ascending order
16473 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
16474 return Number(a) - Number(b);
16475 });
16476
16477 // Map each cue group's endTime to the next group's startTime
16478 sortedStartTimes.forEach(function (startTime, idx) {
16479 var cueGroup = cuesGroupedByStartTime[startTime];
16480 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;
16481
16482 // Map each cue's endTime the next group's startTime
16483 cueGroup.forEach(function (cue) {
16484 cue.endTime = nextTime;
16485 });
16486 });
16487 })();
16488 }
16489 })();
16490 }
16491};
16492
16493exports['default'] = {
16494 addTextTrackData: addTextTrackData,
16495 durationOfVideo: durationOfVideo
16496};
16497module.exports = exports['default'];
16498}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
16499},{"global/window":30}],63:[function(require,module,exports){
16500/**
16501 * Remove the text track from the player if one with matching kind and
16502 * label properties already exists on the player
16503 *
16504 * @param {Object} player the video.js player object
16505 * @param {String} kind to be considered the text track's `kind` must match
16506 * @param {String} label to be considered the text track's `label` must match
16507 * @private
16508 */
16509'use strict';
16510
16511Object.defineProperty(exports, '__esModule', {
16512 value: true
16513});
16514var removeExistingTrack = function removeExistingTrack(player, kind, label) {
16515 var tracks = player.remoteTextTracks() || [];
16516
16517 for (var i = 0; i < tracks.length; i++) {
16518 var track = tracks[i];
16519
16520 if (track.kind === kind && track.label === label) {
16521 player.removeRemoteTextTrack(track);
16522 }
16523 }
16524};
16525
16526exports.removeExistingTrack = removeExistingTrack;
16527/**
16528 * Cleaup text tracks on video.js if they exist
16529 *
16530 * @param {Object} player the video.js player object
16531 * @private
16532 */
16533var cleanupTextTracks = function cleanupTextTracks(player) {
16534 removeExistingTrack(player, 'captions', 'cc1');
16535 removeExistingTrack(player, 'metadata', 'Timed Metadata');
16536};
16537exports.cleanupTextTracks = cleanupTextTracks;
16538},{}],64:[function(require,module,exports){
16539/**
16540 * @file codec-utils.js
16541 */
16542
16543/**
16544 * Check if a codec string refers to an audio codec.
16545 *
16546 * @param {String} codec codec string to check
16547 * @return {Boolean} if this is an audio codec
16548 * @private
16549 */
16550'use strict';
16551
16552Object.defineProperty(exports, '__esModule', {
16553 value: true
16554});
16555var isAudioCodec = function isAudioCodec(codec) {
16556 return (/mp4a\.\d+.\d+/i.test(codec)
16557 );
16558};
16559
16560/**
16561 * Check if a codec string refers to a video codec.
16562 *
16563 * @param {String} codec codec string to check
16564 * @return {Boolean} if this is a video codec
16565 * @private
16566 */
16567var isVideoCodec = function isVideoCodec(codec) {
16568 return (/avc1\.[\da-f]+/i.test(codec)
16569 );
16570};
16571
16572/**
16573 * Parse a content type header into a type and parameters
16574 * object
16575 *
16576 * @param {String} type the content type header
16577 * @return {Object} the parsed content-type
16578 * @private
16579 */
16580var parseContentType = function parseContentType(type) {
16581 var object = { type: '', parameters: {} };
16582 var parameters = type.trim().split(';');
16583
16584 // first parameter should always be content-type
16585 object.type = parameters.shift().trim();
16586 parameters.forEach(function (parameter) {
16587 var pair = parameter.trim().split('=');
16588
16589 if (pair.length > 1) {
16590 var _name = pair[0].replace(/"/g, '').trim();
16591 var value = pair[1].replace(/"/g, '').trim();
16592
16593 object.parameters[_name] = value;
16594 }
16595 });
16596
16597 return object;
16598};
16599
16600/**
16601 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
16602 * `avc1.<hhhhhh>`
16603 *
16604 * @param {Array} codecs an array of codec strings to fix
16605 * @return {Array} the translated codec array
16606 * @private
16607 */
16608var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
16609 return codecs.map(function (codec) {
16610 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
16611 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
16612 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
16613
16614 return 'avc1.' + profileHex + '00' + avcLevelHex;
16615 });
16616 });
16617};
16618
16619exports['default'] = {
16620 isAudioCodec: isAudioCodec,
16621 parseContentType: parseContentType,
16622 isVideoCodec: isVideoCodec,
16623 translateLegacyCodecs: translateLegacyCodecs
16624};
16625module.exports = exports['default'];
16626},{}],65:[function(require,module,exports){
16627/**
16628 * @file create-text-tracks-if-necessary.js
16629 */
16630'use strict';
16631
16632Object.defineProperty(exports, '__esModule', {
16633 value: true
16634});
16635
16636var _cleanupTextTracks = require('./cleanup-text-tracks');
16637
16638/**
16639 * Create text tracks on video.js if they exist on a segment.
16640 *
16641 * @param {Object} sourceBuffer the VSB or FSB
16642 * @param {Object} mediaSource the HTML or Flash media source
16643 * @param {Object} segment the segment that may contain the text track
16644 * @private
16645 */
16646var createTextTracksIfNecessary = function createTextTracksIfNecessary(sourceBuffer, mediaSource, segment) {
16647 var player = mediaSource.player_;
16648
16649 // create an in-band caption track if one is present in the segment
16650 if (segment.captions && segment.captions.length && !sourceBuffer.inbandTextTrack_) {
16651 (0, _cleanupTextTracks.removeExistingTrack)(player, 'captions', 'cc1');
16652 sourceBuffer.inbandTextTrack_ = player.addRemoteTextTrack({
16653 kind: 'captions',
16654 label: 'cc1'
16655 }, false).track;
16656 player.tech_.trigger({ type: 'usage', name: 'hls-608' });
16657 }
16658
16659 if (segment.metadata && segment.metadata.length && !sourceBuffer.metadataTrack_) {
16660 (0, _cleanupTextTracks.removeExistingTrack)(player, 'metadata', 'Timed Metadata', true);
16661 sourceBuffer.metadataTrack_ = player.addRemoteTextTrack({
16662 kind: 'metadata',
16663 label: 'Timed Metadata'
16664 }, false).track;
16665 sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
16666 }
16667};
16668
16669exports['default'] = createTextTracksIfNecessary;
16670module.exports = exports['default'];
16671},{"./cleanup-text-tracks":63}],66:[function(require,module,exports){
16672/**
16673 * @file flash-constants.js
16674 */
16675/**
16676 * The maximum size in bytes for append operations to the video.js
16677 * SWF. Calling through to Flash blocks and can be expensive so
16678 * we chunk data and pass through 4KB at a time, yielding to the
16679 * browser between chunks. This gives a theoretical maximum rate of
16680 * 1MB/s into Flash. Any higher and we begin to drop frames and UI
16681 * responsiveness suffers.
16682 *
16683 * @private
16684 */
16685"use strict";
16686
16687Object.defineProperty(exports, "__esModule", {
16688 value: true
16689});
16690var flashConstants = {
16691 // times in milliseconds
16692 TIME_BETWEEN_CHUNKS: 1,
16693 BYTES_PER_CHUNK: 1024 * 32
16694};
16695
16696exports["default"] = flashConstants;
16697module.exports = exports["default"];
16698},{}],67:[function(require,module,exports){
16699(function (global){
16700/**
16701 * @file flash-media-source.js
16702 */
16703'use strict';
16704
16705Object.defineProperty(exports, '__esModule', {
16706 value: true
16707});
16708
16709var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
16710
16711var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
16712
16713function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
16714
16715function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
16716
16717function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
16718
16719var _globalDocument = require('global/document');
16720
16721var _globalDocument2 = _interopRequireDefault(_globalDocument);
16722
16723var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
16724
16725var _videoJs2 = _interopRequireDefault(_videoJs);
16726
16727var _flashSourceBuffer = require('./flash-source-buffer');
16728
16729var _flashSourceBuffer2 = _interopRequireDefault(_flashSourceBuffer);
16730
16731var _flashConstants = require('./flash-constants');
16732
16733var _flashConstants2 = _interopRequireDefault(_flashConstants);
16734
16735var _codecUtils = require('./codec-utils');
16736
16737var _cleanupTextTracks = require('./cleanup-text-tracks');
16738
16739/**
16740 * A flash implmentation of HTML MediaSources and a polyfill
16741 * for browsers that don't support native or HTML MediaSources..
16742 *
16743 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
16744 * @class FlashMediaSource
16745 * @extends videojs.EventTarget
16746 */
16747
16748var FlashMediaSource = (function (_videojs$EventTarget) {
16749 _inherits(FlashMediaSource, _videojs$EventTarget);
16750
16751 function FlashMediaSource() {
16752 var _this = this;
16753
16754 _classCallCheck(this, FlashMediaSource);
16755
16756 _get(Object.getPrototypeOf(FlashMediaSource.prototype), 'constructor', this).call(this);
16757 this.sourceBuffers = [];
16758 this.readyState = 'closed';
16759
16760 this.on(['sourceopen', 'webkitsourceopen'], function (event) {
16761 // find the swf where we will push media data
16762 _this.swfObj = _globalDocument2['default'].getElementById(event.swfId);
16763 _this.player_ = (0, _videoJs2['default'])(_this.swfObj.parentNode);
16764 _this.tech_ = _this.swfObj.tech;
16765 _this.readyState = 'open';
16766
16767 _this.tech_.on('seeking', function () {
16768 var i = _this.sourceBuffers.length;
16769
16770 while (i--) {
16771 _this.sourceBuffers[i].abort();
16772 }
16773 });
16774
16775 if (_this.tech_.hls) {
16776 _this.tech_.hls.on('dispose', function () {
16777 (0, _cleanupTextTracks.cleanupTextTracks)(_this.player_);
16778 });
16779 }
16780
16781 // trigger load events
16782 if (_this.swfObj) {
16783 _this.swfObj.vjs_load();
16784 }
16785 });
16786 }
16787
16788 /**
16789 * Set or return the presentation duration.
16790 *
16791 * @param {Double} value the duration of the media in seconds
16792 * @param {Double} the current presentation duration
16793 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
16794 */
16795
16796 /**
16797 * We have this function so that the html and flash interfaces
16798 * are the same.
16799 *
16800 * @private
16801 */
16802
16803 _createClass(FlashMediaSource, [{
16804 key: 'addSeekableRange_',
16805 value: function addSeekableRange_() {}
16806 // intentional no-op
16807
16808 /**
16809 * Create a new flash source buffer and add it to our flash media source.
16810 *
16811 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
16812 * @param {String} type the content-type of the source
16813 * @return {Object} the flash source buffer
16814 */
16815
16816 }, {
16817 key: 'addSourceBuffer',
16818 value: function addSourceBuffer(type) {
16819 var parsedType = (0, _codecUtils.parseContentType)(type);
16820 var sourceBuffer = undefined;
16821
16822 // if this is an FLV type, we'll push data to flash
16823 if (parsedType.type === 'video/mp2t' || parsedType.type === 'audio/mp2t') {
16824 // Flash source buffers
16825 sourceBuffer = new _flashSourceBuffer2['default'](this);
16826 } else {
16827 throw new Error('NotSupportedError (Video.js)');
16828 }
16829
16830 this.sourceBuffers.push(sourceBuffer);
16831 return sourceBuffer;
16832 }
16833
16834 /**
16835 * Signals the end of the stream.
16836 *
16837 * @link https://w3c.github.io/media-source/#widl-MediaSource-endOfStream-void-EndOfStreamError-error
16838 * @param {String=} error Signals that a playback error
16839 * has occurred. If specified, it must be either "network" or
16840 * "decode".
16841 */
16842 }, {
16843 key: 'endOfStream',
16844 value: function endOfStream(error) {
16845 if (error === 'network') {
16846 // MEDIA_ERR_NETWORK
16847 this.tech_.error(2);
16848 } else if (error === 'decode') {
16849 // MEDIA_ERR_DECODE
16850 this.tech_.error(3);
16851 }
16852 if (this.readyState !== 'ended') {
16853 this.readyState = 'ended';
16854 this.swfObj.vjs_endOfStream();
16855 }
16856 }
16857 }]);
16858
16859 return FlashMediaSource;
16860})(_videoJs2['default'].EventTarget);
16861
16862exports['default'] = FlashMediaSource;
16863try {
16864 Object.defineProperty(FlashMediaSource.prototype, 'duration', {
16865 /**
16866 * Return the presentation duration.
16867 *
16868 * @return {Double} the duration of the media in seconds
16869 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
16870 */
16871 get: function get() {
16872 if (!this.swfObj) {
16873 return NaN;
16874 }
16875 // get the current duration from the SWF
16876 return this.swfObj.vjs_getProperty('duration');
16877 },
16878 /**
16879 * Set the presentation duration.
16880 *
16881 * @param {Double} value the duration of the media in seconds
16882 * @return {Double} the duration of the media in seconds
16883 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
16884 */
16885 set: function set(value) {
16886 var i = undefined;
16887 var oldDuration = this.swfObj.vjs_getProperty('duration');
16888
16889 this.swfObj.vjs_setProperty('duration', value);
16890
16891 if (value < oldDuration) {
16892 // In MSE, this triggers the range removal algorithm which causes
16893 // an update to occur
16894 for (i = 0; i < this.sourceBuffers.length; i++) {
16895 this.sourceBuffers[i].remove(value, oldDuration);
16896 }
16897 }
16898
16899 return value;
16900 }
16901 });
16902} catch (e) {
16903 // IE8 throws if defineProperty is called on a non-DOM node. We
16904 // don't support IE8 but we shouldn't throw an error if loaded
16905 // there.
16906 FlashMediaSource.prototype.duration = NaN;
16907}
16908
16909for (var property in _flashConstants2['default']) {
16910 FlashMediaSource[property] = _flashConstants2['default'][property];
16911}
16912module.exports = exports['default'];
16913}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
16914},{"./cleanup-text-tracks":63,"./codec-utils":64,"./flash-constants":66,"./flash-source-buffer":68,"global/document":29}],68:[function(require,module,exports){
16915(function (global){
16916/**
16917 * @file flash-source-buffer.js
16918 */
16919'use strict';
16920
16921Object.defineProperty(exports, '__esModule', {
16922 value: true
16923});
16924
16925var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
16926
16927var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
16928
16929function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
16930
16931function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
16932
16933function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
16934
16935var _globalWindow = require('global/window');
16936
16937var _globalWindow2 = _interopRequireDefault(_globalWindow);
16938
16939var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
16940
16941var _videoJs2 = _interopRequireDefault(_videoJs);
16942
16943var _muxJsLibFlv = require('mux.js/lib/flv');
16944
16945var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
16946
16947var _removeCuesFromTrack = require('./remove-cues-from-track');
16948
16949var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
16950
16951var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
16952
16953var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
16954
16955var _addTextTrackData = require('./add-text-track-data');
16956
16957var _flashTransmuxerWorker = require('./flash-transmuxer-worker');
16958
16959var _flashTransmuxerWorker2 = _interopRequireDefault(_flashTransmuxerWorker);
16960
16961var _webworkify = require('webworkify');
16962
16963var _webworkify2 = _interopRequireDefault(_webworkify);
16964
16965var _flashConstants = require('./flash-constants');
16966
16967var _flashConstants2 = _interopRequireDefault(_flashConstants);
16968
16969/**
16970 * A wrapper around the setTimeout function that uses
16971 * the flash constant time between ticks value.
16972 *
16973 * @param {Function} func the function callback to run
16974 * @private
16975 */
16976var scheduleTick = function scheduleTick(func) {
16977 // Chrome doesn't invoke requestAnimationFrame callbacks
16978 // in background tabs, so use setTimeout.
16979 _globalWindow2['default'].setTimeout(func, _flashConstants2['default'].TIME_BETWEEN_CHUNKS);
16980};
16981
16982/**
16983 * Generates a random string of max length 6
16984 *
16985 * @return {String} the randomly generated string
16986 * @function generateRandomString
16987 * @private
16988 */
16989var generateRandomString = function generateRandomString() {
16990 return Math.random().toString(36).slice(2, 8);
16991};
16992
16993/**
16994 * Round a number to a specified number of places much like
16995 * toFixed but return a number instead of a string representation.
16996 *
16997 * @param {Number} num A number
16998 * @param {Number} places The number of decimal places which to
16999 * round
17000 * @private
17001 */
17002var toDecimalPlaces = function toDecimalPlaces(num, places) {
17003 if (typeof places !== 'number' || places < 0) {
17004 places = 0;
17005 }
17006
17007 var scale = Math.pow(10, places);
17008
17009 return Math.round(num * scale) / scale;
17010};
17011
17012/**
17013 * A SourceBuffer implementation for Flash rather than HTML.
17014 *
17015 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
17016 * @param {Object} mediaSource the flash media source
17017 * @class FlashSourceBuffer
17018 * @extends videojs.EventTarget
17019 */
17020
17021var FlashSourceBuffer = (function (_videojs$EventTarget) {
17022 _inherits(FlashSourceBuffer, _videojs$EventTarget);
17023
17024 function FlashSourceBuffer(mediaSource) {
17025 var _this = this;
17026
17027 _classCallCheck(this, FlashSourceBuffer);
17028
17029 _get(Object.getPrototypeOf(FlashSourceBuffer.prototype), 'constructor', this).call(this);
17030 var encodedHeader = undefined;
17031
17032 // Start off using the globally defined value but refine
17033 // as we append data into flash
17034 this.chunkSize_ = _flashConstants2['default'].BYTES_PER_CHUNK;
17035
17036 // byte arrays queued to be appended
17037 this.buffer_ = [];
17038
17039 // the total number of queued bytes
17040 this.bufferSize_ = 0;
17041
17042 // to be able to determine the correct position to seek to, we
17043 // need to retain information about the mapping between the
17044 // media timeline and PTS values
17045 this.basePtsOffset_ = NaN;
17046
17047 this.mediaSource_ = mediaSource;
17048
17049 this.audioBufferEnd_ = NaN;
17050 this.videoBufferEnd_ = NaN;
17051
17052 // indicates whether the asynchronous continuation of an operation
17053 // is still being processed
17054 // see https://w3c.github.io/media-source/#widl-SourceBuffer-updating
17055 this.updating = false;
17056 this.timestampOffset_ = 0;
17057
17058 encodedHeader = _globalWindow2['default'].btoa(String.fromCharCode.apply(null, Array.prototype.slice.call(_muxJsLibFlv2['default'].getFlvHeader())));
17059
17060 // create function names with added randomness for the global callbacks flash will use
17061 // to get data from javascript into the swf. Random strings are added as a safety
17062 // measure for pages with multiple players since these functions will be global
17063 // instead of per instance. When making a call to the swf, the browser generates a
17064 // try catch code snippet, but just takes the function name and writes out an unquoted
17065 // call to that function. If the player id has any special characters, this will result
17066 // in an error, so safePlayerId replaces all special characters to '_'
17067 var safePlayerId = this.mediaSource_.player_.id().replace(/[^a-zA-Z0-9]/g, '_');
17068
17069 this.flashEncodedHeaderName_ = 'vjs_flashEncodedHeader_' + safePlayerId + generateRandomString();
17070 this.flashEncodedDataName_ = 'vjs_flashEncodedData_' + safePlayerId + generateRandomString();
17071
17072 _globalWindow2['default'][this.flashEncodedHeaderName_] = function () {
17073 delete _globalWindow2['default'][_this.flashEncodedHeaderName_];
17074 return encodedHeader;
17075 };
17076
17077 this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedHeaderName_);
17078
17079 this.transmuxer_ = (0, _webworkify2['default'])(_flashTransmuxerWorker2['default']);
17080 this.transmuxer_.postMessage({ action: 'init', options: {} });
17081 this.transmuxer_.onmessage = function (event) {
17082 if (event.data.action === 'data') {
17083 _this.receiveBuffer_(event.data.segment);
17084 }
17085 };
17086
17087 this.one('updateend', function () {
17088 _this.mediaSource_.tech_.trigger('loadedmetadata');
17089 });
17090
17091 Object.defineProperty(this, 'timestampOffset', {
17092 get: function get() {
17093 return this.timestampOffset_;
17094 },
17095 set: function set(val) {
17096 if (typeof val === 'number' && val >= 0) {
17097 this.timestampOffset_ = val;
17098 // We have to tell flash to expect a discontinuity
17099 this.mediaSource_.swfObj.vjs_discontinuity();
17100 // the media <-> PTS mapping must be re-established after
17101 // the discontinuity
17102 this.basePtsOffset_ = NaN;
17103 this.audioBufferEnd_ = NaN;
17104 this.videoBufferEnd_ = NaN;
17105
17106 this.transmuxer_.postMessage({ action: 'reset' });
17107 }
17108 }
17109 });
17110
17111 Object.defineProperty(this, 'buffered', {
17112 get: function get() {
17113 if (!this.mediaSource_ || !this.mediaSource_.swfObj || !('vjs_getProperty' in this.mediaSource_.swfObj)) {
17114 return _videoJs2['default'].createTimeRange();
17115 }
17116
17117 var buffered = this.mediaSource_.swfObj.vjs_getProperty('buffered');
17118
17119 if (buffered && buffered.length) {
17120 buffered[0][0] = toDecimalPlaces(buffered[0][0], 3);
17121 buffered[0][1] = toDecimalPlaces(buffered[0][1], 3);
17122 }
17123 return _videoJs2['default'].createTimeRanges(buffered);
17124 }
17125 });
17126
17127 // On a seek we remove all text track data since flash has no concept
17128 // of a buffered-range and everything else is reset on seek
17129 this.mediaSource_.player_.on('seeked', function () {
17130 (0, _removeCuesFromTrack2['default'])(0, Infinity, _this.metadataTrack_);
17131 (0, _removeCuesFromTrack2['default'])(0, Infinity, _this.inbandTextTrack_);
17132 });
17133
17134 this.mediaSource_.player_.tech_.hls.on('dispose', function () {
17135 _this.transmuxer_.terminate();
17136 });
17137 }
17138
17139 /**
17140 * Append bytes to the sourcebuffers buffer, in this case we
17141 * have to append it to swf object.
17142 *
17143 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
17144 * @param {Array} bytes
17145 */
17146
17147 _createClass(FlashSourceBuffer, [{
17148 key: 'appendBuffer',
17149 value: function appendBuffer(bytes) {
17150 var error = undefined;
17151
17152 if (this.updating) {
17153 error = new Error('SourceBuffer.append() cannot be called ' + 'while an update is in progress');
17154 error.name = 'InvalidStateError';
17155 error.code = 11;
17156 throw error;
17157 }
17158 this.updating = true;
17159 this.mediaSource_.readyState = 'open';
17160 this.trigger({ type: 'update' });
17161
17162 this.transmuxer_.postMessage({
17163 action: 'push',
17164 data: bytes.buffer,
17165 byteOffset: bytes.byteOffset,
17166 byteLength: bytes.byteLength
17167 }, [bytes.buffer]);
17168 this.transmuxer_.postMessage({ action: 'flush' });
17169 }
17170
17171 /**
17172 * Reset the parser and remove any data queued to be sent to the SWF.
17173 *
17174 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
17175 */
17176 }, {
17177 key: 'abort',
17178 value: function abort() {
17179 this.buffer_ = [];
17180 this.bufferSize_ = 0;
17181 this.mediaSource_.swfObj.vjs_abort();
17182
17183 // report any outstanding updates have ended
17184 if (this.updating) {
17185 this.updating = false;
17186 this.trigger({ type: 'updateend' });
17187 }
17188 }
17189
17190 /**
17191 * Flash cannot remove ranges already buffered in the NetStream
17192 * but seeking clears the buffer entirely. For most purposes,
17193 * having this operation act as a no-op is acceptable.
17194 *
17195 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
17196 * @param {Double} start start of the section to remove
17197 * @param {Double} end end of the section to remove
17198 */
17199 }, {
17200 key: 'remove',
17201 value: function remove(start, end) {
17202 (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
17203 (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTrack_);
17204 this.trigger({ type: 'update' });
17205 this.trigger({ type: 'updateend' });
17206 }
17207
17208 /**
17209 * Receive a buffer from the flv.
17210 *
17211 * @param {Object} segment
17212 * @private
17213 */
17214 }, {
17215 key: 'receiveBuffer_',
17216 value: function receiveBuffer_(segment) {
17217 var _this2 = this;
17218
17219 // create an in-band caption track if one is present in the segment
17220 (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
17221 (0, _addTextTrackData.addTextTrackData)(this, segment.captions, segment.metadata);
17222
17223 // Do this asynchronously since convertTagsToData_ can be time consuming
17224 scheduleTick(function () {
17225 var flvBytes = _this2.convertTagsToData_(segment);
17226
17227 if (_this2.buffer_.length === 0) {
17228 scheduleTick(_this2.processBuffer_.bind(_this2));
17229 }
17230
17231 if (flvBytes) {
17232 _this2.buffer_.push(flvBytes);
17233 _this2.bufferSize_ += flvBytes.byteLength;
17234 }
17235 });
17236 }
17237
17238 /**
17239 * Append a portion of the current buffer to the SWF.
17240 *
17241 * @private
17242 */
17243 }, {
17244 key: 'processBuffer_',
17245 value: function processBuffer_() {
17246 var _this3 = this;
17247
17248 var chunkSize = _flashConstants2['default'].BYTES_PER_CHUNK;
17249
17250 if (!this.buffer_.length) {
17251 if (this.updating !== false) {
17252 this.updating = false;
17253 this.trigger({ type: 'updateend' });
17254 }
17255 // do nothing if the buffer is empty
17256 return;
17257 }
17258
17259 // concatenate appends up to the max append size
17260 var chunk = this.buffer_[0].subarray(0, chunkSize);
17261
17262 // requeue any bytes that won't make it this round
17263 if (chunk.byteLength < chunkSize || this.buffer_[0].byteLength === chunkSize) {
17264 this.buffer_.shift();
17265 } else {
17266 this.buffer_[0] = this.buffer_[0].subarray(chunkSize);
17267 }
17268
17269 this.bufferSize_ -= chunk.byteLength;
17270
17271 // base64 encode the bytes
17272 var binary = [];
17273 var length = chunk.byteLength;
17274
17275 for (var i = 0; i < length; i++) {
17276 binary.push(String.fromCharCode(chunk[i]));
17277 }
17278 var b64str = _globalWindow2['default'].btoa(binary.join(''));
17279
17280 _globalWindow2['default'][this.flashEncodedDataName_] = function () {
17281 // schedule another processBuffer to process any left over data or to
17282 // trigger updateend
17283 scheduleTick(_this3.processBuffer_.bind(_this3));
17284 delete _globalWindow2['default'][_this3.flashEncodedDataName_];
17285 return b64str;
17286 };
17287
17288 // Notify the swf that segment data is ready to be appended
17289 this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedDataName_);
17290 }
17291
17292 /**
17293 * Turns an array of flv tags into a Uint8Array representing the
17294 * flv data. Also removes any tags that are before the current
17295 * time so that playback begins at or slightly after the right
17296 * place on a seek
17297 *
17298 * @private
17299 * @param {Object} segmentData object of segment data
17300 */
17301 }, {
17302 key: 'convertTagsToData_',
17303 value: function convertTagsToData_(segmentData) {
17304 var segmentByteLength = 0;
17305 var tech = this.mediaSource_.tech_;
17306 var videoTargetPts = 0;
17307 var segment = undefined;
17308 var videoTags = segmentData.tags.videoTags;
17309 var audioTags = segmentData.tags.audioTags;
17310
17311 // Establish the media timeline to PTS translation if we don't
17312 // have one already
17313 if (isNaN(this.basePtsOffset_) && (videoTags.length || audioTags.length)) {
17314 // We know there is at least one video or audio tag, but since we may not have both,
17315 // we use pts: Infinity for the missing tag. The will force the following Math.min
17316 // call will to use the proper pts value since it will always be less than Infinity
17317 var firstVideoTag = videoTags[0] || { pts: Infinity };
17318 var firstAudioTag = audioTags[0] || { pts: Infinity };
17319
17320 this.basePtsOffset_ = Math.min(firstAudioTag.pts, firstVideoTag.pts);
17321 }
17322
17323 if (tech.seeking()) {
17324 // Do not use previously saved buffer end values while seeking since buffer
17325 // is cleared on all seeks
17326 this.videoBufferEnd_ = NaN;
17327 this.audioBufferEnd_ = NaN;
17328 }
17329
17330 if (isNaN(this.videoBufferEnd_)) {
17331 if (tech.buffered().length) {
17332 videoTargetPts = tech.buffered().end(0) - this.timestampOffset;
17333 }
17334
17335 // Trim to currentTime if seeking
17336 if (tech.seeking()) {
17337 videoTargetPts = Math.max(videoTargetPts, tech.currentTime() - this.timestampOffset);
17338 }
17339
17340 // PTS values are represented in milliseconds
17341 videoTargetPts *= 1e3;
17342 videoTargetPts += this.basePtsOffset_;
17343 } else {
17344 // Add a fudge factor of 0.1 to the last video pts appended since a rendition change
17345 // could append an overlapping segment, in which case there is a high likelyhood
17346 // a tag could have a matching pts to videoBufferEnd_, which would cause
17347 // that tag to get appended by the tag.pts >= targetPts check below even though it
17348 // is a duplicate of what was previously appended
17349 videoTargetPts = this.videoBufferEnd_ + 0.1;
17350 }
17351
17352 // filter complete GOPs with a presentation time less than the seek target/end of buffer
17353 var currentIndex = videoTags.length;
17354
17355 // if the last tag is beyond videoTargetPts, then do not search the list for a GOP
17356 // since our videoTargetPts lies in a future segment
17357 if (currentIndex && videoTags[currentIndex - 1].pts >= videoTargetPts) {
17358 // Start by walking backwards from the end of the list until we reach a tag that
17359 // is equal to or less than videoTargetPts
17360 while (--currentIndex) {
17361 var currentTag = videoTags[currentIndex];
17362
17363 if (currentTag.pts > videoTargetPts) {
17364 continue;
17365 }
17366
17367 // if we see a keyFrame or metadata tag once we've gone below videoTargetPts,
17368 // exit the loop as this is the start of the GOP that we want to append
17369 if (currentTag.keyFrame || currentTag.metaDataTag) {
17370 break;
17371 }
17372 }
17373
17374 // We need to check if there are any metadata tags that come before currentIndex
17375 // as those will be metadata tags associated with the GOP we are appending
17376 // There could be 0 to 2 metadata tags that come before the currentIndex depending
17377 // on what videoTargetPts is and whether the transmuxer prepended metadata tags to this
17378 // key frame
17379 while (currentIndex) {
17380 var nextTag = videoTags[currentIndex - 1];
17381
17382 if (!nextTag.metaDataTag) {
17383 break;
17384 }
17385
17386 currentIndex--;
17387 }
17388 }
17389
17390 var filteredVideoTags = videoTags.slice(currentIndex);
17391
17392 var audioTargetPts = undefined;
17393
17394 if (isNaN(this.audioBufferEnd_)) {
17395 audioTargetPts = videoTargetPts;
17396 } else {
17397 // Add a fudge factor of 0.1 to the last video pts appended since a rendition change
17398 // could append an overlapping segment, in which case there is a high likelyhood
17399 // a tag could have a matching pts to videoBufferEnd_, which would cause
17400 // that tag to get appended by the tag.pts >= targetPts check below even though it
17401 // is a duplicate of what was previously appended
17402 audioTargetPts = this.audioBufferEnd_ + 0.1;
17403 }
17404
17405 if (filteredVideoTags.length) {
17406 // If targetPts intersects a GOP and we appended the tags for the GOP that came
17407 // before targetPts, we want to make sure to trim audio tags at the pts
17408 // of the first video tag to avoid brief moments of silence
17409 audioTargetPts = Math.min(audioTargetPts, filteredVideoTags[0].pts);
17410 }
17411
17412 // skip tags with a presentation time less than the seek target/end of buffer
17413 currentIndex = 0;
17414
17415 while (currentIndex < audioTags.length) {
17416 if (audioTags[currentIndex].pts >= audioTargetPts) {
17417 break;
17418 }
17419
17420 currentIndex++;
17421 }
17422
17423 var filteredAudioTags = audioTags.slice(currentIndex);
17424
17425 // update the audio and video buffer ends
17426 if (filteredAudioTags.length) {
17427 this.audioBufferEnd_ = filteredAudioTags[filteredAudioTags.length - 1].pts;
17428 }
17429 if (filteredVideoTags.length) {
17430 this.videoBufferEnd_ = filteredVideoTags[filteredVideoTags.length - 1].pts;
17431 }
17432
17433 var tags = this.getOrderedTags_(filteredVideoTags, filteredAudioTags);
17434
17435 if (tags.length === 0) {
17436 return;
17437 }
17438
17439 // If we are appending data that comes before our target pts, we want to tell
17440 // the swf to adjust its notion of current time to account for the extra tags
17441 // we are appending to complete the GOP that intersects with targetPts
17442 if (tags[0].pts < videoTargetPts && tech.seeking()) {
17443 var fudgeFactor = 1 / 30;
17444 var currentTime = tech.currentTime();
17445 var diff = (videoTargetPts - tags[0].pts) / 1e3;
17446 var adjustedTime = currentTime - diff;
17447
17448 if (adjustedTime < fudgeFactor) {
17449 adjustedTime = 0;
17450 }
17451
17452 try {
17453 this.mediaSource_.swfObj.vjs_adjustCurrentTime(adjustedTime);
17454 } catch (e) {
17455 // no-op for backwards compatability of swf. If adjustCurrentTime fails,
17456 // the swf may incorrectly report currentTime and buffered ranges
17457 // but should not affect playback over than the time displayed on the
17458 // progress bar is inaccurate
17459 }
17460 }
17461
17462 // concatenate the bytes into a single segment
17463 for (var i = 0; i < tags.length; i++) {
17464 segmentByteLength += tags[i].bytes.byteLength;
17465 }
17466 segment = new Uint8Array(segmentByteLength);
17467 for (var i = 0, j = 0; i < tags.length; i++) {
17468 segment.set(tags[i].bytes, j);
17469 j += tags[i].bytes.byteLength;
17470 }
17471
17472 return segment;
17473 }
17474
17475 /**
17476 * Assemble the FLV tags in decoder order.
17477 *
17478 * @private
17479 * @param {Array} videoTags list of video tags
17480 * @param {Array} audioTags list of audio tags
17481 */
17482 }, {
17483 key: 'getOrderedTags_',
17484 value: function getOrderedTags_(videoTags, audioTags) {
17485 var tag = undefined;
17486 var tags = [];
17487
17488 while (videoTags.length || audioTags.length) {
17489 if (!videoTags.length) {
17490 // only audio tags remain
17491 tag = audioTags.shift();
17492 } else if (!audioTags.length) {
17493 // only video tags remain
17494 tag = videoTags.shift();
17495 } else if (audioTags[0].dts < videoTags[0].dts) {
17496 // audio should be decoded next
17497 tag = audioTags.shift();
17498 } else {
17499 // video should be decoded next
17500 tag = videoTags.shift();
17501 }
17502
17503 tags.push(tag);
17504 }
17505
17506 return tags;
17507 }
17508 }]);
17509
17510 return FlashSourceBuffer;
17511})(_videoJs2['default'].EventTarget);
17512
17513exports['default'] = FlashSourceBuffer;
17514module.exports = exports['default'];
17515}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
17516},{"./add-text-track-data":62,"./create-text-tracks-if-necessary":65,"./flash-constants":66,"./flash-transmuxer-worker":69,"./remove-cues-from-track":71,"global/window":30,"mux.js/lib/flv":44,"webworkify":75}],69:[function(require,module,exports){
17517/**
17518 * @file flash-transmuxer-worker.js
17519 */
17520'use strict';
17521
17522Object.defineProperty(exports, '__esModule', {
17523 value: true
17524});
17525
17526var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
17527
17528function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
17529
17530function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
17531
17532var _globalWindow = require('global/window');
17533
17534var _globalWindow2 = _interopRequireDefault(_globalWindow);
17535
17536var _muxJsLibFlv = require('mux.js/lib/flv');
17537
17538var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
17539
17540/**
17541 * Re-emits transmuxer events by converting them into messages to the
17542 * world outside the worker.
17543 *
17544 * @param {Object} transmuxer the transmuxer to wire events on
17545 * @private
17546 */
17547var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
17548 transmuxer.on('data', function (segment) {
17549 _globalWindow2['default'].postMessage({
17550 action: 'data',
17551 segment: segment
17552 });
17553 });
17554
17555 transmuxer.on('done', function (data) {
17556 _globalWindow2['default'].postMessage({ action: 'done' });
17557 });
17558};
17559
17560/**
17561 * All incoming messages route through this hash. If no function exists
17562 * to handle an incoming message, then we ignore the message.
17563 *
17564 * @class MessageHandlers
17565 * @param {Object} options the options to initialize with
17566 */
17567
17568var MessageHandlers = (function () {
17569 function MessageHandlers(options) {
17570 _classCallCheck(this, MessageHandlers);
17571
17572 this.options = options || {};
17573 this.init();
17574 }
17575
17576 /**
17577 * Our web wroker interface so that things can talk to mux.js
17578 * that will be running in a web worker. The scope is passed to this by
17579 * webworkify.
17580 *
17581 * @param {Object} self the scope for the web worker
17582 */
17583
17584 /**
17585 * initialize our web worker and wire all the events.
17586 */
17587
17588 _createClass(MessageHandlers, [{
17589 key: 'init',
17590 value: function init() {
17591 if (this.transmuxer) {
17592 this.transmuxer.dispose();
17593 }
17594 this.transmuxer = new _muxJsLibFlv2['default'].Transmuxer(this.options);
17595 wireTransmuxerEvents(this.transmuxer);
17596 }
17597
17598 /**
17599 * Adds data (a ts segment) to the start of the transmuxer pipeline for
17600 * processing.
17601 *
17602 * @param {ArrayBuffer} data data to push into the muxer
17603 */
17604 }, {
17605 key: 'push',
17606 value: function push(data) {
17607 // Cast array buffer to correct type for transmuxer
17608 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
17609
17610 this.transmuxer.push(segment);
17611 }
17612
17613 /**
17614 * Recreate the transmuxer so that the next segment added via `push`
17615 * start with a fresh transmuxer.
17616 */
17617 }, {
17618 key: 'reset',
17619 value: function reset() {
17620 this.init();
17621 }
17622
17623 /**
17624 * Forces the pipeline to finish processing the last segment and emit its
17625 * results.
17626 */
17627 }, {
17628 key: 'flush',
17629 value: function flush() {
17630 this.transmuxer.flush();
17631 }
17632 }]);
17633
17634 return MessageHandlers;
17635})();
17636
17637var FlashTransmuxerWorker = function FlashTransmuxerWorker(self) {
17638 self.onmessage = function (event) {
17639 if (event.data.action === 'init' && event.data.options) {
17640 this.messageHandlers = new MessageHandlers(event.data.options);
17641 return;
17642 }
17643
17644 if (!this.messageHandlers) {
17645 this.messageHandlers = new MessageHandlers();
17646 }
17647
17648 if (event.data && event.data.action && event.data.action !== 'init') {
17649 if (this.messageHandlers[event.data.action]) {
17650 this.messageHandlers[event.data.action](event.data);
17651 }
17652 }
17653 };
17654};
17655
17656exports['default'] = function (self) {
17657 return new FlashTransmuxerWorker(self);
17658};
17659
17660module.exports = exports['default'];
17661},{"global/window":30,"mux.js/lib/flv":44}],70:[function(require,module,exports){
17662(function (global){
17663/**
17664 * @file html-media-source.js
17665 */
17666'use strict';
17667
17668Object.defineProperty(exports, '__esModule', {
17669 value: true
17670});
17671
17672var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
17673
17674var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
17675
17676function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
17677
17678function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
17679
17680function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
17681
17682var _globalWindow = require('global/window');
17683
17684var _globalWindow2 = _interopRequireDefault(_globalWindow);
17685
17686var _globalDocument = require('global/document');
17687
17688var _globalDocument2 = _interopRequireDefault(_globalDocument);
17689
17690var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
17691
17692var _videoJs2 = _interopRequireDefault(_videoJs);
17693
17694var _virtualSourceBuffer = require('./virtual-source-buffer');
17695
17696var _virtualSourceBuffer2 = _interopRequireDefault(_virtualSourceBuffer);
17697
17698var _addTextTrackData = require('./add-text-track-data');
17699
17700var _codecUtils = require('./codec-utils');
17701
17702var _cleanupTextTracks = require('./cleanup-text-tracks');
17703
17704/**
17705 * Our MediaSource implementation in HTML, mimics native
17706 * MediaSource where/if possible.
17707 *
17708 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
17709 * @class HtmlMediaSource
17710 * @extends videojs.EventTarget
17711 */
17712
17713var HtmlMediaSource = (function (_videojs$EventTarget) {
17714 _inherits(HtmlMediaSource, _videojs$EventTarget);
17715
17716 function HtmlMediaSource() {
17717 var _this = this;
17718
17719 _classCallCheck(this, HtmlMediaSource);
17720
17721 _get(Object.getPrototypeOf(HtmlMediaSource.prototype), 'constructor', this).call(this);
17722 var property = undefined;
17723
17724 this.nativeMediaSource_ = new _globalWindow2['default'].MediaSource();
17725 // delegate to the native MediaSource's methods by default
17726 for (property in this.nativeMediaSource_) {
17727 if (!(property in HtmlMediaSource.prototype) && typeof this.nativeMediaSource_[property] === 'function') {
17728 this[property] = this.nativeMediaSource_[property].bind(this.nativeMediaSource_);
17729 }
17730 }
17731
17732 // emulate `duration` and `seekable` until seeking can be
17733 // handled uniformly for live streams
17734 // see https://github.com/w3c/media-source/issues/5
17735 this.duration_ = NaN;
17736 Object.defineProperty(this, 'duration', {
17737 get: function get() {
17738 if (this.duration_ === Infinity) {
17739 return this.duration_;
17740 }
17741 return this.nativeMediaSource_.duration;
17742 },
17743 set: function set(duration) {
17744 this.duration_ = duration;
17745 if (duration !== Infinity) {
17746 this.nativeMediaSource_.duration = duration;
17747 return;
17748 }
17749 }
17750 });
17751 Object.defineProperty(this, 'seekable', {
17752 get: function get() {
17753 if (this.duration_ === Infinity) {
17754 return _videoJs2['default'].createTimeRanges([[0, this.nativeMediaSource_.duration]]);
17755 }
17756 return this.nativeMediaSource_.seekable;
17757 }
17758 });
17759
17760 Object.defineProperty(this, 'readyState', {
17761 get: function get() {
17762 return this.nativeMediaSource_.readyState;
17763 }
17764 });
17765
17766 Object.defineProperty(this, 'activeSourceBuffers', {
17767 get: function get() {
17768 return this.activeSourceBuffers_;
17769 }
17770 });
17771
17772 // the list of virtual and native SourceBuffers created by this
17773 // MediaSource
17774 this.sourceBuffers = [];
17775
17776 this.activeSourceBuffers_ = [];
17777
17778 /**
17779 * update the list of active source buffers based upon various
17780 * imformation from HLS and video.js
17781 *
17782 * @private
17783 */
17784 this.updateActiveSourceBuffers_ = function () {
17785 // Retain the reference but empty the array
17786 _this.activeSourceBuffers_.length = 0;
17787
17788 // If there is only one source buffer, then it will always be active and audio will
17789 // be disabled based on the codec of the source buffer
17790 if (_this.sourceBuffers.length === 1) {
17791 var sourceBuffer = _this.sourceBuffers[0];
17792
17793 sourceBuffer.appendAudioInitSegment_ = true;
17794 sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
17795 _this.activeSourceBuffers_.push(sourceBuffer);
17796 return;
17797 }
17798
17799 // There are 2 source buffers, a combined (possibly video only) source buffer and
17800 // and an audio only source buffer.
17801 // By default, the audio in the combined virtual source buffer is enabled
17802 // and the audio-only source buffer (if it exists) is disabled.
17803 var disableCombined = false;
17804 var disableAudioOnly = true;
17805
17806 // TODO: maybe we can store the sourcebuffers on the track objects?
17807 // safari may do something like this
17808 for (var i = 0; i < _this.player_.audioTracks().length; i++) {
17809 var track = _this.player_.audioTracks()[i];
17810
17811 if (track.enabled && track.kind !== 'main') {
17812 // The enabled track is an alternate audio track so disable the audio in
17813 // the combined source buffer and enable the audio-only source buffer.
17814 disableCombined = true;
17815 disableAudioOnly = false;
17816 break;
17817 }
17818 }
17819
17820 _this.sourceBuffers.forEach(function (sourceBuffer) {
17821 /* eslinst-disable */
17822 // TODO once codecs are required, we can switch to using the codecs to determine
17823 // what stream is the video stream, rather than relying on videoTracks
17824 /* eslinst-enable */
17825
17826 sourceBuffer.appendAudioInitSegment_ = true;
17827
17828 if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
17829 // combined
17830 sourceBuffer.audioDisabled_ = disableCombined;
17831 } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
17832 // If the "combined" source buffer is video only, then we do not want
17833 // disable the audio-only source buffer (this is mostly for demuxed
17834 // audio and video hls)
17835 sourceBuffer.audioDisabled_ = true;
17836 disableAudioOnly = false;
17837 } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
17838 // audio only
17839 sourceBuffer.audioDisabled_ = disableAudioOnly;
17840 if (disableAudioOnly) {
17841 return;
17842 }
17843 }
17844
17845 _this.activeSourceBuffers_.push(sourceBuffer);
17846 });
17847 };
17848
17849 this.onPlayerMediachange_ = function () {
17850 _this.sourceBuffers.forEach(function (sourceBuffer) {
17851 sourceBuffer.appendAudioInitSegment_ = true;
17852 });
17853 };
17854
17855 // Re-emit MediaSource events on the polyfill
17856 ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
17857 this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
17858 }, this);
17859
17860 // capture the associated player when the MediaSource is
17861 // successfully attached
17862 this.on('sourceopen', function (event) {
17863 // Get the player this MediaSource is attached to
17864 var video = _globalDocument2['default'].querySelector('[src="' + _this.url_ + '"]');
17865
17866 if (!video) {
17867 return;
17868 }
17869
17870 _this.player_ = (0, _videoJs2['default'])(video.parentNode);
17871
17872 if (_this.player_.audioTracks && _this.player_.audioTracks()) {
17873 _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
17874 _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
17875 _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
17876 }
17877
17878 _this.player_.on('mediachange', _this.onPlayerMediachange_);
17879 });
17880
17881 this.on('sourceended', function (event) {
17882 var duration = (0, _addTextTrackData.durationOfVideo)(_this.duration);
17883
17884 for (var i = 0; i < _this.sourceBuffers.length; i++) {
17885 var sourcebuffer = _this.sourceBuffers[i];
17886 var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;
17887
17888 if (cues && cues.length) {
17889 cues[cues.length - 1].endTime = duration;
17890 }
17891 }
17892 });
17893
17894 // explicitly terminate any WebWorkers that were created
17895 // by SourceHandlers
17896 this.on('sourceclose', function (event) {
17897 this.sourceBuffers.forEach(function (sourceBuffer) {
17898 if (sourceBuffer.transmuxer_) {
17899 sourceBuffer.transmuxer_.terminate();
17900 }
17901 });
17902
17903 this.sourceBuffers.length = 0;
17904 if (!this.player_) {
17905 return;
17906 }
17907
17908 (0, _cleanupTextTracks.cleanupTextTracks)(this.player_);
17909
17910 if (this.player_.audioTracks && this.player_.audioTracks()) {
17911 this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
17912 this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
17913 this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
17914 }
17915
17916 // We can only change this if the player hasn't been disposed of yet
17917 // because `off` eventually tries to use the el_ property. If it has
17918 // been disposed of, then don't worry about it because there are no
17919 // event handlers left to unbind anyway
17920 if (this.player_.el_) {
17921 this.player_.off('mediachange', this.onPlayerMediachange_);
17922 }
17923 });
17924 }
17925
17926 /**
17927 * Add a range that that can now be seeked to.
17928 *
17929 * @param {Double} start where to start the addition
17930 * @param {Double} end where to end the addition
17931 * @private
17932 */
17933
17934 _createClass(HtmlMediaSource, [{
17935 key: 'addSeekableRange_',
17936 value: function addSeekableRange_(start, end) {
17937 var error = undefined;
17938
17939 if (this.duration !== Infinity) {
17940 error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
17941 error.name = 'InvalidStateError';
17942 error.code = 11;
17943 throw error;
17944 }
17945
17946 if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
17947 this.nativeMediaSource_.duration = end;
17948 }
17949 }
17950
17951 /**
17952 * Add a source buffer to the media source.
17953 *
17954 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
17955 * @param {String} type the content-type of the content
17956 * @return {Object} the created source buffer
17957 */
17958 }, {
17959 key: 'addSourceBuffer',
17960 value: function addSourceBuffer(type) {
17961 var buffer = undefined;
17962 var parsedType = (0, _codecUtils.parseContentType)(type);
17963
17964 // Create a VirtualSourceBuffer to transmux MPEG-2 transport
17965 // stream segments into fragmented MP4s
17966 if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
17967 var codecs = [];
17968
17969 if (parsedType.parameters && parsedType.parameters.codecs) {
17970 codecs = parsedType.parameters.codecs.split(',');
17971 codecs = (0, _codecUtils.translateLegacyCodecs)(codecs);
17972 codecs = codecs.filter(function (codec) {
17973 return (0, _codecUtils.isAudioCodec)(codec) || (0, _codecUtils.isVideoCodec)(codec);
17974 });
17975 }
17976
17977 if (codecs.length === 0) {
17978 codecs = ['avc1.4d400d', 'mp4a.40.2'];
17979 }
17980
17981 buffer = new _virtualSourceBuffer2['default'](this, codecs);
17982
17983 if (this.sourceBuffers.length !== 0) {
17984 // If another VirtualSourceBuffer already exists, then we are creating a
17985 // SourceBuffer for an alternate audio track and therefore we know that
17986 // the source has both an audio and video track.
17987 // That means we should trigger the manual creation of the real
17988 // SourceBuffers instead of waiting for the transmuxer to return data
17989 this.sourceBuffers[0].createRealSourceBuffers_();
17990 buffer.createRealSourceBuffers_();
17991
17992 // Automatically disable the audio on the first source buffer if
17993 // a second source buffer is ever created
17994 this.sourceBuffers[0].audioDisabled_ = true;
17995 }
17996 } else {
17997 // delegate to the native implementation
17998 buffer = this.nativeMediaSource_.addSourceBuffer(type);
17999 }
18000
18001 this.sourceBuffers.push(buffer);
18002 return buffer;
18003 }
18004 }]);
18005
18006 return HtmlMediaSource;
18007})(_videoJs2['default'].EventTarget);
18008
18009exports['default'] = HtmlMediaSource;
18010module.exports = exports['default'];
18011}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
18012},{"./add-text-track-data":62,"./cleanup-text-tracks":63,"./codec-utils":64,"./virtual-source-buffer":74,"global/document":29,"global/window":30}],71:[function(require,module,exports){
18013/**
18014 * @file remove-cues-from-track.js
18015 */
18016
18017/**
18018 * Remove cues from a track on video.js.
18019 *
18020 * @param {Double} start start of where we should remove the cue
18021 * @param {Double} end end of where the we should remove the cue
18022 * @param {Object} track the text track to remove the cues from
18023 * @private
18024 */
18025"use strict";
18026
18027Object.defineProperty(exports, "__esModule", {
18028 value: true
18029});
18030var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
18031 var i = undefined;
18032 var cue = undefined;
18033
18034 if (!track) {
18035 return;
18036 }
18037
18038 if (!track.cues) {
18039 return;
18040 }
18041
18042 i = track.cues.length;
18043
18044 while (i--) {
18045 cue = track.cues[i];
18046
18047 // Remove any overlapping cue
18048 if (cue.startTime <= end && cue.endTime >= start) {
18049 track.removeCue(cue);
18050 }
18051 }
18052};
18053
18054exports["default"] = removeCuesFromTrack;
18055module.exports = exports["default"];
18056},{}],72:[function(require,module,exports){
18057/**
18058 * @file transmuxer-worker.js
18059 */
18060
18061/**
18062 * videojs-contrib-media-sources
18063 *
18064 * Copyright (c) 2015 Brightcove
18065 * All rights reserved.
18066 *
18067 * Handles communication between the browser-world and the mux.js
18068 * transmuxer running inside of a WebWorker by exposing a simple
18069 * message-based interface to a Transmuxer object.
18070 */
18071'use strict';
18072
18073Object.defineProperty(exports, '__esModule', {
18074 value: true
18075});
18076
18077var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
18078
18079function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
18080
18081function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
18082
18083var _globalWindow = require('global/window');
18084
18085var _globalWindow2 = _interopRequireDefault(_globalWindow);
18086
18087var _muxJsLibMp4 = require('mux.js/lib/mp4');
18088
18089var _muxJsLibMp42 = _interopRequireDefault(_muxJsLibMp4);
18090
18091/**
18092 * Re-emits transmuxer events by converting them into messages to the
18093 * world outside the worker.
18094 *
18095 * @param {Object} transmuxer the transmuxer to wire events on
18096 * @private
18097 */
18098var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
18099 transmuxer.on('data', function (segment) {
18100 // transfer ownership of the underlying ArrayBuffer
18101 // instead of doing a copy to save memory
18102 // ArrayBuffers are transferable but generic TypedArrays are not
18103 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
18104 var initArray = segment.initSegment;
18105
18106 segment.initSegment = {
18107 data: initArray.buffer,
18108 byteOffset: initArray.byteOffset,
18109 byteLength: initArray.byteLength
18110 };
18111
18112 var typedArray = segment.data;
18113
18114 segment.data = typedArray.buffer;
18115 _globalWindow2['default'].postMessage({
18116 action: 'data',
18117 segment: segment,
18118 byteOffset: typedArray.byteOffset,
18119 byteLength: typedArray.byteLength
18120 }, [segment.data]);
18121 });
18122
18123 if (transmuxer.captionStream) {
18124 transmuxer.captionStream.on('data', function (caption) {
18125 _globalWindow2['default'].postMessage({
18126 action: 'caption',
18127 data: caption
18128 });
18129 });
18130 }
18131
18132 transmuxer.on('done', function (data) {
18133 _globalWindow2['default'].postMessage({ action: 'done' });
18134 });
18135};
18136
18137/**
18138 * All incoming messages route through this hash. If no function exists
18139 * to handle an incoming message, then we ignore the message.
18140 *
18141 * @class MessageHandlers
18142 * @param {Object} options the options to initialize with
18143 */
18144
18145var MessageHandlers = (function () {
18146 function MessageHandlers(options) {
18147 _classCallCheck(this, MessageHandlers);
18148
18149 this.options = options || {};
18150 this.init();
18151 }
18152
18153 /**
18154 * Our web wroker interface so that things can talk to mux.js
18155 * that will be running in a web worker. the scope is passed to this by
18156 * webworkify.
18157 *
18158 * @param {Object} self the scope for the web worker
18159 */
18160
18161 /**
18162 * initialize our web worker and wire all the events.
18163 */
18164
18165 _createClass(MessageHandlers, [{
18166 key: 'init',
18167 value: function init() {
18168 if (this.transmuxer) {
18169 this.transmuxer.dispose();
18170 }
18171 this.transmuxer = new _muxJsLibMp42['default'].Transmuxer(this.options);
18172 wireTransmuxerEvents(this.transmuxer);
18173 }
18174
18175 /**
18176 * Adds data (a ts segment) to the start of the transmuxer pipeline for
18177 * processing.
18178 *
18179 * @param {ArrayBuffer} data data to push into the muxer
18180 */
18181 }, {
18182 key: 'push',
18183 value: function push(data) {
18184 // Cast array buffer to correct type for transmuxer
18185 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
18186
18187 this.transmuxer.push(segment);
18188 }
18189
18190 /**
18191 * Recreate the transmuxer so that the next segment added via `push`
18192 * start with a fresh transmuxer.
18193 */
18194 }, {
18195 key: 'reset',
18196 value: function reset() {
18197 this.init();
18198 }
18199
18200 /**
18201 * Set the value that will be used as the `baseMediaDecodeTime` time for the
18202 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
18203 * set relative to the first based on the PTS values.
18204 *
18205 * @param {Object} data used to set the timestamp offset in the muxer
18206 */
18207 }, {
18208 key: 'setTimestampOffset',
18209 value: function setTimestampOffset(data) {
18210 var timestampOffset = data.timestampOffset || 0;
18211
18212 this.transmuxer.setBaseMediaDecodeTime(Math.round(timestampOffset * 90000));
18213 }
18214 }, {
18215 key: 'setAudioAppendStart',
18216 value: function setAudioAppendStart(data) {
18217 this.transmuxer.setAudioAppendStart(Math.ceil(data.appendStart * 90000));
18218 }
18219
18220 /**
18221 * Forces the pipeline to finish processing the last segment and emit it's
18222 * results.
18223 *
18224 * @param {Object} data event data, not really used
18225 */
18226 }, {
18227 key: 'flush',
18228 value: function flush(data) {
18229 this.transmuxer.flush();
18230 }
18231 }]);
18232
18233 return MessageHandlers;
18234})();
18235
18236var TransmuxerWorker = function TransmuxerWorker(self) {
18237 self.onmessage = function (event) {
18238 if (event.data.action === 'init' && event.data.options) {
18239 this.messageHandlers = new MessageHandlers(event.data.options);
18240 return;
18241 }
18242
18243 if (!this.messageHandlers) {
18244 this.messageHandlers = new MessageHandlers();
18245 }
18246
18247 if (event.data && event.data.action && event.data.action !== 'init') {
18248 if (this.messageHandlers[event.data.action]) {
18249 this.messageHandlers[event.data.action](event.data);
18250 }
18251 }
18252 };
18253};
18254
18255exports['default'] = function (self) {
18256 return new TransmuxerWorker(self);
18257};
18258
18259module.exports = exports['default'];
18260},{"global/window":30,"mux.js/lib/mp4":53}],73:[function(require,module,exports){
18261(function (global){
18262/**
18263 * @file videojs-contrib-media-sources.js
18264 */
18265'use strict';
18266
18267Object.defineProperty(exports, '__esModule', {
18268 value: true
18269});
18270
18271function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
18272
18273var _globalWindow = require('global/window');
18274
18275var _globalWindow2 = _interopRequireDefault(_globalWindow);
18276
18277var _flashMediaSource = require('./flash-media-source');
18278
18279var _flashMediaSource2 = _interopRequireDefault(_flashMediaSource);
18280
18281var _htmlMediaSource = require('./html-media-source');
18282
18283var _htmlMediaSource2 = _interopRequireDefault(_htmlMediaSource);
18284
18285var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
18286
18287var _videoJs2 = _interopRequireDefault(_videoJs);
18288
18289var urlCount = 0;
18290
18291// ------------
18292// Media Source
18293// ------------
18294
18295var defaults = {
18296 // how to determine the MediaSource implementation to use. There
18297 // are three available modes:
18298 // - auto: use native MediaSources where available and Flash
18299 // everywhere else
18300 // - html5: always use native MediaSources
18301 // - flash: always use the Flash MediaSource polyfill
18302 mode: 'auto'
18303};
18304
18305// store references to the media sources so they can be connected
18306// to a video element (a swf object)
18307// TODO: can we store this somewhere local to this module?
18308_videoJs2['default'].mediaSources = {};
18309
18310/**
18311 * Provide a method for a swf object to notify JS that a
18312 * media source is now open.
18313 *
18314 * @param {String} msObjectURL string referencing the MSE Object URL
18315 * @param {String} swfId the swf id
18316 */
18317var open = function open(msObjectURL, swfId) {
18318 var mediaSource = _videoJs2['default'].mediaSources[msObjectURL];
18319
18320 if (mediaSource) {
18321 mediaSource.trigger({ type: 'sourceopen', swfId: swfId });
18322 } else {
18323 throw new Error('Media Source not found (Video.js)');
18324 }
18325};
18326
18327/**
18328 * Check to see if the native MediaSource object exists and supports
18329 * an MP4 container with both H.264 video and AAC-LC audio.
18330 *
18331 * @return {Boolean} if native media sources are supported
18332 */
18333var supportsNativeMediaSources = function supportsNativeMediaSources() {
18334 return !!_globalWindow2['default'].MediaSource && !!_globalWindow2['default'].MediaSource.isTypeSupported && _globalWindow2['default'].MediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
18335};
18336
18337/**
18338 * An emulation of the MediaSource API so that we can support
18339 * native and non-native functionality such as flash and
18340 * video/mp2t videos. returns an instance of HtmlMediaSource or
18341 * FlashMediaSource depending on what is supported and what options
18342 * are passed in.
18343 *
18344 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/MediaSource
18345 * @param {Object} options options to use during setup.
18346 */
18347var MediaSource = function MediaSource(options) {
18348 var settings = _videoJs2['default'].mergeOptions(defaults, options);
18349
18350 this.MediaSource = {
18351 open: open,
18352 supportsNativeMediaSources: supportsNativeMediaSources
18353 };
18354
18355 // determine whether HTML MediaSources should be used
18356 if (settings.mode === 'html5' || settings.mode === 'auto' && supportsNativeMediaSources()) {
18357 return new _htmlMediaSource2['default']();
18358 } else if (_videoJs2['default'].getTech('Flash')) {
18359 return new _flashMediaSource2['default']();
18360 }
18361
18362 throw new Error('Cannot use Flash or Html5 to create a MediaSource for this video');
18363};
18364
18365exports.MediaSource = MediaSource;
18366MediaSource.open = open;
18367MediaSource.supportsNativeMediaSources = supportsNativeMediaSources;
18368
18369/**
18370 * A wrapper around the native URL for our MSE object
18371 * implementation, this object is exposed under videojs.URL
18372 *
18373 * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/URL
18374 */
18375var URL = {
18376 /**
18377 * A wrapper around the native createObjectURL for our objects.
18378 * This function maps a native or emulated mediaSource to a blob
18379 * url so that it can be loaded into video.js
18380 *
18381 * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
18382 * @param {MediaSource} object the object to create a blob url to
18383 */
18384 createObjectURL: function createObjectURL(object) {
18385 var objectUrlPrefix = 'blob:vjs-media-source/';
18386 var url = undefined;
18387
18388 // use the native MediaSource to generate an object URL
18389 if (object instanceof _htmlMediaSource2['default']) {
18390 url = _globalWindow2['default'].URL.createObjectURL(object.nativeMediaSource_);
18391 object.url_ = url;
18392 return url;
18393 }
18394 // if the object isn't an emulated MediaSource, delegate to the
18395 // native implementation
18396 if (!(object instanceof _flashMediaSource2['default'])) {
18397 url = _globalWindow2['default'].URL.createObjectURL(object);
18398 object.url_ = url;
18399 return url;
18400 }
18401
18402 // build a URL that can be used to map back to the emulated
18403 // MediaSource
18404 url = objectUrlPrefix + urlCount;
18405
18406 urlCount++;
18407
18408 // setup the mapping back to object
18409 _videoJs2['default'].mediaSources[url] = object;
18410
18411 return url;
18412 }
18413};
18414
18415exports.URL = URL;
18416_videoJs2['default'].MediaSource = MediaSource;
18417_videoJs2['default'].URL = URL;
18418}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
18419},{"./flash-media-source":67,"./html-media-source":70,"global/window":30}],74:[function(require,module,exports){
18420(function (global){
18421/**
18422 * @file virtual-source-buffer.js
18423 */
18424'use strict';
18425
18426Object.defineProperty(exports, '__esModule', {
18427 value: true
18428});
18429
18430var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
18431
18432var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
18433
18434function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
18435
18436function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
18437
18438function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
18439
18440var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
18441
18442var _videoJs2 = _interopRequireDefault(_videoJs);
18443
18444var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
18445
18446var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
18447
18448var _removeCuesFromTrack = require('./remove-cues-from-track');
18449
18450var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
18451
18452var _addTextTrackData = require('./add-text-track-data');
18453
18454var _webworkify = require('webworkify');
18455
18456var _webworkify2 = _interopRequireDefault(_webworkify);
18457
18458var _transmuxerWorker = require('./transmuxer-worker');
18459
18460var _transmuxerWorker2 = _interopRequireDefault(_transmuxerWorker);
18461
18462var _codecUtils = require('./codec-utils');
18463
18464// We create a wrapper around the SourceBuffer so that we can manage the
18465// state of the `updating` property manually. We have to do this because
18466// Firefox changes `updating` to false long before triggering `updateend`
18467// events and that was causing strange problems in videojs-contrib-hls
18468var makeWrappedSourceBuffer = function makeWrappedSourceBuffer(mediaSource, mimeType) {
18469 var sourceBuffer = mediaSource.addSourceBuffer(mimeType);
18470 var wrapper = Object.create(null);
18471
18472 wrapper.updating = false;
18473 wrapper.realBuffer_ = sourceBuffer;
18474
18475 var _loop = function (key) {
18476 if (typeof sourceBuffer[key] === 'function') {
18477 wrapper[key] = function () {
18478 return sourceBuffer[key].apply(sourceBuffer, arguments);
18479 };
18480 } else if (typeof wrapper[key] === 'undefined') {
18481 Object.defineProperty(wrapper, key, {
18482 get: function get() {
18483 return sourceBuffer[key];
18484 },
18485 set: function set(v) {
18486 return sourceBuffer[key] = v;
18487 }
18488 });
18489 }
18490 };
18491
18492 for (var key in sourceBuffer) {
18493 _loop(key);
18494 }
18495
18496 return wrapper;
18497};
18498
18499/**
18500 * VirtualSourceBuffers exist so that we can transmux non native formats
18501 * into a native format, but keep the same api as a native source buffer.
18502 * It creates a transmuxer, that works in its own thread (a web worker) and
18503 * that transmuxer muxes the data into a native format. VirtualSourceBuffer will
18504 * then send all of that data to the naive sourcebuffer so that it is
18505 * indestinguishable from a natively supported format.
18506 *
18507 * @param {HtmlMediaSource} mediaSource the parent mediaSource
18508 * @param {Array} codecs array of codecs that we will be dealing with
18509 * @class VirtualSourceBuffer
18510 * @extends video.js.EventTarget
18511 */
18512
18513var VirtualSourceBuffer = (function (_videojs$EventTarget) {
18514 _inherits(VirtualSourceBuffer, _videojs$EventTarget);
18515
18516 function VirtualSourceBuffer(mediaSource, codecs) {
18517 var _this = this;
18518
18519 _classCallCheck(this, VirtualSourceBuffer);
18520
18521 _get(Object.getPrototypeOf(VirtualSourceBuffer.prototype), 'constructor', this).call(this, _videoJs2['default'].EventTarget);
18522 this.timestampOffset_ = 0;
18523 this.pendingBuffers_ = [];
18524 this.bufferUpdating_ = false;
18525
18526 this.mediaSource_ = mediaSource;
18527 this.codecs_ = codecs;
18528 this.audioCodec_ = null;
18529 this.videoCodec_ = null;
18530 this.audioDisabled_ = false;
18531 this.appendAudioInitSegment_ = true;
18532
18533 var options = {
18534 remux: false
18535 };
18536
18537 this.codecs_.forEach(function (codec) {
18538 if ((0, _codecUtils.isAudioCodec)(codec)) {
18539 _this.audioCodec_ = codec;
18540 } else if ((0, _codecUtils.isVideoCodec)(codec)) {
18541 _this.videoCodec_ = codec;
18542 }
18543 });
18544
18545 // append muxed segments to their respective native buffers as
18546 // soon as they are available
18547 this.transmuxer_ = (0, _webworkify2['default'])(_transmuxerWorker2['default']);
18548 this.transmuxer_.postMessage({ action: 'init', options: options });
18549
18550 this.transmuxer_.onmessage = function (event) {
18551 if (event.data.action === 'data') {
18552 return _this.data_(event);
18553 }
18554
18555 if (event.data.action === 'done') {
18556 return _this.done_(event);
18557 }
18558 };
18559
18560 // this timestampOffset is a property with the side-effect of resetting
18561 // baseMediaDecodeTime in the transmuxer on the setter
18562 Object.defineProperty(this, 'timestampOffset', {
18563 get: function get() {
18564 return this.timestampOffset_;
18565 },
18566 set: function set(val) {
18567 if (typeof val === 'number' && val >= 0) {
18568 this.timestampOffset_ = val;
18569 this.appendAudioInitSegment_ = true;
18570
18571 // We have to tell the transmuxer to set the baseMediaDecodeTime to
18572 // the desired timestampOffset for the next segment
18573 this.transmuxer_.postMessage({
18574 action: 'setTimestampOffset',
18575 timestampOffset: val
18576 });
18577 }
18578 }
18579 });
18580
18581 // setting the append window affects both source buffers
18582 Object.defineProperty(this, 'appendWindowStart', {
18583 get: function get() {
18584 return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
18585 },
18586 set: function set(start) {
18587 if (this.videoBuffer_) {
18588 this.videoBuffer_.appendWindowStart = start;
18589 }
18590 if (this.audioBuffer_) {
18591 this.audioBuffer_.appendWindowStart = start;
18592 }
18593 }
18594 });
18595
18596 // this buffer is "updating" if either of its native buffers are
18597 Object.defineProperty(this, 'updating', {
18598 get: function get() {
18599 return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
18600 }
18601 });
18602
18603 // the buffered property is the intersection of the buffered
18604 // ranges of the native source buffers
18605 Object.defineProperty(this, 'buffered', {
18606 get: function get() {
18607 var start = null;
18608 var end = null;
18609 var arity = 0;
18610 var extents = [];
18611 var ranges = [];
18612
18613 // neither buffer has been created yet
18614 if (!this.videoBuffer_ && !this.audioBuffer_) {
18615 return _videoJs2['default'].createTimeRange();
18616 }
18617
18618 // only one buffer is configured
18619 if (!this.videoBuffer_) {
18620 return this.audioBuffer_.buffered;
18621 }
18622 if (!this.audioBuffer_) {
18623 return this.videoBuffer_.buffered;
18624 }
18625
18626 // both buffers are configured
18627 if (this.audioDisabled_) {
18628 return this.videoBuffer_.buffered;
18629 }
18630
18631 // both buffers are empty
18632 if (this.videoBuffer_.buffered.length === 0 && this.audioBuffer_.buffered.length === 0) {
18633 return _videoJs2['default'].createTimeRange();
18634 }
18635
18636 // Handle the case where we have both buffers and create an
18637 // intersection of the two
18638 var videoBuffered = this.videoBuffer_.buffered;
18639 var audioBuffered = this.audioBuffer_.buffered;
18640 var count = videoBuffered.length;
18641
18642 // A) Gather up all start and end times
18643 while (count--) {
18644 extents.push({ time: videoBuffered.start(count), type: 'start' });
18645 extents.push({ time: videoBuffered.end(count), type: 'end' });
18646 }
18647 count = audioBuffered.length;
18648 while (count--) {
18649 extents.push({ time: audioBuffered.start(count), type: 'start' });
18650 extents.push({ time: audioBuffered.end(count), type: 'end' });
18651 }
18652 // B) Sort them by time
18653 extents.sort(function (a, b) {
18654 return a.time - b.time;
18655 });
18656
18657 // C) Go along one by one incrementing arity for start and decrementing
18658 // arity for ends
18659 for (count = 0; count < extents.length; count++) {
18660 if (extents[count].type === 'start') {
18661 arity++;
18662
18663 // D) If arity is ever incremented to 2 we are entering an
18664 // overlapping range
18665 if (arity === 2) {
18666 start = extents[count].time;
18667 }
18668 } else if (extents[count].type === 'end') {
18669 arity--;
18670
18671 // E) If arity is ever decremented to 1 we leaving an
18672 // overlapping range
18673 if (arity === 1) {
18674 end = extents[count].time;
18675 }
18676 }
18677
18678 // F) Record overlapping ranges
18679 if (start !== null && end !== null) {
18680 ranges.push([start, end]);
18681 start = null;
18682 end = null;
18683 }
18684 }
18685
18686 return _videoJs2['default'].createTimeRanges(ranges);
18687 }
18688 });
18689 }
18690
18691 /**
18692 * When we get a data event from the transmuxer
18693 * we call this function and handle the data that
18694 * was sent to us
18695 *
18696 * @private
18697 * @param {Event} event the data event from the transmuxer
18698 */
18699
18700 _createClass(VirtualSourceBuffer, [{
18701 key: 'data_',
18702 value: function data_(event) {
18703 var segment = event.data.segment;
18704
18705 // Cast ArrayBuffer to TypedArray
18706 segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);
18707
18708 segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);
18709
18710 (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
18711
18712 // Add the segments to the pendingBuffers array
18713 this.pendingBuffers_.push(segment);
18714 return;
18715 }
18716
18717 /**
18718 * When we get a done event from the transmuxer
18719 * we call this function and we process all
18720 * of the pending data that we have been saving in the
18721 * data_ function
18722 *
18723 * @private
18724 * @param {Event} event the done event from the transmuxer
18725 */
18726 }, {
18727 key: 'done_',
18728 value: function done_(event) {
18729 // Don't process and append data if the mediaSource is closed
18730 if (this.mediaSource_.readyState === 'closed') {
18731 this.pendingBuffers_.length = 0;
18732 return;
18733 }
18734
18735 // All buffers should have been flushed from the muxer
18736 // start processing anything we have received
18737 this.processPendingSegments_();
18738 return;
18739 }
18740
18741 /**
18742 * Create our internal native audio/video source buffers and add
18743 * event handlers to them with the following conditions:
18744 * 1. they do not already exist on the mediaSource
18745 * 2. this VSB has a codec for them
18746 *
18747 * @private
18748 */
18749 }, {
18750 key: 'createRealSourceBuffers_',
18751 value: function createRealSourceBuffers_() {
18752 var _this2 = this;
18753
18754 var types = ['audio', 'video'];
18755
18756 types.forEach(function (type) {
18757 // Don't create a SourceBuffer of this type if we don't have a
18758 // codec for it
18759 if (!_this2[type + 'Codec_']) {
18760 return;
18761 }
18762
18763 // Do nothing if a SourceBuffer of this type already exists
18764 if (_this2[type + 'Buffer_']) {
18765 return;
18766 }
18767
18768 var buffer = null;
18769
18770 // If the mediasource already has a SourceBuffer for the codec
18771 // use that
18772 if (_this2.mediaSource_[type + 'Buffer_']) {
18773 buffer = _this2.mediaSource_[type + 'Buffer_'];
18774 // In multiple audio track cases, the audio source buffer is disabled
18775 // on the main VirtualSourceBuffer by the HTMLMediaSource much earlier
18776 // than createRealSourceBuffers_ is called to create the second
18777 // VirtualSourceBuffer because that happens as a side-effect of
18778 // videojs-contrib-hls starting the audioSegmentLoader. As a result,
18779 // the audioBuffer is essentially "ownerless" and no one will toggle
18780 // the `updating` state back to false once the `updateend` event is received
18781 //
18782 // Setting `updating` to false manually will work around this
18783 // situation and allow work to continue
18784 buffer.updating = false;
18785 } else {
18786 var codecProperty = type + 'Codec_';
18787 var mimeType = type + '/mp4;codecs="' + _this2[codecProperty] + '"';
18788
18789 buffer = makeWrappedSourceBuffer(_this2.mediaSource_.nativeMediaSource_, mimeType);
18790
18791 _this2.mediaSource_[type + 'Buffer_'] = buffer;
18792 }
18793
18794 _this2[type + 'Buffer_'] = buffer;
18795
18796 // Wire up the events to the SourceBuffer
18797 ['update', 'updatestart', 'updateend'].forEach(function (event) {
18798 buffer.addEventListener(event, function () {
18799 // if audio is disabled
18800 if (type === 'audio' && _this2.audioDisabled_) {
18801 return;
18802 }
18803
18804 if (event === 'updateend') {
18805 _this2[type + 'Buffer_'].updating = false;
18806 }
18807
18808 var shouldTrigger = types.every(function (t) {
18809 // skip checking audio's updating status if audio
18810 // is not enabled
18811 if (t === 'audio' && _this2.audioDisabled_) {
18812 return true;
18813 }
18814 // if the other type if updating we don't trigger
18815 if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
18816 return false;
18817 }
18818 return true;
18819 });
18820
18821 if (shouldTrigger) {
18822 return _this2.trigger(event);
18823 }
18824 });
18825 });
18826 });
18827 }
18828
18829 /**
18830 * Emulate the native mediasource function, but our function will
18831 * send all of the proposed segments to the transmuxer so that we
18832 * can transmux them before we append them to our internal
18833 * native source buffers in the correct format.
18834 *
18835 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
18836 * @param {Uint8Array} segment the segment to append to the buffer
18837 */
18838 }, {
18839 key: 'appendBuffer',
18840 value: function appendBuffer(segment) {
18841 // Start the internal "updating" state
18842 this.bufferUpdating_ = true;
18843
18844 if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
18845 var audioBuffered = this.audioBuffer_.buffered;
18846
18847 this.transmuxer_.postMessage({
18848 action: 'setAudioAppendStart',
18849 appendStart: audioBuffered.end(audioBuffered.length - 1)
18850 });
18851 }
18852
18853 this.transmuxer_.postMessage({
18854 action: 'push',
18855 // Send the typed-array of data as an ArrayBuffer so that
18856 // it can be sent as a "Transferable" and avoid the costly
18857 // memory copy
18858 data: segment.buffer,
18859
18860 // To recreate the original typed-array, we need information
18861 // about what portion of the ArrayBuffer it was a view into
18862 byteOffset: segment.byteOffset,
18863 byteLength: segment.byteLength
18864 }, [segment.buffer]);
18865 this.transmuxer_.postMessage({ action: 'flush' });
18866 }
18867
18868 /**
18869 * Emulate the native mediasource function and remove parts
18870 * of the buffer from any of our internal buffers that exist
18871 *
18872 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
18873 * @param {Double} start position to start the remove at
18874 * @param {Double} end position to end the remove at
18875 */
18876 }, {
18877 key: 'remove',
18878 value: function remove(start, end) {
18879 if (this.videoBuffer_) {
18880 this.videoBuffer_.updating = true;
18881 this.videoBuffer_.remove(start, end);
18882 }
18883 if (!this.audioDisabled_ && this.audioBuffer_) {
18884 this.audioBuffer_.updating = true;
18885 this.audioBuffer_.remove(start, end);
18886 }
18887
18888 // Remove Metadata Cues (id3)
18889 (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
18890
18891 // Remove Any Captions
18892 (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTrack_);
18893 }
18894
18895 /**
18896 * Process any segments that the muxer has output
18897 * Concatenate segments together based on type and append them into
18898 * their respective sourceBuffers
18899 *
18900 * @private
18901 */
18902 }, {
18903 key: 'processPendingSegments_',
18904 value: function processPendingSegments_() {
18905 var sortedSegments = {
18906 video: {
18907 segments: [],
18908 bytes: 0
18909 },
18910 audio: {
18911 segments: [],
18912 bytes: 0
18913 },
18914 captions: [],
18915 metadata: []
18916 };
18917
18918 // Sort segments into separate video/audio arrays and
18919 // keep track of their total byte lengths
18920 sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
18921 var type = segment.type;
18922 var data = segment.data;
18923 var initSegment = segment.initSegment;
18924
18925 segmentObj[type].segments.push(data);
18926 segmentObj[type].bytes += data.byteLength;
18927
18928 segmentObj[type].initSegment = initSegment;
18929
18930 // Gather any captions into a single array
18931 if (segment.captions) {
18932 segmentObj.captions = segmentObj.captions.concat(segment.captions);
18933 }
18934
18935 if (segment.info) {
18936 segmentObj[type].info = segment.info;
18937 }
18938
18939 // Gather any metadata into a single array
18940 if (segment.metadata) {
18941 segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
18942 }
18943
18944 return segmentObj;
18945 }, sortedSegments);
18946
18947 // Create the real source buffers if they don't exist by now since we
18948 // finally are sure what tracks are contained in the source
18949 if (!this.videoBuffer_ && !this.audioBuffer_) {
18950 // Remove any codecs that may have been specified by default but
18951 // are no longer applicable now
18952 if (sortedSegments.video.bytes === 0) {
18953 this.videoCodec_ = null;
18954 }
18955 if (sortedSegments.audio.bytes === 0) {
18956 this.audioCodec_ = null;
18957 }
18958
18959 this.createRealSourceBuffers_();
18960 }
18961
18962 if (sortedSegments.audio.info) {
18963 this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
18964 }
18965 if (sortedSegments.video.info) {
18966 this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
18967 }
18968
18969 if (this.appendAudioInitSegment_) {
18970 if (!this.audioDisabled_ && this.audioBuffer_) {
18971 sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
18972 sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
18973 }
18974 this.appendAudioInitSegment_ = false;
18975 }
18976
18977 // Merge multiple video and audio segments into one and append
18978 if (this.videoBuffer_) {
18979 sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
18980 sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
18981 this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
18982 // TODO: are video tracks the only ones with text tracks?
18983 (0, _addTextTrackData.addTextTrackData)(this, sortedSegments.captions, sortedSegments.metadata);
18984 }
18985
18986 if (!this.audioDisabled_ && this.audioBuffer_) {
18987 this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
18988 }
18989
18990 this.pendingBuffers_.length = 0;
18991
18992 // We are no longer in the internal "updating" state
18993 this.bufferUpdating_ = false;
18994 }
18995
18996 /**
18997 * Combine all segments into a single Uint8Array and then append them
18998 * to the destination buffer
18999 *
19000 * @param {Object} segmentObj
19001 * @param {SourceBuffer} destinationBuffer native source buffer to append data to
19002 * @private
19003 */
19004 }, {
19005 key: 'concatAndAppendSegments_',
19006 value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
19007 var offset = 0;
19008 var tempBuffer = undefined;
19009
19010 if (segmentObj.bytes) {
19011 tempBuffer = new Uint8Array(segmentObj.bytes);
19012
19013 // Combine the individual segments into one large typed-array
19014 segmentObj.segments.forEach(function (segment) {
19015 tempBuffer.set(segment, offset);
19016 offset += segment.byteLength;
19017 });
19018
19019 try {
19020 destinationBuffer.updating = true;
19021 destinationBuffer.appendBuffer(tempBuffer);
19022 } catch (error) {
19023 if (this.mediaSource_.player_) {
19024 this.mediaSource_.player_.error({
19025 code: -3,
19026 type: 'APPEND_BUFFER_ERR',
19027 message: error.message,
19028 originalError: error
19029 });
19030 }
19031 }
19032 }
19033 }
19034
19035 /**
19036 * Emulate the native mediasource function. abort any soureBuffer
19037 * actions and throw out any un-appended data.
19038 *
19039 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
19040 */
19041 }, {
19042 key: 'abort',
19043 value: function abort() {
19044 if (this.videoBuffer_) {
19045 this.videoBuffer_.abort();
19046 }
19047 if (!this.audioDisabled_ && this.audioBuffer_) {
19048 this.audioBuffer_.abort();
19049 }
19050 if (this.transmuxer_) {
19051 this.transmuxer_.postMessage({ action: 'reset' });
19052 }
19053 this.pendingBuffers_.length = 0;
19054 this.bufferUpdating_ = false;
19055 }
19056 }]);
19057
19058 return VirtualSourceBuffer;
19059})(_videoJs2['default'].EventTarget);
19060
19061exports['default'] = VirtualSourceBuffer;
19062module.exports = exports['default'];
19063}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
19064},{"./add-text-track-data":62,"./codec-utils":64,"./create-text-tracks-if-necessary":65,"./remove-cues-from-track":71,"./transmuxer-worker":72,"webworkify":75}],75:[function(require,module,exports){
19065var bundleFn = arguments[3];
19066var sources = arguments[4];
19067var cache = arguments[5];
19068
19069var stringify = JSON.stringify;
19070
19071module.exports = function (fn) {
19072 var keys = [];
19073 var wkey;
19074 var cacheKeys = Object.keys(cache);
19075
19076 for (var i = 0, l = cacheKeys.length; i < l; i++) {
19077 var key = cacheKeys[i];
19078 if (cache[key].exports === fn) {
19079 wkey = key;
19080 break;
19081 }
19082 }
19083
19084 if (!wkey) {
19085 wkey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
19086 var wcache = {};
19087 for (var i = 0, l = cacheKeys.length; i < l; i++) {
19088 var key = cacheKeys[i];
19089 wcache[key] = key;
19090 }
19091 sources[wkey] = [
19092 Function(['require','module','exports'], '(' + fn + ')(self)'),
19093 wcache
19094 ];
19095 }
19096 var skey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
19097
19098 var scache = {}; scache[wkey] = wkey;
19099 sources[skey] = [
19100 Function(['require'],'require(' + stringify(wkey) + ')(self)'),
19101 scache
19102 ];
19103
19104 var src = '(' + bundleFn + ')({'
19105 + Object.keys(sources).map(function (key) {
19106 return stringify(key) + ':['
19107 + sources[key][0]
19108 + ',' + stringify(sources[key][1]) + ']'
19109 ;
19110 }).join(',')
19111 + '},{},[' + stringify(skey) + '])'
19112 ;
19113
19114 var URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
19115
19116 return new Worker(URL.createObjectURL(
19117 new Blob([src], { type: 'text/javascript' })
19118 ));
19119};
19120
19121},{}],76:[function(require,module,exports){
19122(function (global){
19123/**
19124 * @file videojs-contrib-hls.js
19125 *
19126 * The main file for the HLS project.
19127 * License: https://github.com/videojs/videojs-contrib-hls/blob/master/LICENSE
19128 */
19129'use strict';
19130
19131var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
19132
19133var _get = function get(_x4, _x5, _x6) { var _again = true; _function: while (_again) { var object = _x4, property = _x5, receiver = _x6; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x4 = parent; _x5 = property; _x6 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
19134
19135function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
19136
19137function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
19138
19139function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
19140
19141var _globalDocument = require('global/document');
19142
19143var _globalDocument2 = _interopRequireDefault(_globalDocument);
19144
19145var _playlistLoader = require('./playlist-loader');
19146
19147var _playlistLoader2 = _interopRequireDefault(_playlistLoader);
19148
19149var _playlist = require('./playlist');
19150
19151var _playlist2 = _interopRequireDefault(_playlist);
19152
19153var _xhr = require('./xhr');
19154
19155var _xhr2 = _interopRequireDefault(_xhr);
19156
19157var _aesDecrypter = require('aes-decrypter');
19158
19159var _binUtils = require('./bin-utils');
19160
19161var _binUtils2 = _interopRequireDefault(_binUtils);
19162
19163var _videojsContribMediaSources = require('videojs-contrib-media-sources');
19164
19165var _m3u8Parser = require('m3u8-parser');
19166
19167var _m3u8Parser2 = _interopRequireDefault(_m3u8Parser);
19168
19169var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
19170
19171var _videoJs2 = _interopRequireDefault(_videoJs);
19172
19173var _masterPlaylistController = require('./master-playlist-controller');
19174
19175var _config = require('./config');
19176
19177var _config2 = _interopRequireDefault(_config);
19178
19179var _renditionMixin = require('./rendition-mixin');
19180
19181var _renditionMixin2 = _interopRequireDefault(_renditionMixin);
19182
19183var _globalWindow = require('global/window');
19184
19185var _globalWindow2 = _interopRequireDefault(_globalWindow);
19186
19187var _playbackWatcher = require('./playback-watcher');
19188
19189var _playbackWatcher2 = _interopRequireDefault(_playbackWatcher);
19190
19191var _reloadSourceOnError = require('./reload-source-on-error');
19192
19193var _reloadSourceOnError2 = _interopRequireDefault(_reloadSourceOnError);
19194
19195var _playlistSelectorsJs = require('./playlist-selectors.js');
19196
19197// 0.5 MB/s
19198var INITIAL_BANDWIDTH_DESKTOP = 4194304;
19199// 0.0625 MB/s
19200var INITIAL_BANDWIDTH_MOBILE = 500000;
19201
19202var Hls = {
19203 PlaylistLoader: _playlistLoader2['default'],
19204 Playlist: _playlist2['default'],
19205 Decrypter: _aesDecrypter.Decrypter,
19206 AsyncStream: _aesDecrypter.AsyncStream,
19207 decrypt: _aesDecrypter.decrypt,
19208 utils: _binUtils2['default'],
19209
19210 STANDARD_PLAYLIST_SELECTOR: _playlistSelectorsJs.lastBandwidthSelector,
19211 comparePlaylistBandwidth: _playlistSelectorsJs.comparePlaylistBandwidth,
19212 comparePlaylistResolution: _playlistSelectorsJs.comparePlaylistResolution,
19213
19214 xhr: (0, _xhr2['default'])()
19215};
19216
19217// Define getter/setters for config properites
19218['GOAL_BUFFER_LENGTH', 'MAX_GOAL_BUFFER_LENGTH', 'GOAL_BUFFER_LENGTH_RATE', 'BUFFER_LOW_WATER_LINE', 'MAX_BUFFER_LOW_WATER_LINE', 'BUFFER_LOW_WATER_LINE_RATE', 'BANDWIDTH_VARIANCE'].forEach(function (prop) {
19219 Object.defineProperty(Hls, prop, {
19220 get: function get() {
19221 _videoJs2['default'].log.warn('using Hls.' + prop + ' is UNSAFE be sure you know what you are doing');
19222 return _config2['default'][prop];
19223 },
19224 set: function set(value) {
19225 _videoJs2['default'].log.warn('using Hls.' + prop + ' is UNSAFE be sure you know what you are doing');
19226
19227 if (typeof value !== 'number' || value < 0) {
19228 _videoJs2['default'].log.warn('value of Hls.' + prop + ' must be greater than or equal to 0');
19229 return;
19230 }
19231
19232 _config2['default'][prop] = value;
19233 }
19234 });
19235});
19236
19237/**
19238 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in hls.
19239 *
19240 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
19241 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
19242 * @function handleHlsMediaChange
19243 */
19244var handleHlsMediaChange = function handleHlsMediaChange(qualityLevels, playlistLoader) {
19245 var newPlaylist = playlistLoader.media();
19246 var selectedIndex = -1;
19247
19248 for (var i = 0; i < qualityLevels.length; i++) {
19249 if (qualityLevels[i].id === newPlaylist.uri) {
19250 selectedIndex = i;
19251 break;
19252 }
19253 }
19254
19255 qualityLevels.selectedIndex_ = selectedIndex;
19256 qualityLevels.trigger({
19257 selectedIndex: selectedIndex,
19258 type: 'change'
19259 });
19260};
19261
19262/**
19263 * Adds quality levels to list once playlist metadata is available
19264 *
19265 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
19266 * @param {Object} hls Hls object to listen to for media events.
19267 * @function handleHlsLoadedMetadata
19268 */
19269var handleHlsLoadedMetadata = function handleHlsLoadedMetadata(qualityLevels, hls) {
19270 hls.representations().forEach(function (rep) {
19271 qualityLevels.addQualityLevel(rep);
19272 });
19273 handleHlsMediaChange(qualityLevels, hls.playlists);
19274};
19275
19276// HLS is a source handler, not a tech. Make sure attempts to use it
19277// as one do not cause exceptions.
19278Hls.canPlaySource = function () {
19279 return _videoJs2['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
19280};
19281
19282/**
19283 * Whether the browser has built-in HLS support.
19284 */
19285Hls.supportsNativeHls = (function () {
19286 var video = _globalDocument2['default'].createElement('video');
19287
19288 // native HLS is definitely not supported if HTML5 video isn't
19289 if (!_videoJs2['default'].getTech('Html5').isSupported()) {
19290 return false;
19291 }
19292
19293 // HLS manifests can go by many mime-types
19294 var canPlay = [
19295 // Apple santioned
19296 'application/vnd.apple.mpegurl',
19297 // Apple sanctioned for backwards compatibility
19298 'audio/mpegurl',
19299 // Very common
19300 'audio/x-mpegurl',
19301 // Very common
19302 'application/x-mpegurl',
19303 // Included for completeness
19304 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
19305
19306 return canPlay.some(function (canItPlay) {
19307 return (/maybe|probably/i.test(video.canPlayType(canItPlay))
19308 );
19309 });
19310})();
19311
19312/**
19313 * HLS is a source handler, not a tech. Make sure attempts to use it
19314 * as one do not cause exceptions.
19315 */
19316Hls.isSupported = function () {
19317 return _videoJs2['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
19318};
19319
19320var USER_AGENT = _globalWindow2['default'].navigator && _globalWindow2['default'].navigator.userAgent || '';
19321
19322/**
19323 * Determines whether the browser supports a change in the audio configuration
19324 * during playback. Currently only Firefox 48 and below do not support this.
19325 * window.isSecureContext is a propterty that was added to window in firefox 49,
19326 * so we can use it to detect Firefox 49+.
19327 *
19328 * @return {Boolean} Whether the browser supports audio config change during playback
19329 */
19330Hls.supportsAudioInfoChange_ = function () {
19331 if (_videoJs2['default'].browser.IS_FIREFOX) {
19332 var firefoxVersionMap = /Firefox\/([\d.]+)/i.exec(USER_AGENT);
19333 var version = parseInt(firefoxVersionMap[1], 10);
19334
19335 return version >= 49;
19336 }
19337 return true;
19338};
19339
19340var Component = _videoJs2['default'].getComponent('Component');
19341
19342/**
19343 * The Hls Handler object, where we orchestrate all of the parts
19344 * of HLS to interact with video.js
19345 *
19346 * @class HlsHandler
19347 * @extends videojs.Component
19348 * @param {Object} source the soruce object
19349 * @param {Tech} tech the parent tech object
19350 * @param {Object} options optional and required options
19351 */
19352
19353var HlsHandler = (function (_Component) {
19354 _inherits(HlsHandler, _Component);
19355
19356 function HlsHandler(source, tech, options) {
19357 var _this = this;
19358
19359 _classCallCheck(this, HlsHandler);
19360
19361 _get(Object.getPrototypeOf(HlsHandler.prototype), 'constructor', this).call(this, tech, options.hls);
19362
19363 // tech.player() is deprecated but setup a reference to HLS for
19364 // backwards-compatibility
19365 if (tech.options_ && tech.options_.playerId) {
19366 var _player = (0, _videoJs2['default'])(tech.options_.playerId);
19367
19368 if (!_player.hasOwnProperty('hls')) {
19369 Object.defineProperty(_player, 'hls', {
19370 get: function get() {
19371 _videoJs2['default'].log.warn('player.hls is deprecated. Use player.tech_.hls instead.');
19372 tech.trigger({ type: 'usage', name: 'hls-player-access' });
19373 return _this;
19374 }
19375 });
19376 }
19377 }
19378
19379 this.tech_ = tech;
19380 this.source_ = source;
19381 this.stats = {};
19382 this.ignoreNextSeekingEvent_ = false;
19383 this.setOptions_();
19384
19385 // overriding native HLS only works if audio tracks have been emulated
19386 // error early if we're misconfigured:
19387 if (this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
19388 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
19389 }
19390
19391 // listen for fullscreenchange events for this player so that we
19392 // can adjust our quality selection quickly
19393 this.on(_globalDocument2['default'], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
19394 var fullscreenElement = _globalDocument2['default'].fullscreenElement || _globalDocument2['default'].webkitFullscreenElement || _globalDocument2['default'].mozFullScreenElement || _globalDocument2['default'].msFullscreenElement;
19395
19396 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
19397 _this.masterPlaylistController_.fastQualityChange_();
19398 }
19399 });
19400
19401 this.on(this.tech_, 'seeking', function () {
19402 if (this.ignoreNextSeekingEvent_) {
19403 this.ignoreNextSeekingEvent_ = false;
19404 return;
19405 }
19406
19407 this.setCurrentTime(this.tech_.currentTime());
19408 });
19409 this.on(this.tech_, 'error', function () {
19410 if (this.masterPlaylistController_) {
19411 this.masterPlaylistController_.pauseLoading();
19412 }
19413 });
19414
19415 this.audioTrackChange_ = function () {
19416 _this.masterPlaylistController_.setupAudio();
19417 _this.tech_.trigger({ type: 'usage', name: 'hls-audio-change' });
19418 };
19419
19420 this.textTrackChange_ = function () {
19421 _this.masterPlaylistController_.setupSubtitles();
19422 };
19423
19424 this.on(this.tech_, 'play', this.play);
19425 }
19426
19427 /**
19428 * The Source Handler object, which informs video.js what additional
19429 * MIME types are supported and sets up playback. It is registered
19430 * automatically to the appropriate tech based on the capabilities of
19431 * the browser it is running in. It is not necessary to use or modify
19432 * this object in normal usage.
19433 */
19434
19435 _createClass(HlsHandler, [{
19436 key: 'setOptions_',
19437 value: function setOptions_() {
19438 var _this2 = this;
19439
19440 // defaults
19441 this.options_.withCredentials = this.options_.withCredentials || false;
19442
19443 if (typeof this.options_.blacklistDuration !== 'number') {
19444 this.options_.blacklistDuration = 5 * 60;
19445 }
19446
19447 // start playlist selection at a reasonable bandwidth for
19448 // broadband internet (0.5 MB/s) or mobile (0.0625 MB/s)
19449 if (typeof this.options_.bandwidth !== 'number') {
19450 // only use Android for mobile because iOS does not support MSE (and uses
19451 // native HLS)
19452 this.options_.bandwidth = _videoJs2['default'].browser.IS_ANDROID ? INITIAL_BANDWIDTH_MOBILE : INITIAL_BANDWIDTH_DESKTOP;
19453 }
19454
19455 // grab options passed to player.src
19456 ['withCredentials', 'bandwidth'].forEach(function (option) {
19457 if (typeof _this2.source_[option] !== 'undefined') {
19458 _this2.options_[option] = _this2.source_[option];
19459 }
19460 });
19461
19462 this.bandwidth = this.options_.bandwidth;
19463 }
19464
19465 /**
19466 * called when player.src gets called, handle a new source
19467 *
19468 * @param {Object} src the source object to handle
19469 */
19470 }, {
19471 key: 'src',
19472 value: function src(_src) {
19473 var _this3 = this;
19474
19475 // do nothing if the src is falsey
19476 if (!_src) {
19477 return;
19478 }
19479 this.setOptions_();
19480 // add master playlist controller options
19481 this.options_.url = this.source_.src;
19482 this.options_.tech = this.tech_;
19483 this.options_.externHls = Hls;
19484
19485 this.masterPlaylistController_ = new _masterPlaylistController.MasterPlaylistController(this.options_);
19486 this.playbackWatcher_ = new _playbackWatcher2['default'](_videoJs2['default'].mergeOptions(this.options_, {
19487 seekable: function seekable() {
19488 return _this3.seekable();
19489 }
19490 }));
19491
19492 this.masterPlaylistController_.on('error', function () {
19493 var player = _videoJs2['default'].players[_this3.tech_.options_.playerId];
19494
19495 player.error(_this3.masterPlaylistController_.error);
19496 });
19497
19498 // `this` in selectPlaylist should be the HlsHandler for backwards
19499 // compatibility with < v2
19500 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : Hls.STANDARD_PLAYLIST_SELECTOR.bind(this);
19501
19502 // re-expose some internal objects for backwards compatibility with < v2
19503 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
19504 this.mediaSource = this.masterPlaylistController_.mediaSource;
19505
19506 // Proxy assignment of some properties to the master playlist
19507 // controller. Using a custom property for backwards compatibility
19508 // with < v2
19509 Object.defineProperties(this, {
19510 selectPlaylist: {
19511 get: function get() {
19512 return this.masterPlaylistController_.selectPlaylist;
19513 },
19514 set: function set(selectPlaylist) {
19515 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
19516 }
19517 },
19518 throughput: {
19519 get: function get() {
19520 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
19521 },
19522 set: function set(throughput) {
19523 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput;
19524 // By setting `count` to 1 the throughput value becomes the starting value
19525 // for the cumulative average
19526 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
19527 }
19528 },
19529 bandwidth: {
19530 get: function get() {
19531 return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
19532 },
19533 set: function set(bandwidth) {
19534 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth;
19535 // setting the bandwidth manually resets the throughput counter
19536 // `count` is set to zero that current value of `rate` isn't included
19537 // in the cumulative average
19538 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
19539 rate: 0,
19540 count: 0
19541 };
19542 }
19543 },
19544 /**
19545 * `systemBandwidth` is a combination of two serial processes bit-rates. The first
19546 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
19547 * the entire process after that - decryption, transmuxing, and appending - provided
19548 * by `throughput`.
19549 *
19550 * Since the two process are serial, the overall system bandwidth is given by:
19551 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
19552 */
19553 systemBandwidth: {
19554 get: function get() {
19555 var invBandwidth = 1 / (this.bandwidth || 1);
19556 var invThroughput = undefined;
19557
19558 if (this.throughput > 0) {
19559 invThroughput = 1 / this.throughput;
19560 } else {
19561 invThroughput = 0;
19562 }
19563
19564 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
19565
19566 return systemBitrate;
19567 },
19568 set: function set() {
19569 _videoJs2['default'].log.error('The "systemBandwidth" property is read-only');
19570 }
19571 }
19572 });
19573
19574 Object.defineProperties(this.stats, {
19575 bandwidth: {
19576 get: function get() {
19577 return _this3.bandwidth || 0;
19578 },
19579 enumerable: true
19580 },
19581 mediaRequests: {
19582 get: function get() {
19583 return _this3.masterPlaylistController_.mediaRequests_() || 0;
19584 },
19585 enumerable: true
19586 },
19587 mediaRequestsAborted: {
19588 get: function get() {
19589 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
19590 },
19591 enumerable: true
19592 },
19593 mediaRequestsTimedout: {
19594 get: function get() {
19595 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
19596 },
19597 enumerable: true
19598 },
19599 mediaRequestsErrored: {
19600 get: function get() {
19601 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
19602 },
19603 enumerable: true
19604 },
19605 mediaTransferDuration: {
19606 get: function get() {
19607 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
19608 },
19609 enumerable: true
19610 },
19611 mediaBytesTransferred: {
19612 get: function get() {
19613 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
19614 },
19615 enumerable: true
19616 },
19617 mediaSecondsLoaded: {
19618 get: function get() {
19619 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
19620 },
19621 enumerable: true
19622 }
19623 });
19624
19625 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
19626
19627 this.masterPlaylistController_.on('sourceopen', function () {
19628 _this3.tech_.audioTracks().addEventListener('change', _this3.audioTrackChange_);
19629 _this3.tech_.remoteTextTracks().addEventListener('change', _this3.textTrackChange_);
19630 });
19631
19632 this.masterPlaylistController_.on('selectedinitialmedia', function () {
19633 // Add the manual rendition mix-in to HlsHandler
19634 (0, _renditionMixin2['default'])(_this3);
19635 });
19636
19637 this.masterPlaylistController_.on('audioupdate', function () {
19638 // clear current audioTracks
19639 _this3.tech_.clearTracks('audio');
19640 _this3.masterPlaylistController_.activeAudioGroup().forEach(function (audioTrack) {
19641 _this3.tech_.audioTracks().addTrack(audioTrack);
19642 });
19643 });
19644
19645 // the bandwidth of the primary segment loader is our best
19646 // estimate of overall bandwidth
19647 this.on(this.masterPlaylistController_, 'progress', function () {
19648 this.tech_.trigger('progress');
19649 });
19650
19651 // In the live case, we need to ignore the very first `seeking` event since
19652 // that will be the result of the seek-to-live behavior
19653 this.on(this.masterPlaylistController_, 'firstplay', function () {
19654 this.ignoreNextSeekingEvent_ = true;
19655 });
19656
19657 this.tech_.ready(function () {
19658 return _this3.setupQualityLevels_();
19659 });
19660
19661 // do nothing if the tech has been disposed already
19662 // this can occur if someone sets the src in player.ready(), for instance
19663 if (!this.tech_.el()) {
19664 return;
19665 }
19666
19667 this.tech_.src(_videoJs2['default'].URL.createObjectURL(this.masterPlaylistController_.mediaSource));
19668 }
19669
19670 /**
19671 * Initializes the quality levels and sets listeners to update them.
19672 *
19673 * @method setupQualityLevels_
19674 * @private
19675 */
19676 }, {
19677 key: 'setupQualityLevels_',
19678 value: function setupQualityLevels_() {
19679 var _this4 = this;
19680
19681 var player = _videoJs2['default'].players[this.tech_.options_.playerId];
19682
19683 if (player && player.qualityLevels) {
19684 this.qualityLevels_ = player.qualityLevels();
19685
19686 this.masterPlaylistController_.on('selectedinitialmedia', function () {
19687 handleHlsLoadedMetadata(_this4.qualityLevels_, _this4);
19688 });
19689
19690 this.playlists.on('mediachange', function () {
19691 handleHlsMediaChange(_this4.qualityLevels_, _this4.playlists);
19692 });
19693 }
19694 }
19695
19696 /**
19697 * a helper for grabbing the active audio group from MasterPlaylistController
19698 *
19699 * @private
19700 */
19701 }, {
19702 key: 'activeAudioGroup_',
19703 value: function activeAudioGroup_() {
19704 return this.masterPlaylistController_.activeAudioGroup();
19705 }
19706
19707 /**
19708 * Begin playing the video.
19709 */
19710 }, {
19711 key: 'play',
19712 value: function play() {
19713 this.masterPlaylistController_.play();
19714 }
19715
19716 /**
19717 * a wrapper around the function in MasterPlaylistController
19718 */
19719 }, {
19720 key: 'setCurrentTime',
19721 value: function setCurrentTime(currentTime) {
19722 this.masterPlaylistController_.setCurrentTime(currentTime);
19723 }
19724
19725 /**
19726 * a wrapper around the function in MasterPlaylistController
19727 */
19728 }, {
19729 key: 'duration',
19730 value: function duration() {
19731 return this.masterPlaylistController_.duration();
19732 }
19733
19734 /**
19735 * a wrapper around the function in MasterPlaylistController
19736 */
19737 }, {
19738 key: 'seekable',
19739 value: function seekable() {
19740 return this.masterPlaylistController_.seekable();
19741 }
19742
19743 /**
19744 * Abort all outstanding work and cleanup.
19745 */
19746 }, {
19747 key: 'dispose',
19748 value: function dispose() {
19749 if (this.playbackWatcher_) {
19750 this.playbackWatcher_.dispose();
19751 }
19752 if (this.masterPlaylistController_) {
19753 this.masterPlaylistController_.dispose();
19754 }
19755 if (this.qualityLevels_) {
19756 this.qualityLevels_.dispose();
19757 }
19758 this.tech_.audioTracks().removeEventListener('change', this.audioTrackChange_);
19759 this.tech_.remoteTextTracks().removeEventListener('change', this.textTrackChange_);
19760 _get(Object.getPrototypeOf(HlsHandler.prototype), 'dispose', this).call(this);
19761 }
19762 }]);
19763
19764 return HlsHandler;
19765})(Component);
19766
19767var HlsSourceHandler = function HlsSourceHandler(mode) {
19768 return {
19769 canHandleSource: function canHandleSource(srcObj) {
19770 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
19771
19772 var localOptions = _videoJs2['default'].mergeOptions(_videoJs2['default'].options, options);
19773
19774 // this forces video.js to skip this tech/mode if its not the one we have been
19775 // overriden to use, by returing that we cannot handle the source.
19776 if (localOptions.hls && localOptions.hls.mode && localOptions.hls.mode !== mode) {
19777 return false;
19778 }
19779 return HlsSourceHandler.canPlayType(srcObj.type, localOptions);
19780 },
19781 handleSource: function handleSource(source, tech) {
19782 var options = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2];
19783
19784 var localOptions = _videoJs2['default'].mergeOptions(_videoJs2['default'].options, options, { hls: { mode: mode } });
19785
19786 if (mode === 'flash') {
19787 // We need to trigger this asynchronously to give others the chance
19788 // to bind to the event when a source is set at player creation
19789 tech.setTimeout(function () {
19790 tech.trigger('loadstart');
19791 }, 1);
19792 }
19793
19794 tech.hls = new HlsHandler(source, tech, localOptions);
19795 tech.hls.xhr = (0, _xhr2['default'])();
19796
19797 tech.hls.src(source.src);
19798 return tech.hls;
19799 },
19800 canPlayType: function canPlayType(type) {
19801 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
19802
19803 var localOptions = _videoJs2['default'].mergeOptions(_videoJs2['default'].options, options);
19804
19805 if (HlsSourceHandler.canPlayType(type, localOptions)) {
19806 return 'maybe';
19807 }
19808 return '';
19809 }
19810 };
19811};
19812
19813HlsSourceHandler.canPlayType = function (type, options) {
19814 // No support for IE 10 or below
19815 if (_videoJs2['default'].browser.IE_VERSION && _videoJs2['default'].browser.IE_VERSION <= 10) {
19816 return false;
19817 }
19818
19819 var mpegurlRE = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
19820
19821 // favor native HLS support if it's available
19822 if (!options.hls.overrideNative && Hls.supportsNativeHls) {
19823 return false;
19824 }
19825 return mpegurlRE.test(type);
19826};
19827
19828if (typeof _videoJs2['default'].MediaSource === 'undefined' || typeof _videoJs2['default'].URL === 'undefined') {
19829 _videoJs2['default'].MediaSource = _videojsContribMediaSources.MediaSource;
19830 _videoJs2['default'].URL = _videojsContribMediaSources.URL;
19831}
19832
19833var flashTech = _videoJs2['default'].getTech('Flash');
19834
19835// register source handlers with the appropriate techs
19836if (_videojsContribMediaSources.MediaSource.supportsNativeMediaSources()) {
19837 _videoJs2['default'].getTech('Html5').registerSourceHandler(HlsSourceHandler('html5'), 0);
19838}
19839if (_globalWindow2['default'].Uint8Array && flashTech) {
19840 flashTech.registerSourceHandler(HlsSourceHandler('flash'));
19841}
19842
19843_videoJs2['default'].HlsHandler = HlsHandler;
19844_videoJs2['default'].HlsSourceHandler = HlsSourceHandler;
19845_videoJs2['default'].Hls = Hls;
19846if (!_videoJs2['default'].use) {
19847 _videoJs2['default'].registerComponent('Hls', Hls);
19848}
19849_videoJs2['default'].m3u8 = _m3u8Parser2['default'];
19850_videoJs2['default'].options.hls = _videoJs2['default'].options.hls || {};
19851
19852if (_videoJs2['default'].registerPlugin) {
19853 _videoJs2['default'].registerPlugin('reloadSourceOnError', _reloadSourceOnError2['default']);
19854} else {
19855 _videoJs2['default'].plugin('reloadSourceOnError', _reloadSourceOnError2['default']);
19856}
19857
19858module.exports = {
19859 Hls: Hls,
19860 HlsHandler: HlsHandler,
19861 HlsSourceHandler: HlsSourceHandler
19862};
19863}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
19864},{"./bin-utils":2,"./config":3,"./master-playlist-controller":5,"./playback-watcher":7,"./playlist":10,"./playlist-loader":8,"./playlist-selectors.js":9,"./reload-source-on-error":12,"./rendition-mixin":13,"./xhr":19,"aes-decrypter":23,"global/document":29,"global/window":30,"m3u8-parser":31,"videojs-contrib-media-sources":73}]},{},[76])(76)
19865});
\No newline at end of file